From a4da1f0f7610f5220babca38fc5bc9b55f772ed4 Mon Sep 17 00:00:00 2001 From: Carlos Natalino Date: Wed, 23 Nov 2022 18:05:52 +0100 Subject: [PATCH 01/13] Improvements to the scripts and tutorial of the OFC'22 --- src/tests/ofc22/deploy_specs.sh | 11 ++- src/tests/ofc22/run_test_01_bootstrap.sh | 4 - ...run_tests_and_coverage.sh => run_tests.sh} | 11 ++- src/tests/ofc22/setup_test_env.sh | 9 --- tutorial/2-2-ofc22.md | 80 ++++++++++++++----- 5 files changed, 74 insertions(+), 41 deletions(-) rename src/tests/ofc22/{run_tests_and_coverage.sh => run_tests.sh} (77%) delete mode 100755 src/tests/ofc22/setup_test_env.sh diff --git a/src/tests/ofc22/deploy_specs.sh b/src/tests/ofc22/deploy_specs.sh index 8afd68384..ffd91da35 100644 --- a/src/tests/ofc22/deploy_specs.sh +++ b/src/tests/ofc22/deploy_specs.sh @@ -2,6 +2,11 @@ export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. +# Supported components are: +# context device automation policy service compute monitoring webui +# interdomain slice pathcomp dlt +# dbscanserving opticalattackmitigator opticalattackdetector +# l3_attackmitigator l3_centralizedattackdetector l3_distributedattackdetector export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" # Set the tag you want to use for your images. @@ -13,5 +18,9 @@ export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" -# Set the neew Grafana admin password +# Set the new Grafana admin password export TFS_GRAFANA_PASSWORD="admin123+" + +# If not already set, disable skip-build flag. +# If TFS_SKIP_BUILD is "YES", the containers are not rebuilt-retagged-repushed and existing ones are used. +export TFS_SKIP_BUILD=${TFS_SKIP_BUILD:-""} diff --git a/src/tests/ofc22/run_test_01_bootstrap.sh b/src/tests/ofc22/run_test_01_bootstrap.sh index bb7407073..4f0b6cd7d 100755 --- a/src/tests/ofc22/run_test_01_bootstrap.sh +++ b/src/tests/ofc22/run_test_01_bootstrap.sh @@ -13,9 +13,5 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# make sure to source the following scripts: -# - my_deploy.sh -# - tfs_runtime_env_vars.sh - source tfs_runtime_env_vars.sh pytest --verbose src/tests/ofc22/tests/test_functional_bootstrap.py diff --git a/src/tests/ofc22/run_tests_and_coverage.sh b/src/tests/ofc22/run_tests.sh similarity index 77% rename from src/tests/ofc22/run_tests_and_coverage.sh rename to src/tests/ofc22/run_tests.sh index ae956925a..0ad4be313 100755 --- a/src/tests/ofc22/run_tests_and_coverage.sh +++ b/src/tests/ofc22/run_tests.sh @@ -16,7 +16,6 @@ PROJECTDIR=`pwd` -# cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc COVERAGEFILE=$PROJECTDIR/coverage/.coverage @@ -31,15 +30,15 @@ source tfs_runtime_env_vars.sh # Force a flush of Context database kubectl --namespace $TFS_K8S_NAMESPACE exec -it deployment/contextservice --container redis -- redis-cli FLUSHALL -# Run functional tests and analyze code coverage at the same time -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# Run functional tests +pytest --log-level=INFO --verbose \ src/tests/ofc22/tests/test_functional_bootstrap.py -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +pytest --log-level=INFO --verbose \ src/tests/ofc22/tests/test_functional_create_service.py -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +pytest --log-level=INFO --verbose \ src/tests/ofc22/tests/test_functional_delete_service.py -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +pytest --log-level=INFO --verbose \ src/tests/ofc22/tests/test_functional_cleanup.py diff --git a/src/tests/ofc22/setup_test_env.sh b/src/tests/ofc22/setup_test_env.sh deleted file mode 100755 index 1f8b0a5a7..000000000 --- a/src/tests/ofc22/setup_test_env.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/sh -export CONTEXTSERVICE_SERVICE_HOST=$(kubectl get service/contextservice --namespace tfs --template '{{.spec.clusterIP}}') -export CONTEXTSERVICE_SERVICE_PORT_GRPC=$(kubectl get service/contextservice --namespace tfs -o jsonpath='{.spec.ports[?(@.name=="grpc")].port}') -export COMPUTESERVICE_SERVICE_HOST=$(kubectl get service/computeservice --namespace tfs --template '{{.spec.clusterIP}}') -export COMPUTESERVICE_SERVICE_PORT_HTTP=$(kubectl get service/computeservice --namespace tfs -o jsonpath='{.spec.ports[?(@.name=="http")].port}') -echo "CONTEXTSERVICE_SERVICE_HOST=$CONTEXTSERVICE_SERVICE_HOST" -echo "CONTEXTSERVICE_SERVICE_PORT_GRPC=$CONTEXTSERVICE_SERVICE_PORT_GRPC" -echo "COMPUTESERVICE_SERVICE_HOST=$COMPUTESERVICE_SERVICE_HOST" -echo "COMPUTESERVICE_SERVICE_PORT_HTTP=$COMPUTESERVICE_SERVICE_PORT_HTTP" diff --git a/tutorial/2-2-ofc22.md b/tutorial/2-2-ofc22.md index 3b55a0961..04d585d24 100644 --- a/tutorial/2-2-ofc22.md +++ b/tutorial/2-2-ofc22.md @@ -37,9 +37,6 @@ environment and a TeraFlowSDN controller instance as described in the [Tutorial: Deployment Guide](./1-0-deployment.md), and you configured the Python environment as described in [Tutorial: Run Experiments Guide > 2.1. Configure Python Environment](./2-1-python-environment.md). -Remember to source the scenario settings, e.g., `cd ~/tfs-ctrl && source ofc22/deploy_specs.sh` in each terminal you open. -Then, re-build the protocol buffers code from the proto files: -`./proto/generate_code_python.sh` ## 2.2.4. Access to the WebUI and Dashboard @@ -55,25 +52,33 @@ Notes: ## 2.2.5. Test execution -Before executing the tests, the environment variables need to be prepared. 
-First, make sure to load your deployment variables by: +Before executing the tests, we need to prepare a few things. + +First, you need to make sure that you have all the gRPC-generate code in your folder. +To do so, run: ``` -source my_deploy.sh +proto/generate_code_python.sh ``` -Then, you also need to load the environment variables to support the execution of the -tests by: +Then, it is time to deploy TeraFlowSDN with the correct specification for this scenario. +Make sure to load your deployment variables for this scenario by: ``` -source tfs_runtime_env_vars.sh +source ofc22/deploy_specs.sh ``` -You also need to make sure that you have all the gRPC-generate code in your folder. -To do so, run: +Then, you need to deploy the components by running: ``` -proto/generate_code_python.sh +./deploy.sh +``` + +After the deployment is finished, you need to load the environment variables to support +the execution of the tests by: + +``` +source tfs_runtime_env_vars.sh ``` To execute this functional test, four main steps needs to be carried out: @@ -90,8 +95,24 @@ See the troubleshooting section if needed. You can check the logs of the different components using the appropriate `scripts/show_logs_[component].sh` scripts after you execute each step. +There are two ways to execute the functional tests, *running all the tests with a single script* or *running each test independently*. +In the following we start with the first option, then we comment on how to run each test independently. + + +### 2.2.5.1. Running all tests with a single script + +We have a script that executes all the steps at once. +It is meant for being used to test if all components involved in this scenario are working correct. +To run all the functional tests, you can run: + +``` +ofc22/run_tests_and_coverage.sh +``` + +The following sections explain each one of the steps. -### 2.2.5.1. Device bootstrapping + +### 2.2.5.2. Device bootstrapping This step configures some basic entities (Context and Topology), the devices, and the links in the topology. @@ -103,7 +124,11 @@ The expected results are: To run this step, you can do it from the WebUI by uploading the file `./ofc22/tests/descriptors_emulated.json` that contains the descriptors of the contexts, topologies, devices, and links, or by -executing the `./ofc22/run_test_01_bootstrap.sh` script. +executing the script: + +``` +./ofc22/run_test_01_bootstrap.sh +``` When the bootstrapping finishes, check in the Grafana L3-Monitoring Dashboard and you should see the monitoring data being plotted and updated every 5 seconds (by default). @@ -117,12 +142,16 @@ Note here that the emulated devices produce synthetic randomly-generated monitor and do not represent any particularservices configured. -### 2.2.5.2. L3VPN Service creation +### 2.2.5.3. L3VPN Service creation This step configures a new service emulating the request an OSM WIM would make by means of a Mock OSM instance. -To run this step, execute the `./ofc22/run_test_02_create_service.sh` script. +To run this step, execute the script: + +``` +./ofc22/run_test_02_create_service.sh +``` When the script finishes, check the WebUI *Services* tab. You should see that two services have been created, one for the optical layer and another for the packet layer. @@ -133,13 +162,18 @@ the plots with the monitored data for the device. By default, device R1-EMU is selected. -### 2.2.5.3. L3VPN Service removal +### 2.2.5.4. 
L3VPN Service removal This step deconfigures the previously created services emulating the request an OSM WIM would make by means of a Mock OSM instance. -To run this step, execute the `./ofc22/run_test_03_delete_service.sh` script, or delete -the L3NM service from the WebUI. +To run this step, execute the script: + +``` +./ofc22/run_test_03_delete_service.sh +``` + +or delete the L3NM service from the WebUI. When the script finishes, check the WebUI *Services* tab. You should see that the two services have been removed. @@ -149,12 +183,16 @@ In the Grafana Dashboard, given that there is no service configured, you should 0-valued flat plot again. -### 2.2.5.4. Cleanup +### 2.2.5.5. Cleanup This last step performs a cleanup of the scenario removing all the TeraFlowSDN entities for completeness. -To run this step, execute the `./ofc22/run_test_04_cleanup.sh` script. +To run this step, execute the script: + +``` +./ofc22/run_test_04_cleanup.sh +``` When the script finishes, check the WebUI *Devices* tab, you should see that the devices have been removed. -- GitLab From c7b10a868b5536cc3f10a0d810ef474012b9d7b7 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 23 Nov 2022 17:31:38 +0000 Subject: [PATCH 02/13] OFC'22 code migration - Code under migration to new Device, Service, Slice, PathComp, WebUI logic. --- src/tests/ofc22/tests/Objects.py | 236 ++++++++++++++++-- .../ofc22/tests/test_functional_bootstrap.py | 131 +--------- 2 files changed, 231 insertions(+), 136 deletions(-) diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index d2fb32ebb..47f0a2624 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -1,3 +1,5 @@ +##### LLUIS GIFRE (CTTC): CODE UNDER REARRANGEMENT ##### + # Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os, uuid from typing import Dict, List, Tuple from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID from common.tools.object_factory.Context import json_context, json_context_id @@ -23,6 +26,66 @@ from common.tools.object_factory.Link import json_link, json_link_id from common.tools.object_factory.Topology import json_topology, json_topology_id from common.proto.kpi_sample_types_pb2 import KpiSampleType +import os, uuid +from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID +from common.tools.object_factory.Context import json_context, json_context_id +from common.tools.object_factory.Device import ( + json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, + json_device_emulated_packet_router_disabled, json_device_emulated_tapi_disabled, json_device_id) +from common.tools.object_factory.EndPoint import json_endpoints +from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id +from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned +from common.tools.object_factory.Topology import json_topology, json_topology_id + + + + +# if true, Device component is present and will infeer the endpoints from connect-rules +# if false, Device component is not present and device objects must contain preconfigured endpoints +ADD_CONNECT_RULES_TO_DEVICES = os.environ.get('ADD_CONNECT_RULES_TO_DEVICES', 'True') +ADD_CONNECT_RULES_TO_DEVICES = ADD_CONNECT_RULES_TO_DEVICES.upper() in {'T', 'TRUE', '1', 'Y', 'YES'} + + + +def compose_router(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_ols(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'optical', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_tapi_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None): + device_id = json_device_id(device_uuid) + r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] + config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] + endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) + j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints + device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) + return device_id, endpoints, device + +def compose_link(endpoint_a, endpoint_z): + link_uuid = get_link_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + link_id = json_link_id(link_uuid) + link = json_link(link_uuid, 
[endpoint_a['endpoint_id'], endpoint_z['endpoint_id']]) + return link_id, link + +def compose_service(endpoint_a, endpoint_z, constraints=[]): + service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) + endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']] + service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints) + return service + # ----- Context -------------------------------------------------------------------------------------------------------- CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) CONTEXT = json_context(DEFAULT_CONTEXT_UUID) @@ -31,6 +94,39 @@ CONTEXT = json_context(DEFAULT_CONTEXT_UUID) TOPOLOGY_ID = json_topology_id(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) TOPOLOGY = json_topology(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) +# ----- Domains -------------------------------------------------------------------------------------------------------- +# Overall network topology +TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID +TOPO_ADMIN_ID = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) +TOPO_ADMIN = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) + +# DataCenter #1 Network +TOPO_DC1_UUID = 'DC1' +TOPO_DC1_ID = json_topology_id(TOPO_DC1_UUID, context_id=CONTEXT_ID) +TOPO_DC1 = json_topology(TOPO_DC1_UUID, context_id=CONTEXT_ID) + +# DataCenter #2 Network +TOPO_DC2_UUID = 'DC2' +TOPO_DC2_ID = json_topology_id(TOPO_DC2_UUID, context_id=CONTEXT_ID) +TOPO_DC2 = json_topology(TOPO_DC2_UUID, context_id=CONTEXT_ID) + +# CellSite #1 Network +TOPO_CS1_UUID = 'CS1' +TOPO_CS1_ID = json_topology_id(TOPO_CS1_UUID, context_id=CONTEXT_ID) +TOPO_CS1 = json_topology(TOPO_CS1_UUID, context_id=CONTEXT_ID) + +# CellSite #2 Network +TOPO_CS2_UUID = 'CS2' +TOPO_CS2_ID = json_topology_id(TOPO_CS2_UUID, context_id=CONTEXT_ID) +TOPO_CS2 = json_topology(TOPO_CS2_UUID, context_id=CONTEXT_ID) + +# Transport Network Network +TOPO_TN_UUID = 'TN' +TOPO_TN_ID = json_topology_id(TOPO_TN_UUID, context_id=CONTEXT_ID) +TOPO_TN = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID) + + + # ----- Monitoring Samples --------------------------------------------------------------------------------------------- PACKET_PORT_SAMPLE_TYPES = [ KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED, @@ -161,6 +257,29 @@ DEVICE_O1_CONNECT_RULES = json_device_connect_rules(DEVICE_O1_ADDRESS, DEVICE_O1 }) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_O1_ENDPOINT_DEFS) +# ----- Devices -------------------------------------------------------------------------------------------------------- +# DataCenters +DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int']) +DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int']) + +# CellSites +DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2']) +DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2']) +DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2']) +DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2']) + +# Transport Network +DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1']) +DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_router('TN-R2', ['1/1', '1/2', '2/1']) +DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1']) +DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = 
compose_router('TN-R4', ['1/1', '1/2', '2/1']) + +#tols_ep_uuids = [str(uuid.uuid4()).split('-')[-1] for _ in range(4)] +tols_ep_uuids = ['afd8ffbb5403', '04b84e213e83', '3169ae676ac6', '93506f786270'] +DEV_TOLS_ID, DEV_TOLS_EPS, DEV_TOLS = compose_ols('TN-OLS', tols_ep_uuids) + + + # ----- Links ---------------------------------------------------------------------------------------------------------- LINK_R1_O1_UUID = get_link_uuid(DEVICE_R1_ID, ENDPOINT_ID_R1_13_0_0, DEVICE_O1_ID, ENDPOINT_ID_O1_EP1) LINK_R1_O1_ID = json_link_id(LINK_R1_O1_UUID) @@ -179,7 +298,34 @@ LINK_R4_O1_ID = json_link_id(LINK_R4_O1_UUID) LINK_R4_O1 = json_link(LINK_R4_O1_UUID, [ENDPOINT_ID_R4_13_0_0, ENDPOINT_ID_O1_EP4]) +# ----- Links ---------------------------------------------------------------------------------------------------------- +# InterDomain DC-CSGW +LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW1 = compose_link(DEV_DC1GW_EPS[0], DEV_CS1GW1_EPS[0]) +LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1GW2_EPS[0]) +LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) +LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) + +# InterDomain CSGW-TN +LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) +LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) +LINK_CS1GW1_TNR2_ID, LINK_CS1GW1_TNR2 = compose_link(DEV_CS1GW1_EPS[2], DEV_TNR2_EPS[1]) +LINK_CS1GW2_TNR1_ID, LINK_CS1GW2_TNR1 = compose_link(DEV_CS1GW2_EPS[2], DEV_TNR1_EPS[1]) +LINK_CS2GW1_TNR3_ID, LINK_CS2GW1_TNR3 = compose_link(DEV_CS2GW1_EPS[1], DEV_TNR3_EPS[0]) +LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4_EPS[0]) +LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) +LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) + +# IntraDomain TN +LINK_TNR1_TOLS_ID, LINK_TNR1_TOLS = compose_link(DEV_TNR1_EPS[2], DEV_TOLS_EPS[0]) +LINK_TNR2_TOLS_ID, LINK_TNR2_TOLS = compose_link(DEV_TNR2_EPS[2], DEV_TOLS_EPS[1]) +LINK_TNR3_TOLS_ID, LINK_TNR3_TOLS = compose_link(DEV_TNR3_EPS[2], DEV_TOLS_EPS[2]) +LINK_TNR4_TOLS_ID, LINK_TNR4_TOLS = compose_link(DEV_TNR4_EPS[2], DEV_TOLS_EPS[3]) + + + # ----- WIM Service Settings ------------------------------------------------------------------------------------------- +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' def compose_service_endpoint_id(endpoint_id): device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] @@ -196,9 +342,6 @@ WIM_SEP_R3_SITE_ID = '2' WIM_SEP_R3_BEARER = WIM_SEP_R3_ID WIM_SRV_R3_VLAN_ID = 500 -WIM_USERNAME = 'admin' -WIM_PASSWORD = 'admin' - WIM_MAPPING = [ {'device-id': DEVICE_R1_UUID, 'service_endpoint_id': WIM_SEP_R1_ID, 'service_mapping_info': {'bearer': {'bearer-reference': WIM_SEP_R1_BEARER}, 'site-id': WIM_SEP_R1_SITE_ID}}, @@ -215,17 +358,82 @@ WIM_SERVICE_CONNECTION_POINTS = [ 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_R3_VLAN_ID}}, ] -# ----- Object Collections --------------------------------------------------------------------------------------------- +# New code: +def mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): + ce_endpoint_id = ce_endpoint_id['endpoint_id'] + ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] + ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + 
service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) + _mapping = { + 'service_endpoint_id': service_endpoint_id, + 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, + 'service_mapping_info': { + 'site-id': site_id, + 'bearer': {'bearer-reference': bearer}, + } + } + if priority is not None: _mapping['service_mapping_info']['priority'] = priority + if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, _mapping + +WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = mapping('DC1', DEV_DC1GW_EPS[0], DEV_CS1GW1_ID, priority=10, redundant=['DC1:DC1-GW:eth2']) +WIM_SEP_DC1_SEC, WIM_MAP_DC1_SEC = mapping('DC1', DEV_DC1GW_EPS[1], DEV_CS1GW2_ID, priority=20, redundant=['DC1:DC1-GW:eth1']) +WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = mapping('DC2', DEV_DC2GW_EPS[0], DEV_CS2GW1_ID, priority=10, redundant=['DC2:DC2-GW:eth2']) +WIM_SEP_DC2_SEC, WIM_MAP_DC2_SEC = mapping('DC2', DEV_DC2GW_EPS[1], DEV_CS2GW2_ID, priority=20, redundant=['DC2:DC2-GW:eth1']) + +WIM_MAPPING = [WIM_MAP_DC1_PRI, WIM_MAP_DC1_SEC, WIM_MAP_DC2_PRI, WIM_MAP_DC2_SEC] + +WIM_SRV_VLAN_ID = 300 +WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_CONNECTION_POINTS = [ + {'service_endpoint_id': WIM_SEP_DC1_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, + {'service_endpoint_id': WIM_SEP_DC2_PRI, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, +] + -CONTEXTS = [CONTEXT] -TOPOLOGIES = [TOPOLOGY] -DEVICES = [ - (DEVICE_R1, DEVICE_R1_CONNECT_RULES), - (DEVICE_R2, DEVICE_R2_CONNECT_RULES), - (DEVICE_R3, DEVICE_R3_CONNECT_RULES), - (DEVICE_R4, DEVICE_R4_CONNECT_RULES), - (DEVICE_O1, DEVICE_O1_CONNECT_RULES), -] -LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1] \ No newline at end of file +# ----- Containers ----------------------------------------------------------------------------------------------------- +CONTEXTS = [CONTEXT ] +TOPOLOGIES = [TOPOLOGY] +DEVICES = [DEVICE_R1, DEVICE_R2, DEVICE_R3, DEVICE_R4, DEVICE_O1] +LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1] + +OBJECTS_PER_TOPOLOGY = [ + (TOPO_ADMIN_ID, + [ DEV_DC1GW_ID, DEV_DC2GW_ID, + DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, + DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + DEV_TOLS_ID, + ], + [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, + LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, + LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, + LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, + ], + ), + (TOPO_DC1_ID, + [DEV_DC1GW_ID], + []), + (TOPO_DC2_ID, + [DEV_DC2GW_ID], + []), + (TOPO_CS1_ID, + [DEV_CS1GW1_ID, DEV_CS1GW2_ID], + []), + (TOPO_CS2_ID, + [DEV_CS2GW1_ID, DEV_CS2GW2_ID], + []), + (TOPO_TN_ID, + [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, + DEV_TOLS_ID, + ], + [ LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, + ]), +] diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index 65b7cece1..f2f0dcb68 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -1,3 +1,5 @@ +##### LLUIS GIFRE (CTTC): CODE UNDER REARRANGEMENT 
##### + # Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -44,138 +46,24 @@ def test_scenario_empty(context_client : ContextClient): # pylint: disable=rede assert len(response.links) == 0 -def test_prepare_scenario(context_client : ContextClient): # pylint: disable=redefined-outer-name - - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - #events_collector = EventsCollector(context_client) - #events_collector.start() - - #expected_events = [] - - # ----- Create Contexts and Topologies ----------------------------------------------------------------------------- - for context in CONTEXTS: - context_uuid = context['context_id']['context_uuid']['uuid'] - LOGGER.info('Adding Context {:s}'.format(context_uuid)) - response = context_client.SetContext(Context(**context)) - assert response.context_uuid.uuid == context_uuid - #expected_events.append(('ContextEvent', EVENT_CREATE, json_context_id(context_uuid))) - - for topology in TOPOLOGIES: - context_uuid = topology['topology_id']['context_id']['context_uuid']['uuid'] - topology_uuid = topology['topology_id']['topology_uuid']['uuid'] - LOGGER.info('Adding Topology {:s}/{:s}'.format(context_uuid, topology_uuid)) - response = context_client.SetTopology(Topology(**topology)) - assert response.context_id.context_uuid.uuid == context_uuid - assert response.topology_uuid.uuid == topology_uuid - context_id = json_context_id(context_uuid) - #expected_events.append(('TopologyEvent', EVENT_CREATE, json_topology_id(topology_uuid, context_id=context_id))) - - # ----- Validate Collected Events ---------------------------------------------------------------------------------- - #check_events(events_collector, expected_events) - - # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - #events_collector.stop() - - -def test_scenario_ready(context_client : ContextClient): # pylint: disable=redefined-outer-name - # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- - response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) - - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) - - response = context_client.ListDevices(Empty()) - assert len(response.devices) == 0 - - response = context_client.ListLinks(Empty()) - assert len(response.links) == 0 - - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 - - -def test_devices_bootstraping( - context_client : ContextClient, device_client : DeviceClient): # pylint: disable=redefined-outer-name - - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - #events_collector = EventsCollector(context_client, log_events_received=True) - #events_collector.start() +def test_prepare_environment(context_client : ContextClient): # pylint: disable=redefined-outer-name - #expected_events = [] + for context in CONTEXTS : context_client.SetContext (Context (**context )) + for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology)) - # ----- Create Devices and Validate Collected Events --------------------------------------------------------------- for device, connect_rules in DEVICES: - device_uuid = 
device['device_id']['device_uuid']['uuid'] - LOGGER.info('Adding Device {:s}'.format(device_uuid)) - device_with_connect_rules = copy.deepcopy(device) device_with_connect_rules['device_config']['config_rules'].extend(connect_rules) - response = device_client.AddDevice(Device(**device_with_connect_rules)) - assert response.device_uuid.uuid == device_uuid - - #expected_events.extend([ - # # Device creation, update for automation to start the device - # ('DeviceEvent', EVENT_CREATE, json_device_id(device_uuid)), - # #('DeviceEvent', EVENT_UPDATE, json_device_id(device_uuid)), - #]) - - #response = context_client.GetDevice(response) - #for endpoint in response.device_endpoints: - # for _ in endpoint.kpi_sample_types: - # # Monitoring configures monitoring for endpoint - # expected_events.append(('DeviceEvent', EVENT_UPDATE, json_device_id(device_uuid))) - - # ----- Validate Collected Events ---------------------------------------------------------------------------------- - #check_events(events_collector, expected_events) + device_client.AddDevice(Device(**device_with_connect_rules)) - # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - #events_collector.stop() + for link in LINKS : context_client.SetLink (Link (**link )) -def test_devices_bootstrapped(context_client : ContextClient): # pylint: disable=redefined-outer-name - # ----- List entities - Ensure bevices are created ----------------------------------------------------------------- - response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) - - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) - response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) - response = context_client.ListLinks(Empty()) - assert len(response.links) == 0 - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 - - -def test_links_creation(context_client : ContextClient): # pylint: disable=redefined-outer-name - - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - #events_collector = EventsCollector(context_client) - #events_collector.start() - - #expected_events = [] - - # ----- Create Links and Validate Collected Events ----------------------------------------------------------------- - for link in LINKS: - link_uuid = link['link_id']['link_uuid']['uuid'] - LOGGER.info('Adding Link {:s}'.format(link_uuid)) - response = context_client.SetLink(Link(**link)) - assert response.link_uuid.uuid == link_uuid - #expected_events.append(('LinkEvent', EVENT_CREATE, json_link_id(link_uuid))) - - # ----- Validate Collected Events ---------------------------------------------------------------------------------- - #check_events(events_collector, expected_events) - - # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - #events_collector.stop() - - -def test_links_created(context_client : ContextClient): # pylint: disable=redefined-outer-name - # ----- List entities - Ensure links are created ------------------------------------------------------------------- +def test_scenario_ready(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- response = 
context_client.ListContexts(Empty()) assert len(response.contexts) == len(CONTEXTS) @@ -191,7 +79,6 @@ def test_links_created(context_client : ContextClient): # pylint: disable=redef response = context_client.ListServices(ContextId(**CONTEXT_ID)) assert len(response.services) == 0 - def test_scenario_kpis_created(monitoring_client: MonitoringClient): """ This test validates that KPIs related to the service/device/endpoint were created -- GitLab From e14955123cee4c461464bf5a0edfaffca973f74b Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 24 Nov 2022 17:29:44 +0000 Subject: [PATCH 03/13] Common: - new generic descriptor loader tool - new scenario loader for automated tests --- src/common/tests/LoadScenario.py | 48 ++++++ src/common/tools/descriptor/Loader.py | 188 ++++++++++++++++++++++++ src/common/tools/descriptor/Tools.py | 103 +++++++++++++ src/common/tools/descriptor/__init__.py | 14 ++ 4 files changed, 353 insertions(+) create mode 100644 src/common/tests/LoadScenario.py create mode 100644 src/common/tools/descriptor/Loader.py create mode 100644 src/common/tools/descriptor/Tools.py create mode 100644 src/common/tools/descriptor/__init__.py diff --git a/src/common/tests/LoadScenario.py b/src/common/tests/LoadScenario.py new file mode 100644 index 000000000..1c531ed60 --- /dev/null +++ b/src/common/tests/LoadScenario.py @@ -0,0 +1,48 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
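+
+# Helper to load a full test scenario (contexts, topologies, devices, links, services,
+# slices) from a JSON descriptor file, delegating to the generic DescriptorLoader in
+# common.tools.descriptor.Loader and raising an exception if any descriptor fails to load.
+#
+# Usage sketch (illustrative only; assumes pytest fixtures providing the gRPC clients):
+#   from common.tests.LoadScenario import load_scenario_from_descriptor
+#   def test_prepare_environment(context_client, device_client, service_client, slice_client):
+#       load_scenario_from_descriptor(
+#           'path/to/descriptors_emulated.json',
+#           context_client, device_client, service_client, slice_client)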
+ +import json, logging +from common.tools.descriptor.Loader import DescriptorLoader, compose_notifications +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from service.client.ServiceClient import ServiceClient +from slice.client.SliceClient import SliceClient + +LOGGER = logging.getLogger(__name__) +LOGGERS = { + 'success': LOGGER.info, + 'danger' : LOGGER.error, + 'error' : LOGGER.error, +} + +def load_scenario_from_descriptor( + descriptor_file : str, context_client : ContextClient, device_client : DeviceClient, + service_client : ServiceClient, slice_client : SliceClient +) -> None: + with open(descriptor_file, 'r', encoding='UTF-8') as f: + descriptors = json.loads(f.read()) + + descriptor_loader = DescriptorLoader( + context_client=context_client, device_client=device_client, + service_client=service_client, slice_client=slice_client) + descriptor_loader.process_descriptors(descriptors) + results = descriptor_loader.get_results() + + num_errors = 0 + for message,level in compose_notifications(results): + LOGGERS.get(level)(message) + if level != 'success': num_errors += 1 + if num_errors > 0: + MSG = 'Failed to load descriptors in file {:s}' + raise Exception(MSG.format(str(descriptor_file))) diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py new file mode 100644 index 000000000..2674cdd3e --- /dev/null +++ b/src/common/tools/descriptor/Loader.py @@ -0,0 +1,188 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# SDN controller descriptor loader + +# Usage example (WebUI): +# descriptors = json.loads(descriptors_data_from_client) +# +# descriptor_loader = DescriptorLoader() +# descriptor_loader.process_descriptors(descriptors) +# results = descriptor_loader.get_results() +# for message,level in compose_notifications(results): +# flash(message, level) + +# Usage example (pytest): +# with open('path/to/descriptor.json', 'r', encoding='UTF-8') as f: +# descriptors = json.loads(f.read()) +# +# descriptor_loader = DescriptorLoader() +# descriptor_loader.process_descriptors(descriptors) +# results = descriptor_loader.get_results() +# loggers = {'success': LOGGER.info, 'danger': LOGGER.error, 'error': LOGGER.error} +# for message,level in compose_notifications(results): +# loggers.get(level)(message) + +from typing import Dict, List, Optional, Tuple +from common.proto.context_pb2 import Connection, Context, Device, Link, Service, Slice, Topology +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from service.client.ServiceClient import ServiceClient +from slice.client.SliceClient import SliceClient +from .Tools import ( + format_device_custom_config_rules, format_service_custom_config_rules, format_slice_custom_config_rules, + get_descriptors_add_contexts, get_descriptors_add_services, get_descriptors_add_slices, + get_descriptors_add_topologies, split_devices_by_rules) + +ENTITY_TO_TEXT = { + # name => singular, plural + 'context' : ('Context', 'Contexts' ), + 'topology' : ('Topology', 'Topologies' ), + 'device' : ('Device', 'Devices' ), + 'link' : ('Link', 'Links' ), + 'service' : ('Service', 'Services' ), + 'slice' : ('Slice', 'Slices' ), + 'connection': ('Connection', 'Connections'), +} + +ACTION_TO_TEXT = { + # action => infinitive, past + 'add' : ('Add', 'Added'), + 'update' : ('Update', 'Updated'), + 'config' : ('Configure', 'Configured'), +} + +TypeResults = List[Tuple[str, str, int, List[str]]] # entity_name, action, num_ok, list[error] +TypeNotification = Tuple[str, str] # message, level +TypeNotificationList = List[TypeNotification] + +def compose_notifications(results : TypeResults) -> TypeNotificationList: + notifications = [] + for entity_name, action_name, num_ok, error_list in results: + entity_name_singluar,entity_name_plural = ENTITY_TO_TEXT[entity_name] + action_infinitive, action_past = ACTION_TO_TEXT[action_name] + num_err = len(error_list) + for error in error_list: + notifications.append((f'Unable to {action_infinitive} {entity_name_singluar} {error}', 'error')) + if num_ok : notifications.append((f'{str(num_ok)} {entity_name_plural} {action_past}', 'success')) + if num_err: notifications.append((f'{str(num_err)} {entity_name_plural} failed', 'danger')) + return notifications + +class DescriptorLoader: + def __init__( + self, context_client : Optional[ContextClient] = None, device_client : Optional[DeviceClient] = None, + service_client : Optional[ServiceClient] = None, slice_client : Optional[SliceClient] = None + ) -> None: + self.__ctx_cli = ContextClient() if context_client is None else context_client + self.__dev_cli = DeviceClient() if device_client is None else device_client + self.__svc_cli = ServiceClient() if service_client is None else service_client + self.__slc_cli = SliceClient() if slice_client is None else slice_client + self.__results : TypeResults = list() + self.__connections = None + self.__contexts = None + self.__contexts_add = None + self.__devices = None + self.__devices_add = None + 
self.__devices_config = None + self.__dummy_mode = None + self.__links = None + self.__services = None + self.__services_add = None + self.__slices = None + self.__slices_add = None + self.__topologies = None + self.__topologies_add = None + + def get_results(self) -> TypeResults: return self.__results + + def process_descriptors(self, descriptors : Dict) -> None: + self.__dummy_mode = descriptors.get('dummy_mode' , False) + self.__contexts = descriptors.get('contexts' , []) + self.__topologies = descriptors.get('topologies' , []) + self.__devices = descriptors.get('devices' , []) + self.__links = descriptors.get('links' , []) + self.__services = descriptors.get('services' , []) + self.__slices = descriptors.get('slices' , []) + self.__connections = descriptors.get('connections', []) + + # Format CustomConfigRules in Devices, Services and Slices provided in JSON format + self.__devices = [format_device_custom_config_rules (device ) for device in self.__devices ] + self.__services = [format_service_custom_config_rules(service) for service in self.__services] + self.__slices = [format_slice_custom_config_rules (slice_ ) for slice_ in self.__slices ] + + # Context and Topology require to create the entity first, and add devices, links, services, + # slices, etc. in a second stage. + self.__contexts_add = get_descriptors_add_contexts(self.__contexts) + self.__topologies_add = get_descriptors_add_topologies(self.__topologies) + + if self.__dummy_mode: + self._dummy_mode() + else: + self._normal_mode() + + def _dummy_mode(self) -> None: + # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. + self.__ctx_cli.connect() + self._process_descr('context', 'add', self.__ctx_cli.SetContext, Context, self.__contexts_add ) + self._process_descr('topology', 'add', self.__ctx_cli.SetTopology, Topology, self.__topologies_add) + self._process_descr('device', 'add', self.__ctx_cli.SetDevice, Device, self.__devices ) + self._process_descr('link', 'add', self.__ctx_cli.SetLink, Link, self.__links ) + self._process_descr('service', 'add', self.__ctx_cli.SetService, Service, self.__services ) + self._process_descr('slice', 'add', self.__ctx_cli.SetSlice, Slice, self.__slices ) + self._process_descr('connection', 'add', self.__ctx_cli.SetConnection, Connection, self.__connections ) + self._process_descr('context', 'update', self.__ctx_cli.SetContext, Context, self.__contexts ) + self._process_descr('topology', 'update', self.__ctx_cli.SetTopology, Topology, self.__topologies ) + self.__ctx_cli.close() + + def _normal_mode(self) -> None: + # Normal mode: follows the automated workflows in the different components + assert len(self.__connections) == 0, 'in normal mode, connections should not be set' + + # Device, Service and Slice require to first create the entity and the configure it + self.__devices_add, self.__devices_config = split_devices_by_rules(self.__devices) + self.__services_add = get_descriptors_add_services(self.__services) + self.__slices_add = get_descriptors_add_slices(self.__slices) + + self.__ctx_cli.connect() + self.__dev_cli.connect() + self.__svc_cli.connect() + self.__slc_cli.connect() + + self._process_descr('context', 'add', self.__ctx_cli.SetContext, Context, self.__contexts_add ) + self._process_descr('topology', 'add', self.__ctx_cli.SetTopology, Topology, self.__topologies_add) + self._process_descr('device', 'add', self.__dev_cli.AddDevice, Device, self.__devices_add ) + self._process_descr('device', 'config', 
self.__dev_cli.ConfigureDevice, Device, self.__devices_config) + self._process_descr('link', 'add', self.__ctx_cli.SetLink, Link, self.__links ) + self._process_descr('service', 'add', self.__svc_cli.CreateService, Service, self.__services_add ) + self._process_descr('service', 'update', self.__svc_cli.UpdateService, Service, self.__services ) + self._process_descr('slice', 'add', self.__slc_cli.CreateSlice, Slice, self.__slices_add ) + self._process_descr('slice', 'update', self.__slc_cli.UpdateSlice, Slice, self.__slices ) + self._process_descr('context', 'update', self.__ctx_cli.SetContext, Context, self.__contexts ) + self._process_descr('topology', 'update', self.__ctx_cli.SetTopology, Topology, self.__topologies ) + + self.__slc_cli.close() + self.__svc_cli.close() + self.__dev_cli.close() + self.__ctx_cli.close() + + def _process_descr(self, entity_name, action_name, grpc_method, grpc_class, entities) -> None: + num_ok, error_list = 0, [] + for entity in entities: + try: + grpc_method(grpc_class(**entity)) + num_ok += 1 + except Exception as e: # pylint: disable=broad-except + error_list.append(f'{str(entity)}: {str(e)}') + num_err += 1 + self.__results.append((entity_name, action_name, num_ok, error_list)) diff --git a/src/common/tools/descriptor/Tools.py b/src/common/tools/descriptor/Tools.py new file mode 100644 index 000000000..909cec9d9 --- /dev/null +++ b/src/common/tools/descriptor/Tools.py @@ -0,0 +1,103 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
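+
+# Helper functions to normalize JSON descriptors into the dictionaries expected by the
+# gRPC message constructors (Context, Topology, Device, Service, Slice).
+#
+# Illustrative example (assumed input, for documentation purposes only): a device config
+# rule whose custom resource_value is provided as a dict or list, e.g.
+#   {"custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": []}}}
+# is rewritten by format_custom_config_rules() so that resource_value becomes its JSON
+# string serialization, since the corresponding protobuf field is a plain string.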
+ +import copy, json +from typing import Dict, List, Optional, Tuple, Union + +def get_descriptors_add_contexts(contexts : List[Dict]) -> List[Dict]: + contexts_add = copy.deepcopy(contexts) + for context in contexts_add: + context['topology_ids'] = [] + context['service_ids'] = [] + return contexts_add + +def get_descriptors_add_topologies(topologies : List[Dict]) -> List[Dict]: + topologies_add = copy.deepcopy(topologies) + for topology in topologies_add: + topology['device_ids'] = [] + topology['link_ids'] = [] + return topologies_add + +def get_descriptors_add_services(services : List[Dict]) -> List[Dict]: + services_add = [] + for service in services: + service_copy = copy.deepcopy(service) + service_copy['service_endpoint_ids'] = [] + service_copy['service_constraints'] = [] + service_copy['service_config'] = {'config_rules': []} + services_add.append(service_copy) + return services_add + +def get_descriptors_add_slices(slices : List[Dict]) -> List[Dict]: + slices_add = [] + for slice_ in slices: + slice_copy = copy.deepcopy(slice_) + slice_copy['slice_endpoint_ids'] = [] + slice_copy['slice_constraints'] = [] + slice_copy['slice_config'] = {'config_rules': []} + slices_add.append(slice_copy) + return slices_add + +TypeResourceValue = Union[str, int, bool, float, dict, list] +def format_custom_config_rules(config_rules : List[Dict]) -> List[Dict]: + for config_rule in config_rules: + if 'custom' not in config_rule: continue + custom_resource_value : TypeResourceValue = config_rule['custom']['resource_value'] + if isinstance(custom_resource_value, (dict, list)): + custom_resource_value = json.dumps(custom_resource_value, sort_keys=True, indent=0) + config_rule['custom']['resource_value'] = custom_resource_value + return config_rules + +def format_device_custom_config_rules(device : Dict) -> Dict: + config_rules = device.get('device_config', {}).get('config_rules', []) + config_rules = format_custom_config_rules(config_rules) + device['device_config']['config_rules'] = config_rules + return device + +def format_service_custom_config_rules(service : Dict) -> Dict: + config_rules = service.get('service_config', {}).get('config_rules', []) + config_rules = format_custom_config_rules(config_rules) + service['service_config']['config_rules'] = config_rules + return service + +def format_slice_custom_config_rules(slice_ : Dict) -> Dict: + config_rules = slice_.get('service_config', {}).get('config_rules', []) + config_rules = format_custom_config_rules(config_rules) + slice_['service_config']['config_rules'] = config_rules + return slice_ + +def split_devices_by_rules(devices : List[Dict]) -> Tuple[List[Dict], List[Dict]]: + devices_add = [] + devices_config = [] + for device in devices: + connect_rules = [] + config_rules = [] + for config_rule in device.get('device_config', {}).get('config_rules', []): + custom_resource_key : Optional[str] = config_rule.get('custom', {}).get('resource_key') + if custom_resource_key is not None and custom_resource_key.startswith('_connect/'): + connect_rules.append(config_rule) + else: + config_rules.append(config_rule) + + if len(connect_rules) > 0: + device_add = copy.deepcopy(device) + device_add['device_endpoints'] = [] + device_add['device_config'] = {'config_rules': connect_rules} + devices_add.append(device_add) + + if len(config_rules) > 0: + device['device_config'] = {'config_rules': config_rules} + devices_config.append(device) + + return devices_add, devices_config diff --git a/src/common/tools/descriptor/__init__.py 
b/src/common/tools/descriptor/__init__.py new file mode 100644 index 000000000..70a332512 --- /dev/null +++ b/src/common/tools/descriptor/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From 73ea700b455a87aea2c086379882d91a2a987c6b Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 24 Nov 2022 17:30:20 +0000 Subject: [PATCH 04/13] WebUI: - migrated to new descriptor loading framework --- src/webui/service/main/DescriptorTools.py | 85 --------------- src/webui/service/main/routes.py | 120 ++-------------------- 2 files changed, 8 insertions(+), 197 deletions(-) delete mode 100644 src/webui/service/main/DescriptorTools.py diff --git a/src/webui/service/main/DescriptorTools.py b/src/webui/service/main/DescriptorTools.py deleted file mode 100644 index 094be2f7d..000000000 --- a/src/webui/service/main/DescriptorTools.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import copy, json -from typing import Dict, List, Optional, Tuple, Union - -def get_descriptors_add_contexts(contexts : List[Dict]) -> List[Dict]: - contexts_add = copy.deepcopy(contexts) - for context in contexts_add: - context['topology_ids'] = [] - context['service_ids'] = [] - return contexts_add - -def get_descriptors_add_topologies(topologies : List[Dict]) -> List[Dict]: - topologies_add = copy.deepcopy(topologies) - for topology in topologies_add: - topology['device_ids'] = [] - topology['link_ids'] = [] - return topologies_add - -def get_descriptors_add_services(services : List[Dict]) -> List[Dict]: - services_add = [] - for service in services: - service_copy = copy.deepcopy(service) - service_copy['service_endpoint_ids'] = [] - service_copy['service_constraints'] = [] - service_copy['service_config'] = {'config_rules': []} - services_add.append(service_copy) - return services_add - -def get_descriptors_add_slices(slices : List[Dict]) -> List[Dict]: - slices_add = [] - for slice in slices: - slice_copy = copy.deepcopy(slice) - slice_copy['slice_endpoint_ids'] = [] - slice_copy['slice_constraints'] = [] - slice_copy['slice_config'] = {'config_rules': []} - slices_add.append(slice_copy) - return slices_add - -TypeResourceValue = Union[str, int, bool, float, dict, list] -def format_custom_config_rules(config_rules : List[Dict]) -> List[Dict]: - for config_rule in config_rules: - if 'custom' not in config_rule: continue - custom_resource_value : TypeResourceValue = config_rule['custom']['resource_value'] - if isinstance(custom_resource_value, (dict, list)): - custom_resource_value = json.dumps(custom_resource_value, sort_keys=True, indent=0) - config_rule['custom']['resource_value'] = custom_resource_value - return config_rules - -def split_devices_by_rules(devices : List[Dict]) -> Tuple[List[Dict], List[Dict]]: - devices_add = [] - devices_config = [] - for device in devices: - connect_rules = [] - config_rules = [] - for config_rule in device.get('device_config', {}).get('config_rules', []): - custom_resource_key : Optional[str] = config_rule.get('custom', {}).get('resource_key') - if custom_resource_key is not None and custom_resource_key.startswith('_connect/'): - connect_rules.append(config_rule) - else: - config_rules.append(config_rule) - - if len(connect_rules) > 0: - device_add = copy.deepcopy(device) - device_add['device_endpoints'] = [] - device_add['device_config'] = {'config_rules': connect_rules} - devices_add.append(device_add) - - if len(config_rules) > 0: - device['device_config'] = {'config_rules': config_rules} - devices_config.append(device) - - return devices_add, devices_config diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py index 979d0664b..b161fa845 100644 --- a/src/webui/service/main/routes.py +++ b/src/webui/service/main/routes.py @@ -14,8 +14,8 @@ import json, logging, re from flask import jsonify, redirect, render_template, Blueprint, flash, session, url_for, request -from common.proto.context_pb2 import ( - Connection, Context, Device, Empty, Link, Service, Slice, Topology, ContextIdList, TopologyId, TopologyIdList) +from common.proto.context_pb2 import Empty, ContextIdList, TopologyId, TopologyIdList +from common.tools.descriptor.Loader import DescriptorLoader, compose_notifications from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Topology import json_topology_id @@ -23,9 +23,6 @@ from 
context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient from service.client.ServiceClient import ServiceClient from slice.client.SliceClient import SliceClient -from webui.service.main.DescriptorTools import ( - format_custom_config_rules, get_descriptors_add_contexts, get_descriptors_add_services, get_descriptors_add_slices, - get_descriptors_add_topologies, split_devices_by_rules) from webui.service.main.forms import ContextTopologyForm, DescriptorForm main = Blueprint('main', __name__) @@ -37,38 +34,6 @@ slice_client = SliceClient() logger = logging.getLogger(__name__) -ENTITY_TO_TEXT = { - # name => singular, plural - 'context' : ('Context', 'Contexts' ), - 'topology' : ('Topology', 'Topologies' ), - 'device' : ('Device', 'Devices' ), - 'link' : ('Link', 'Links' ), - 'service' : ('Service', 'Services' ), - 'slice' : ('Slice', 'Slices' ), - 'connection': ('Connection', 'Connections'), -} - -ACTION_TO_TEXT = { - # action => infinitive, past - 'add' : ('Add', 'Added'), - 'update' : ('Update', 'Updated'), - 'config' : ('Configure', 'Configured'), -} - -def process_descriptor(entity_name, action_name, grpc_method, grpc_class, entities): - entity_name_singluar,entity_name_plural = ENTITY_TO_TEXT[entity_name] - action_infinitive, action_past = ACTION_TO_TEXT[action_name] - num_ok, num_err = 0, 0 - for entity in entities: - try: - grpc_method(grpc_class(**entity)) - num_ok += 1 - except Exception as e: # pylint: disable=broad-except - flash(f'Unable to {action_infinitive} {entity_name_singluar} {str(entity)}: {str(e)}', 'error') - num_err += 1 - if num_ok : flash(f'{str(num_ok)} {entity_name_plural} {action_past}', 'success') - if num_err: flash(f'{str(num_err)} {entity_name_plural} failed', 'danger') - def process_descriptors(descriptors): try: descriptors_file = request.files[descriptors.name] @@ -78,80 +43,11 @@ def process_descriptors(descriptors): flash(f'Unable to load descriptor file: {str(e)}', 'danger') return - dummy_mode = descriptors.get('dummy_mode' , False) - contexts = descriptors.get('contexts' , []) - topologies = descriptors.get('topologies' , []) - devices = descriptors.get('devices' , []) - links = descriptors.get('links' , []) - services = descriptors.get('services' , []) - slices = descriptors.get('slices' , []) - connections = descriptors.get('connections', []) - - # Format CustomConfigRules in Devices, Services and Slices provided in JSON format - for device in devices: - config_rules = device.get('device_config', {}).get('config_rules', []) - config_rules = format_custom_config_rules(config_rules) - device['device_config']['config_rules'] = config_rules - - for service in services: - config_rules = service.get('service_config', {}).get('config_rules', []) - config_rules = format_custom_config_rules(config_rules) - service['service_config']['config_rules'] = config_rules - - for slice in slices: - config_rules = slice.get('slice_config', {}).get('config_rules', []) - config_rules = format_custom_config_rules(config_rules) - slice['slice_config']['config_rules'] = config_rules - - - # Context and Topology require to create the entity first, and add devices, links, services, slices, etc. in a - # second stage. - contexts_add = get_descriptors_add_contexts(contexts) - topologies_add = get_descriptors_add_topologies(topologies) - - if dummy_mode: - # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. 
- context_client.connect() - process_descriptor('context', 'add', context_client.SetContext, Context, contexts_add ) - process_descriptor('topology', 'add', context_client.SetTopology, Topology, topologies_add) - process_descriptor('device', 'add', context_client.SetDevice, Device, devices ) - process_descriptor('link', 'add', context_client.SetLink, Link, links ) - process_descriptor('service', 'add', context_client.SetService, Service, services ) - process_descriptor('slice', 'add', context_client.SetSlice, Slice, slices ) - process_descriptor('connection', 'add', context_client.SetConnection, Connection, connections ) - process_descriptor('context', 'update', context_client.SetContext, Context, contexts ) - process_descriptor('topology', 'update', context_client.SetTopology, Topology, topologies ) - context_client.close() - else: - # Normal mode: follows the automated workflows in the different components - assert len(connections) == 0, 'in normal mode, connections should not be set' - - # Device, Service and Slice require to first create the entity and the configure it - devices_add, devices_config = split_devices_by_rules(devices) - services_add = get_descriptors_add_services(services) - slices_add = get_descriptors_add_slices(slices) - - context_client.connect() - device_client.connect() - service_client.connect() - slice_client.connect() - - process_descriptor('context', 'add', context_client.SetContext, Context, contexts_add ) - process_descriptor('topology', 'add', context_client.SetTopology, Topology, topologies_add) - process_descriptor('device', 'add', device_client .AddDevice, Device, devices_add ) - process_descriptor('device', 'config', device_client .ConfigureDevice, Device, devices_config) - process_descriptor('link', 'add', context_client.SetLink, Link, links ) - process_descriptor('service', 'add', service_client.CreateService, Service, services_add ) - process_descriptor('service', 'update', service_client.UpdateService, Service, services ) - process_descriptor('slice', 'add', slice_client .CreateSlice, Slice, slices_add ) - process_descriptor('slice', 'update', slice_client .UpdateSlice, Slice, slices ) - process_descriptor('context', 'update', context_client.SetContext, Context, contexts ) - process_descriptor('topology', 'update', context_client.SetTopology, Topology, topologies ) - - slice_client.close() - service_client.close() - device_client.close() - context_client.close() + descriptor_loader = DescriptorLoader() + descriptor_loader.process_descriptors(descriptors) + results = descriptor_loader.get_results() + for message,level in compose_notifications(results): + flash(message, level) @main.route('/', methods=['GET', 'POST']) def home(): @@ -191,7 +87,7 @@ def home(): if descriptor_form.validate_on_submit(): process_descriptors(descriptor_form.descriptors) return redirect(url_for("main.home")) - except Exception as e: + except Exception as e: # pylint: disable=broad-except logger.exception('Descriptor load failed') flash(f'Descriptor load failed: `{str(e)}`', 'danger') finally: -- GitLab From 466e72a6e5c73e909c097e1fc27cf767840d16bd Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 24 Nov 2022 17:30:56 +0000 Subject: [PATCH 05/13] OFC'22 test: - migrated to new scenario loading framework --- src/tests/ofc22/deploy_specs.sh | 2 +- src/tests/ofc22/descriptors_emulated.json | 87 ++++++----- src/tests/ofc22/tests/Fixtures.py | 38 +++++ src/tests/ofc22/tests/Objects.py | 139 +++++------------- .../ofc22/tests/test_functional_bootstrap.py | 60 +++----- 5 files 
changed, 145 insertions(+), 181 deletions(-) diff --git a/src/tests/ofc22/deploy_specs.sh b/src/tests/ofc22/deploy_specs.sh index ffd91da35..8214ad1fd 100644 --- a/src/tests/ofc22/deploy_specs.sh +++ b/src/tests/ofc22/deploy_specs.sh @@ -7,7 +7,7 @@ export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" # interdomain slice pathcomp dlt # dbscanserving opticalattackmitigator opticalattackdetector # l3_attackmitigator l3_centralizedattackdetector l3_distributedattackdetector -export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" +export TFS_COMPONENTS="context device monitoring pathcomp service slice compute webui" # automation # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" diff --git a/src/tests/ofc22/descriptors_emulated.json b/src/tests/ofc22/descriptors_emulated.json index 83f9c39e2..a71d454f4 100644 --- a/src/tests/ofc22/descriptors_emulated.json +++ b/src/tests/ofc22/descriptors_emulated.json @@ -9,70 +9,83 @@ "topologies": [ { "topology_id": {"topology_uuid": {"uuid": "admin"}, "context_id": {"context_uuid": {"uuid": "admin"}}}, - "device_ids": [], - "link_ids": [] + "device_ids": [ + {"device_uuid": {"uuid": "R1-EMU"}}, + {"device_uuid": {"uuid": "R2-EMU"}}, + {"device_uuid": {"uuid": "R3-EMU"}}, + {"device_uuid": {"uuid": "R4-EMU"}}, + {"device_uuid": {"uuid": "O1-OLS"}} + ], + "link_ids": [ + {"link_uuid": {"uuid": "R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870"}}, + {"link_uuid": {"uuid": "R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418"}}, + {"link_uuid": {"uuid": "R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513"}}, + {"link_uuid": {"uuid": "R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec"}} + ] } ], "devices": [ { - "device_id": {"device_uuid": {"uuid": "R1-EMU"}}, - "device_type": "emu-packet-router", + "device_id": {"device_uuid": {"uuid": "R1-EMU"}}, "device_type": "emu-packet-router", + "device_operational_status": 1, "device_drivers": [0], "device_endpoints": [], "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} - ]}, - "device_operational_status": 1, - "device_drivers": [0], - "device_endpoints": [] + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} + ]} }, { - "device_id": {"device_uuid": {"uuid": "R2-EMU"}}, - "device_type": "emu-packet-router", + "device_id": {"device_uuid": {"uuid": "R2-EMU"}}, "device_type": "emu-packet-router", + "device_operational_status": 1, "device_drivers": [0], "device_endpoints": [], "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", 
\"uuid\": \"13/1/2\"}]}"}} - ]}, - "device_operational_status": 1, - "device_drivers": [0], - "device_endpoints": [] + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} + ]} }, { - "device_id": {"device_uuid": {"uuid": "R3-EMU"}}, - "device_type": "emu-packet-router", + "device_id": {"device_uuid": {"uuid": "R3-EMU"}}, "device_type": "emu-packet-router", + "device_operational_status": 1, "device_drivers": [0], "device_endpoints": [], "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} - ]}, - "device_operational_status": 1, - "device_drivers": [0], - "device_endpoints": [] + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} + ]} }, { - "device_id": {"device_uuid": {"uuid": "R4-EMU"}}, - "device_type": "emu-packet-router", + "device_id": {"device_uuid": {"uuid": "R4-EMU"}}, "device_type": "emu-packet-router", + "device_operational_status": 1, "device_drivers": [0], "device_endpoints": [], "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} - ]}, - "device_operational_status": 1, - "device_drivers": [0], - "device_endpoints": [] + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} + ]} }, { - "device_id": {"device_uuid": {"uuid": "O1-OLS"}}, - "device_type": "emu-open-line-system", + "device_id": {"device_uuid": {"uuid": "O1-OLS"}}, "device_type": "emu-open-line-system", + "device_operational_status": 1, "device_drivers": [0], "device_endpoints": [], "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}} - ]}, - "device_operational_status": 1, - 
"device_drivers": [0], - "device_endpoints": [] + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "aade6001-f00b-5e2f-a357-6a0a9d3de870", "type": "optical", "sample_types": []}, + {"uuid": "eb287d83-f05e-53ec-ab5a-adf6bd2b5418", "type": "optical", "sample_types": []}, + {"uuid": "0ef74f99-1acc-57bd-ab9d-4b958b06c513", "type": "optical", "sample_types": []}, + {"uuid": "50296d99-58cc-5ce7-82f5-fc8ee4eec2ec", "type": "optical", "sample_types": []} + ]}}} + ]} } ], "links": [ diff --git a/src/tests/ofc22/tests/Fixtures.py b/src/tests/ofc22/tests/Fixtures.py index 370731e5d..95e301055 100644 --- a/src/tests/ofc22/tests/Fixtures.py +++ b/src/tests/ofc22/tests/Fixtures.py @@ -17,6 +17,44 @@ from common.Settings import get_setting from compute.tests.mock_osm.MockOSM import MockOSM from .Objects import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from common.Settings import get_setting +from compute.tests.mock_osm.MockOSM import MockOSM +from context.client.ContextClient import ContextClient +from device.client.DeviceClient import DeviceClient +from monitoring.client.MonitoringClient import MonitoringClient + +@pytest.fixture(scope='session') +def context_client(): + _client = ContextClient() + yield _client + _client.close() + +@pytest.fixture(scope='session') +def device_client(): + _client = DeviceClient() + yield _client + _client.close() + +@pytest.fixture(scope='session') +def monitoring_client(): + _client = MonitoringClient() + yield _client + _client.close() @pytest.fixture(scope='session') def osm_wim(): diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index 47f0a2624..f73d56cfc 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -19,33 +19,40 @@ from typing import Dict, List, Tuple from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID from common.tools.object_factory.Context import json_context, json_context_id from common.tools.object_factory.Device import ( - json_device_connect_rules, json_device_emulated_connect_rules, json_device_emulated_packet_router_disabled, - json_device_emulated_tapi_disabled, json_device_id, json_device_packetrouter_disabled, json_device_tapi_disabled) -from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id -from common.tools.object_factory.Link import json_link, json_link_id -from common.tools.object_factory.Topology import json_topology, json_topology_id -from common.proto.kpi_sample_types_pb2 import KpiSampleType - -import os, uuid -from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID -from common.tools.object_factory.Context import json_context, json_context_id -from common.tools.object_factory.Device import ( - json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, - json_device_emulated_packet_router_disabled, 
json_device_emulated_tapi_disabled, json_device_id) -from common.tools.object_factory.EndPoint import json_endpoints + json_device_connect_rules, json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, + json_device_emulated_packet_router_disabled, json_device_emulated_tapi_disabled, json_device_id, + json_device_packetrouter_disabled, json_device_tapi_disabled) +from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id, json_endpoints from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id +from common.tools.object_factory.Topology import json_topology, json_topology_id from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned from common.tools.object_factory.Topology import json_topology, json_topology_id +from common.proto.kpi_sample_types_pb2 import KpiSampleType - - +# ----- Device Credentials and Settings -------------------------------------------------------------------------------- # if true, Device component is present and will infeer the endpoints from connect-rules # if false, Device component is not present and device objects must contain preconfigured endpoints ADD_CONNECT_RULES_TO_DEVICES = os.environ.get('ADD_CONNECT_RULES_TO_DEVICES', 'True') ADD_CONNECT_RULES_TO_DEVICES = ADD_CONNECT_RULES_TO_DEVICES.upper() in {'T', 'TRUE', '1', 'Y', 'YES'} +try: + from .Credentials import DEVICE_R1_ADDRESS, DEVICE_R1_PORT, DEVICE_R1_USERNAME, DEVICE_R1_PASSWORD + from .Credentials import DEVICE_R3_ADDRESS, DEVICE_R3_PORT, DEVICE_R3_USERNAME, DEVICE_R3_PASSWORD + from .Credentials import DEVICE_O1_ADDRESS, DEVICE_O1_PORT + USE_REAL_DEVICES = True # Use real devices +except ImportError: + USE_REAL_DEVICES = False # Use emulated devices + + DEVICE_R1_ADDRESS, DEVICE_R1_PORT, DEVICE_R1_USERNAME, DEVICE_R1_PASSWORD = '0.0.0.0', 830, 'admin', 'admin' + DEVICE_R3_ADDRESS, DEVICE_R3_PORT, DEVICE_R3_USERNAME, DEVICE_R3_PASSWORD = '0.0.0.0', 830, 'admin', 'admin' + DEVICE_O1_ADDRESS, DEVICE_O1_PORT = '0.0.0.0', 4900 +#USE_REAL_DEVICES = False # Uncomment to force to use emulated devices + +if not USE_REAL_DEVICES: + json_device_packetrouter_disabled = json_device_emulated_packet_router_disabled + json_device_tapi_disabled = json_device_emulated_tapi_disabled def compose_router(device_uuid, endpoint_uuids, topology_id=None): device_id = json_device_id(device_uuid) @@ -74,12 +81,6 @@ def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None): device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) return device_id, endpoints, device -def compose_link(endpoint_a, endpoint_z): - link_uuid = get_link_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) - link_id = json_link_id(link_uuid) - link = json_link(link_uuid, [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']]) - return link_id, link - def compose_service(endpoint_a, endpoint_z, constraints=[]): service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']] @@ -91,42 +92,10 @@ CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) CONTEXT = json_context(DEFAULT_CONTEXT_UUID) # ----- Topology ------------------------------------------------------------------------------------------------------- -TOPOLOGY_ID = json_topology_id(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) -TOPOLOGY = json_topology(DEFAULT_TOPOLOGY_UUID, context_id=CONTEXT_ID) - -# ----- Domains 
-------------------------------------------------------------------------------------------------------- -# Overall network topology TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID TOPO_ADMIN_ID = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) TOPO_ADMIN = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) -# DataCenter #1 Network -TOPO_DC1_UUID = 'DC1' -TOPO_DC1_ID = json_topology_id(TOPO_DC1_UUID, context_id=CONTEXT_ID) -TOPO_DC1 = json_topology(TOPO_DC1_UUID, context_id=CONTEXT_ID) - -# DataCenter #2 Network -TOPO_DC2_UUID = 'DC2' -TOPO_DC2_ID = json_topology_id(TOPO_DC2_UUID, context_id=CONTEXT_ID) -TOPO_DC2 = json_topology(TOPO_DC2_UUID, context_id=CONTEXT_ID) - -# CellSite #1 Network -TOPO_CS1_UUID = 'CS1' -TOPO_CS1_ID = json_topology_id(TOPO_CS1_UUID, context_id=CONTEXT_ID) -TOPO_CS1 = json_topology(TOPO_CS1_UUID, context_id=CONTEXT_ID) - -# CellSite #2 Network -TOPO_CS2_UUID = 'CS2' -TOPO_CS2_ID = json_topology_id(TOPO_CS2_UUID, context_id=CONTEXT_ID) -TOPO_CS2 = json_topology(TOPO_CS2_UUID, context_id=CONTEXT_ID) - -# Transport Network Network -TOPO_TN_UUID = 'TN' -TOPO_TN_ID = json_topology_id(TOPO_TN_UUID, context_id=CONTEXT_ID) -TOPO_TN = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID) - - - # ----- Monitoring Samples --------------------------------------------------------------------------------------------- PACKET_PORT_SAMPLE_TYPES = [ KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED, @@ -135,52 +104,7 @@ PACKET_PORT_SAMPLE_TYPES = [ KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED, ] -# ----- Device Credentials and Settings -------------------------------------------------------------------------------- -try: - from .Credentials import DEVICE_R1_ADDRESS, DEVICE_R1_PORT, DEVICE_R1_USERNAME, DEVICE_R1_PASSWORD - from .Credentials import DEVICE_R3_ADDRESS, DEVICE_R3_PORT, DEVICE_R3_USERNAME, DEVICE_R3_PASSWORD - from .Credentials import DEVICE_O1_ADDRESS, DEVICE_O1_PORT - USE_REAL_DEVICES = True # Use real devices -except ImportError: - USE_REAL_DEVICES = False # Use emulated devices - - DEVICE_R1_ADDRESS = '0.0.0.0' - DEVICE_R1_PORT = 830 - DEVICE_R1_USERNAME = 'admin' - DEVICE_R1_PASSWORD = 'admin' - - DEVICE_R3_ADDRESS = '0.0.0.0' - DEVICE_R3_PORT = 830 - DEVICE_R3_USERNAME = 'admin' - DEVICE_R3_PASSWORD = 'admin' - - DEVICE_O1_ADDRESS = '0.0.0.0' - DEVICE_O1_PORT = 4900 - -#USE_REAL_DEVICES = False # Uncomment to force to use emulated devices - -def json_endpoint_ids(device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]]): - return [ - json_endpoint_id(device_id, ep_uuid, topology_id=None) - for ep_uuid, _, _ in endpoint_descriptors - ] - -def json_endpoints(device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]]): - return [ - json_endpoint(device_id, ep_uuid, ep_type, topology_id=None, kpi_sample_types=ep_sample_types) - for ep_uuid, ep_type, ep_sample_types in endpoint_descriptors - ] - -def get_link_uuid(a_device_id : Dict, a_endpoint_id : Dict, z_device_id : Dict, z_endpoint_id : Dict) -> str: - return '{:s}/{:s}=={:s}/{:s}'.format( - a_device_id['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'], - z_device_id['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid']) - - # ----- Devices -------------------------------------------------------------------------------------------------------- -if not USE_REAL_DEVICES: - json_device_packetrouter_disabled = json_device_emulated_packet_router_disabled - json_device_tapi_disabled = json_device_emulated_tapi_disabled DEVICE_R1_UUID = 'R1-EMU' DEVICE_R1_TIMEOUT = 120 @@ 
-191,11 +115,19 @@ DEVICE_R1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_R1_ID, DEVICE_R1_ENDPOINT_DEF DEVICE_R1 = json_device_packetrouter_disabled(DEVICE_R1_UUID) ENDPOINT_ID_R1_13_0_0 = DEVICE_R1_ENDPOINT_IDS[0] ENDPOINT_ID_R1_13_1_2 = DEVICE_R1_ENDPOINT_IDS[1] -DEVICE_R1_CONNECT_RULES = json_device_connect_rules(DEVICE_R1_ADDRESS, DEVICE_R1_PORT, { +DEVICE_R1_CONNECT_RULES = json_device_connect_rules(DEVICE_R1_ADDRESS, DEVICE_R1_PORT, ) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_R1_ENDPOINT_DEFS) + +def json_device_connect_rules(address : str, port : int, settings : Dict = {}): + return [ + json_config_rule_set('_connect/address', address), + json_config_rule_set('_connect/port', port), + json_config_rule_set('_connect/settings', { 'username': DEVICE_R1_USERNAME, 'password': DEVICE_R1_PASSWORD, 'timeout' : DEVICE_R1_TIMEOUT, -}) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_R1_ENDPOINT_DEFS) +}), + ] + DEVICE_R2_UUID = 'R2-EMU' @@ -258,11 +190,6 @@ DEVICE_O1_CONNECT_RULES = json_device_connect_rules(DEVICE_O1_ADDRESS, DEVICE_O1 # ----- Devices -------------------------------------------------------------------------------------------------------- -# DataCenters -DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int']) -DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int']) - -# CellSites DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2']) DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2']) DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2']) diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index f2f0dcb68..c149a308a 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -14,27 +14,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import copy, logging, pytest -from common.Settings import get_setting +import logging +from common.proto.context_pb2 import ContextId, Empty from common.proto.monitoring_pb2 import KpiDescriptorList -from common.tests.EventTools import EVENT_CREATE, EVENT_UPDATE, check_events -from common.tools.object_factory.Context import json_context_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Link import json_link_id -from common.tools.object_factory.Topology import json_topology_id +from common.tests.LoadScenario import load_scenario_from_descriptor from context.client.ContextClient import ContextClient -from monitoring.client.MonitoringClient import MonitoringClient -from context.client.EventsCollector import EventsCollector -from common.proto.context_pb2 import Context, ContextId, Device, Empty, Link, Topology from device.client.DeviceClient import DeviceClient +from monitoring.client.MonitoringClient import MonitoringClient +from tests.Fixtures import context_client, device_client, monitoring_client # pylint: disable=unused-import from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES -from tests.Fixtures import context_client, device_client, monitoring_client LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json' -def test_scenario_empty(context_client : ContextClient): # pylint: disable=redefined-outer-name +def test_scenario_empty( + context_client : ContextClient, # pylint: disable=redefined-outer-name + device_client : DeviceClient, # pylint: disable=redefined-outer-name +) -> None: # ----- List entities - Ensure database is empty ------------------------------------------------------------------- response = context_client.ListContexts(Empty()) assert len(response.contexts) == 0 @@ -46,38 +44,26 @@ def test_scenario_empty(context_client : ContextClient): # pylint: disable=rede assert len(response.links) == 0 -def test_prepare_environment(context_client : ContextClient): # pylint: disable=redefined-outer-name - - for context in CONTEXTS : context_client.SetContext (Context (**context )) - for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology)) - - for device, connect_rules in DEVICES: - device_with_connect_rules = copy.deepcopy(device) - device_with_connect_rules['device_config']['config_rules'].extend(connect_rules) - device_client.AddDevice(Device(**device_with_connect_rules)) - - - for link in LINKS : context_client.SetLink (Link (**link )) + # ----- Load Scenario ---------------------------------------------------------------------------------------------- + load_scenario_from_descriptor(DESCRIPTOR_FILE, context_client, device_client, None, None) - - -def test_scenario_ready(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- - response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + #response = context_client.ListContexts(Empty()) + #assert len(response.contexts) == len(CONTEXTS) - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + #response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) + #assert len(response.topologies) == len(TOPOLOGIES) - response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + #response = 
context_client.ListDevices(Empty()) + #assert len(response.devices) == len(DEVICES) - response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + #response = context_client.ListLinks(Empty()) + #assert len(response.links) == len(LINKS) + + #response = context_client.ListServices(ContextId(**CONTEXT_ID)) + #assert len(response.services) == 0 - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 def test_scenario_kpis_created(monitoring_client: MonitoringClient): """ -- GitLab From c9d88a74134ce31016125706a6f759a624a8ffb9 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 24 Nov 2022 17:38:18 +0000 Subject: [PATCH 06/13] OFC'22 test: - restored list of components --- src/tests/ofc22/deploy_specs.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tests/ofc22/deploy_specs.sh b/src/tests/ofc22/deploy_specs.sh index 8214ad1fd..ffd91da35 100644 --- a/src/tests/ofc22/deploy_specs.sh +++ b/src/tests/ofc22/deploy_specs.sh @@ -7,7 +7,7 @@ export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" # interdomain slice pathcomp dlt # dbscanserving opticalattackmitigator opticalattackdetector # l3_attackmitigator l3_centralizedattackdetector l3_distributedattackdetector -export TFS_COMPONENTS="context device monitoring pathcomp service slice compute webui" # automation +export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" -- GitLab From 5ae72c8e561996fe0ffd823bdebcfbaa4b4eb916 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 08:10:05 +0000 Subject: [PATCH 07/13] OFC'22 test: - fixed imports for bootstrap devices --- src/tests/ofc22/tests/test_functional_bootstrap.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index c149a308a..0005ced0d 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -15,14 +15,13 @@ # limitations under the License. 
import logging -from common.proto.context_pb2 import ContextId, Empty +from common.proto.context_pb2 import Empty from common.proto.monitoring_pb2 import KpiDescriptorList from common.tests.LoadScenario import load_scenario_from_descriptor from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient from monitoring.client.MonitoringClient import MonitoringClient from tests.Fixtures import context_client, device_client, monitoring_client # pylint: disable=unused-import -from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) -- GitLab From e871a09e6878bd6d18a03cc678b03cb1d76d5f85 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 13:06:26 +0000 Subject: [PATCH 08/13] Common: - updated generic descriptor loader tool - updated scenario loader for automated tests --- src/common/tests/LoadScenario.py | 12 ++- src/common/tools/descriptor/Loader.py | 138 +++++++++++++++++++------- 2 files changed, 109 insertions(+), 41 deletions(-) diff --git a/src/common/tests/LoadScenario.py b/src/common/tests/LoadScenario.py index 1c531ed60..3c3940e67 100644 --- a/src/common/tests/LoadScenario.py +++ b/src/common/tests/LoadScenario.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json, logging +import logging from common.tools.descriptor.Loader import DescriptorLoader, compose_notifications from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient @@ -29,15 +29,15 @@ LOGGERS = { def load_scenario_from_descriptor( descriptor_file : str, context_client : ContextClient, device_client : DeviceClient, service_client : ServiceClient, slice_client : SliceClient -) -> None: +) -> DescriptorLoader: with open(descriptor_file, 'r', encoding='UTF-8') as f: - descriptors = json.loads(f.read()) + descriptors = f.read() descriptor_loader = DescriptorLoader( + descriptors, context_client=context_client, device_client=device_client, service_client=service_client, slice_client=slice_client) - descriptor_loader.process_descriptors(descriptors) - results = descriptor_loader.get_results() + results = descriptor_loader.process() num_errors = 0 for message,level in compose_notifications(results): @@ -46,3 +46,5 @@ def load_scenario_from_descriptor( if num_errors > 0: MSG = 'Failed to load descriptors in file {:s}' raise Exception(MSG.format(str(descriptor_file))) + + return descriptor_loader \ No newline at end of file diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py index 2674cdd3e..468cd4851 100644 --- a/src/common/tools/descriptor/Loader.py +++ b/src/common/tools/descriptor/Loader.py @@ -16,25 +16,23 @@ # Usage example (WebUI): # descriptors = json.loads(descriptors_data_from_client) -# -# descriptor_loader = DescriptorLoader() -# descriptor_loader.process_descriptors(descriptors) -# results = descriptor_loader.get_results() +# descriptor_loader = DescriptorLoader(descriptors) +# results = descriptor_loader.process() # for message,level in compose_notifications(results): # flash(message, level) # Usage example (pytest): # with open('path/to/descriptor.json', 'r', encoding='UTF-8') as f: # descriptors = json.loads(f.read()) -# -# descriptor_loader = DescriptorLoader() -# descriptor_loader.process_descriptors(descriptors) -# results = descriptor_loader.get_results() +# descriptor_loader = DescriptorLoader( +# descriptors, 
context_client=..., device_client=..., service_client=..., slice_client=...) +# results = descriptor_loader.process() # loggers = {'success': LOGGER.info, 'danger': LOGGER.error, 'error': LOGGER.error} # for message,level in compose_notifications(results): # loggers.get(level)(message) -from typing import Dict, List, Optional, Tuple +import json +from typing import Dict, List, Optional, Tuple, Union from common.proto.context_pb2 import Connection, Context, Device, Link, Service, Slice, Topology from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient @@ -81,41 +79,107 @@ def compose_notifications(results : TypeResults) -> TypeNotificationList: class DescriptorLoader: def __init__( - self, context_client : Optional[ContextClient] = None, device_client : Optional[DeviceClient] = None, + self, descriptors : Union[str, Dict], + context_client : Optional[ContextClient] = None, device_client : Optional[DeviceClient] = None, service_client : Optional[ServiceClient] = None, slice_client : Optional[SliceClient] = None ) -> None: + self.__descriptors = json.loads(descriptors) if isinstance(descriptors, str) else descriptors + self.__dummy_mode = self.__descriptors.get('dummy_mode' , False) + self.__contexts = self.__descriptors.get('contexts' , []) + self.__topologies = self.__descriptors.get('topologies' , []) + self.__devices = self.__descriptors.get('devices' , []) + self.__links = self.__descriptors.get('links' , []) + self.__services = self.__descriptors.get('services' , []) + self.__slices = self.__descriptors.get('slices' , []) + self.__connections = self.__descriptors.get('connections', []) + + self.__contexts_add = None + self.__topologies_add = None + self.__devices_add = None + self.__devices_config = None + self.__services_add = None + self.__slices_add = None + self.__ctx_cli = ContextClient() if context_client is None else context_client self.__dev_cli = DeviceClient() if device_client is None else device_client self.__svc_cli = ServiceClient() if service_client is None else service_client self.__slc_cli = SliceClient() if slice_client is None else slice_client - self.__results : TypeResults = list() - self.__connections = None - self.__contexts = None - self.__contexts_add = None - self.__devices = None - self.__devices_add = None - self.__devices_config = None - self.__dummy_mode = None - self.__links = None - self.__services = None - self.__services_add = None - self.__slices = None - self.__slices_add = None - self.__topologies = None - self.__topologies_add = None - def get_results(self) -> TypeResults: return self.__results - - def process_descriptors(self, descriptors : Dict) -> None: - self.__dummy_mode = descriptors.get('dummy_mode' , False) - self.__contexts = descriptors.get('contexts' , []) - self.__topologies = descriptors.get('topologies' , []) - self.__devices = descriptors.get('devices' , []) - self.__links = descriptors.get('links' , []) - self.__services = descriptors.get('services' , []) - self.__slices = descriptors.get('slices' , []) - self.__connections = descriptors.get('connections', []) + self.__results : TypeResults = list() + @property + def contexts(self) -> List[Dict]: return self.__contexts + + @property + def num_contexts(self) -> int: return len(self.__contexts) + + @property + def topologies(self) -> Dict[str, List[Dict]]: + _topologies = {} + for topology in self.__topologies: + context_uuid = topology.topology_id.context_id.context_uuid.uuid + _topologies.setdefault(context_uuid, []).append(topology) + 
return _topologies + + @property + def num_topologies(self) -> Dict[str, int]: + _num_topologies = {} + for topology in self.__topologies: + context_uuid = topology.topology_id.context_id.context_uuid.uuid + _num_topologies[context_uuid] = _num_topologies.get(context_uuid, 0) + 1 + return _num_topologies + + @property + def devices(self) -> List[Dict]: return self.__devices + + @property + def num_devices(self) -> int: return len(self.__devices) + + @property + def links(self) -> List[Dict]: return self.__links + + @property + def num_links(self) -> int: return len(self.__links) + + @property + def services(self) -> Dict[str, List[Dict]]: + _services = {} + for service in self.__services: + context_uuid = service.service_id.context_id.context_uuid.uuid + _services.setdefault(context_uuid, []).append(service) + return _services + + @property + def num_services(self) -> Dict[str, int]: + _num_services = {} + for service in self.__services: + context_uuid = service.service_id.context_id.context_uuid.uuid + _num_services[context_uuid] = _num_services.get(context_uuid, 0) + 1 + return _num_services + + @property + def slices(self) -> Dict[str, List[Dict]]: + _slices = {} + for slice_ in self.__slices: + context_uuid = slice_.slice_id.context_id.context_uuid.uuid + _slices.setdefault(context_uuid, []).append(slice_) + return _slices + + @property + def num_slices(self) -> Dict[str, int]: + _num_slices = {} + for slice_ in self.__slices: + context_uuid = slice_.slice_id.context_id.context_uuid.uuid + _num_slices[context_uuid] = _num_slices.get(context_uuid, 0) + 1 + return _num_slices + + @property + def connections(self) -> List[Dict]: return self.__connections + + @property + def num_connections(self) -> int: return len(self.__connections) + + def process(self) -> TypeResults: # Format CustomConfigRules in Devices, Services and Slices provided in JSON format self.__devices = [format_device_custom_config_rules (device ) for device in self.__devices ] self.__services = [format_service_custom_config_rules(service) for service in self.__services] @@ -130,6 +194,8 @@ class DescriptorLoader: self._dummy_mode() else: self._normal_mode() + + return self.__results def _dummy_mode(self) -> None: # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. -- GitLab From 688051a764f5dd97e8d818ed1414fa1397d16e3f Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 13:06:50 +0000 Subject: [PATCH 09/13] Compute: - updated MockOSM constants and helper methods --- src/compute/tests/mock_osm/Constants.py | 16 +++++++++ src/compute/tests/mock_osm/Tools.py | 44 +++++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 src/compute/tests/mock_osm/Constants.py create mode 100644 src/compute/tests/mock_osm/Tools.py diff --git a/src/compute/tests/mock_osm/Constants.py b/src/compute/tests/mock_osm/Constants.py new file mode 100644 index 000000000..44d74169f --- /dev/null +++ b/src/compute/tests/mock_osm/Constants.py @@ -0,0 +1,16 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' diff --git a/src/compute/tests/mock_osm/Tools.py b/src/compute/tests/mock_osm/Tools.py new file mode 100644 index 000000000..292832bf1 --- /dev/null +++ b/src/compute/tests/mock_osm/Tools.py @@ -0,0 +1,44 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +def compose_service_endpoint_id(endpoint_id): + device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] + endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] + return ':'.join([device_uuid, endpoint_uuid]) + +def wim_mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): + ce_endpoint_id = ce_endpoint_id['endpoint_id'] + ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] + ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) + _mapping = { + 'service_endpoint_id': service_endpoint_id, + 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, + 'service_mapping_info': { + 'site-id': site_id, + 'bearer': {'bearer-reference': bearer}, + } + } + if priority is not None: _mapping['service_mapping_info']['priority'] = priority + if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, _mapping + +def connection_point(service_endpoint_id : str, encapsulation_type : str, vlan_id : int): + return { + 'service_endpoint_id': service_endpoint_id, + 'service_endpoint_encapsulation_type': encapsulation_type, + 'service_endpoint_encapsulation_info': {'vlan': vlan_id} + } -- GitLab From 12b2c871270ea39eee748edd5401da384bed6c57 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 13:07:05 +0000 Subject: [PATCH 10/13] WebUI: - updated descriptor loading framework --- src/webui/service/main/routes.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py index b161fa845..0e0087347 100644 --- a/src/webui/service/main/routes.py +++ b/src/webui/service/main/routes.py @@ -43,9 +43,8 @@ def process_descriptors(descriptors): flash(f'Unable to load descriptor file: {str(e)}', 'danger') return - descriptor_loader = DescriptorLoader() - descriptor_loader.process_descriptors(descriptors) - results = descriptor_loader.get_results() + descriptor_loader = DescriptorLoader(descriptors) + results = descriptor_loader.process() for message,level in compose_notifications(results): flash(message, level) -- GitLab From 17129baeb289b719d0d6bb21a169a42011879cb9 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 13:07:21 +0000 Subject: [PATCH 11/13] OFC'22 test: - updated test scripts --- 
src/tests/ofc22/tests/Fixtures.py | 39 -- src/tests/ofc22/tests/Objects.py | 354 +----------------- .../ofc22/tests/test_functional_bootstrap.py | 33 +- .../ofc22/tests/test_functional_cleanup.py | 98 +++-- .../tests/test_functional_create_service.py | 118 +++--- .../tests/test_functional_delete_service.py | 122 +++--- 6 files changed, 172 insertions(+), 592 deletions(-) diff --git a/src/tests/ofc22/tests/Fixtures.py b/src/tests/ofc22/tests/Fixtures.py index 95e301055..b68f27460 100644 --- a/src/tests/ofc22/tests/Fixtures.py +++ b/src/tests/ofc22/tests/Fixtures.py @@ -17,45 +17,6 @@ from common.Settings import get_setting from compute.tests.mock_osm.MockOSM import MockOSM from .Objects import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest -from common.Settings import get_setting -from compute.tests.mock_osm.MockOSM import MockOSM -from context.client.ContextClient import ContextClient -from device.client.DeviceClient import DeviceClient -from monitoring.client.MonitoringClient import MonitoringClient - -@pytest.fixture(scope='session') -def context_client(): - _client = ContextClient() - yield _client - _client.close() - -@pytest.fixture(scope='session') -def device_client(): - _client = DeviceClient() - yield _client - _client.close() - -@pytest.fixture(scope='session') -def monitoring_client(): - _client = MonitoringClient() - yield _client - _client.close() - @pytest.fixture(scope='session') def osm_wim(): wim_url = 'http://{:s}:{:s}'.format( diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index f73d56cfc..1a52f5636 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -1,5 +1,3 @@ -##### LLUIS GIFRE (CTTC): CODE UNDER REARRANGEMENT ##### - # Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,353 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
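For reference, a quick sketch of what the mock_osm helpers introduced above return, using made-up device names ('PE1' is a placeholder provider-edge router, not part of the OFC'22 scenario); the literal values follow directly from the string formatting in Tools.py:

```python
from compute.tests.mock_osm.Tools import connection_point, wim_mapping

# Customer-edge endpoint and provider-edge device, in the object-factory JSON shape.
ce_endpoint = {'endpoint_id': {
    'device_id': {'device_uuid': {'uuid': 'R1-EMU'}},
    'endpoint_uuid': {'uuid': '13/1/2'},
}}
pe_device_id = {'device_uuid': {'uuid': 'PE1'}}

sep_id, mapping = wim_mapping('1', ce_endpoint, pe_device_id)
# sep_id  == '1:R1-EMU:13/1/2'
# mapping == {
#     'service_endpoint_id': '1:R1-EMU:13/1/2',
#     'datacenter_id': '1', 'device_id': 'R1-EMU', 'device_interface_id': '13/1/2',
#     'service_mapping_info': {'site-id': '1', 'bearer': {'bearer-reference': 'R1-EMU:PE1'}},
# }

connection_point(sep_id, 'dot1q', 300)
# {'service_endpoint_id': '1:R1-EMU:13/1/2',
#  'service_endpoint_encapsulation_type': 'dot1q',
#  'service_endpoint_encapsulation_info': {'vlan': 300}}
```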
-import os, uuid -from typing import Dict, List, Tuple -from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID -from common.tools.object_factory.Context import json_context, json_context_id -from common.tools.object_factory.Device import ( - json_device_connect_rules, json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled, - json_device_emulated_packet_router_disabled, json_device_emulated_tapi_disabled, json_device_id, - json_device_packetrouter_disabled, json_device_tapi_disabled) -from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id, json_endpoints -from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id -from common.tools.object_factory.Topology import json_topology, json_topology_id -from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned -from common.tools.object_factory.Topology import json_topology, json_topology_id -from common.proto.kpi_sample_types_pb2 import KpiSampleType - -# ----- Device Credentials and Settings -------------------------------------------------------------------------------- - -# if true, Device component is present and will infeer the endpoints from connect-rules -# if false, Device component is not present and device objects must contain preconfigured endpoints -ADD_CONNECT_RULES_TO_DEVICES = os.environ.get('ADD_CONNECT_RULES_TO_DEVICES', 'True') -ADD_CONNECT_RULES_TO_DEVICES = ADD_CONNECT_RULES_TO_DEVICES.upper() in {'T', 'TRUE', '1', 'Y', 'YES'} - -try: - from .Credentials import DEVICE_R1_ADDRESS, DEVICE_R1_PORT, DEVICE_R1_USERNAME, DEVICE_R1_PASSWORD - from .Credentials import DEVICE_R3_ADDRESS, DEVICE_R3_PORT, DEVICE_R3_USERNAME, DEVICE_R3_PASSWORD - from .Credentials import DEVICE_O1_ADDRESS, DEVICE_O1_PORT - USE_REAL_DEVICES = True # Use real devices -except ImportError: - USE_REAL_DEVICES = False # Use emulated devices - - DEVICE_R1_ADDRESS, DEVICE_R1_PORT, DEVICE_R1_USERNAME, DEVICE_R1_PASSWORD = '0.0.0.0', 830, 'admin', 'admin' - DEVICE_R3_ADDRESS, DEVICE_R3_PORT, DEVICE_R3_USERNAME, DEVICE_R3_PASSWORD = '0.0.0.0', 830, 'admin', 'admin' - DEVICE_O1_ADDRESS, DEVICE_O1_PORT = '0.0.0.0', 4900 - -#USE_REAL_DEVICES = False # Uncomment to force to use emulated devices - -if not USE_REAL_DEVICES: - json_device_packetrouter_disabled = json_device_emulated_packet_router_disabled - json_device_tapi_disabled = json_device_emulated_tapi_disabled - -def compose_router(device_uuid, endpoint_uuids, topology_id=None): - device_id = json_device_id(device_uuid) - r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] - config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] - endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) - j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints - device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) - return device_id, endpoints, device - -def compose_ols(device_uuid, endpoint_uuids, topology_id=None): - device_id = json_device_id(device_uuid) - r_endpoints = [(endpoint_uuid, 'optical', []) for endpoint_uuid in endpoint_uuids] - config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] - endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) - j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints - device = json_device_emulated_tapi_disabled(device_uuid, 
config_rules=config_rules, endpoints=j_endpoints) - return device_id, endpoints, device - -def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None): - device_id = json_device_id(device_uuid) - r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids] - config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else [] - endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id) - j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints - device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints) - return device_id, endpoints, device - -def compose_service(endpoint_a, endpoint_z, constraints=[]): - service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id']) - endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']] - service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints) - return service - -# ----- Context -------------------------------------------------------------------------------------------------------- -CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) -CONTEXT = json_context(DEFAULT_CONTEXT_UUID) - -# ----- Topology ------------------------------------------------------------------------------------------------------- -TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID -TOPO_ADMIN_ID = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) -TOPO_ADMIN = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID) - -# ----- Monitoring Samples --------------------------------------------------------------------------------------------- -PACKET_PORT_SAMPLE_TYPES = [ - KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED, - KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED, - KpiSampleType.KPISAMPLETYPE_BYTES_TRANSMITTED, - KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED, -] - -# ----- Devices -------------------------------------------------------------------------------------------------------- - -DEVICE_R1_UUID = 'R1-EMU' -DEVICE_R1_TIMEOUT = 120 -DEVICE_R1_ENDPOINT_DEFS = [('13/0/0', 'optical', []), ('13/1/2', 'copper', PACKET_PORT_SAMPLE_TYPES)] -DEVICE_R1_ID = json_device_id(DEVICE_R1_UUID) -#DEVICE_R1_ENDPOINTS = json_endpoints(DEVICE_R1_ID, DEVICE_R1_ENDPOINT_DEFS) -DEVICE_R1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_R1_ID, DEVICE_R1_ENDPOINT_DEFS) -DEVICE_R1 = json_device_packetrouter_disabled(DEVICE_R1_UUID) -ENDPOINT_ID_R1_13_0_0 = DEVICE_R1_ENDPOINT_IDS[0] -ENDPOINT_ID_R1_13_1_2 = DEVICE_R1_ENDPOINT_IDS[1] -DEVICE_R1_CONNECT_RULES = json_device_connect_rules(DEVICE_R1_ADDRESS, DEVICE_R1_PORT, ) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_R1_ENDPOINT_DEFS) - -def json_device_connect_rules(address : str, port : int, settings : Dict = {}): - return [ - json_config_rule_set('_connect/address', address), - json_config_rule_set('_connect/port', port), - json_config_rule_set('_connect/settings', { - 'username': DEVICE_R1_USERNAME, - 'password': DEVICE_R1_PASSWORD, - 'timeout' : DEVICE_R1_TIMEOUT, -}), - ] - - - -DEVICE_R2_UUID = 'R2-EMU' -DEVICE_R2_ENDPOINT_DEFS = [('13/0/0', 'optical', []), ('13/1/2', 'copper', PACKET_PORT_SAMPLE_TYPES)] -DEVICE_R2_ID = json_device_id(DEVICE_R2_UUID) -#DEVICE_R2_ENDPOINTS = json_endpoints(DEVICE_R2_ID, DEVICE_R2_ENDPOINT_DEFS) -DEVICE_R2_ENDPOINT_IDS = json_endpoint_ids(DEVICE_R2_ID, DEVICE_R2_ENDPOINT_DEFS) -DEVICE_R2 = json_device_emulated_packet_router_disabled(DEVICE_R2_UUID) -ENDPOINT_ID_R2_13_0_0 = 
DEVICE_R2_ENDPOINT_IDS[0] -ENDPOINT_ID_R2_13_1_2 = DEVICE_R2_ENDPOINT_IDS[1] -DEVICE_R2_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_R2_ENDPOINT_DEFS) - - -DEVICE_R3_UUID = 'R3-EMU' -DEVICE_R3_TIMEOUT = 120 -DEVICE_R3_ENDPOINT_DEFS = [('13/0/0', 'optical', []), ('13/1/2', 'copper', PACKET_PORT_SAMPLE_TYPES)] -DEVICE_R3_ID = json_device_id(DEVICE_R3_UUID) -#DEVICE_R3_ENDPOINTS = json_endpoints(DEVICE_R3_ID, DEVICE_R3_ENDPOINT_DEFS) -DEVICE_R3_ENDPOINT_IDS = json_endpoint_ids(DEVICE_R3_ID, DEVICE_R3_ENDPOINT_DEFS) -DEVICE_R3 = json_device_packetrouter_disabled(DEVICE_R3_UUID) -ENDPOINT_ID_R3_13_0_0 = DEVICE_R3_ENDPOINT_IDS[0] -ENDPOINT_ID_R3_13_1_2 = DEVICE_R3_ENDPOINT_IDS[1] -DEVICE_R3_CONNECT_RULES = json_device_connect_rules(DEVICE_R3_ADDRESS, DEVICE_R3_PORT, { - 'username': DEVICE_R3_USERNAME, - 'password': DEVICE_R3_PASSWORD, - 'timeout' : DEVICE_R3_TIMEOUT, -}) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_R3_ENDPOINT_DEFS) - - -DEVICE_R4_UUID = 'R4-EMU' -DEVICE_R4_ENDPOINT_DEFS = [('13/0/0', 'optical', []), ('13/1/2', 'copper', PACKET_PORT_SAMPLE_TYPES)] -DEVICE_R4_ID = json_device_id(DEVICE_R4_UUID) -#DEVICE_R4_ENDPOINTS = json_endpoints(DEVICE_R4_ID, DEVICE_R4_ENDPOINT_DEFS) -DEVICE_R4_ENDPOINT_IDS = json_endpoint_ids(DEVICE_R4_ID, DEVICE_R4_ENDPOINT_DEFS) -DEVICE_R4 = json_device_emulated_packet_router_disabled(DEVICE_R4_UUID) -ENDPOINT_ID_R4_13_0_0 = DEVICE_R4_ENDPOINT_IDS[0] -ENDPOINT_ID_R4_13_1_2 = DEVICE_R4_ENDPOINT_IDS[1] -DEVICE_R4_CONNECT_RULES = json_device_emulated_connect_rules(DEVICE_R4_ENDPOINT_DEFS) - - -DEVICE_O1_UUID = 'O1-OLS' -DEVICE_O1_TIMEOUT = 120 -DEVICE_O1_ENDPOINT_DEFS = [ - ('aade6001-f00b-5e2f-a357-6a0a9d3de870', 'optical', []), # node_1_port_13 - ('eb287d83-f05e-53ec-ab5a-adf6bd2b5418', 'optical', []), # node_2_port_13 - ('0ef74f99-1acc-57bd-ab9d-4b958b06c513', 'optical', []), # node_3_port_13 - ('50296d99-58cc-5ce7-82f5-fc8ee4eec2ec', 'optical', []), # node_4_port_13 -] -DEVICE_O1_ID = json_device_id(DEVICE_O1_UUID) -DEVICE_O1 = json_device_tapi_disabled(DEVICE_O1_UUID) -#DEVICE_O1_ENDPOINTS = json_endpoints(DEVICE_O1_ID, DEVICE_O1_ENDPOINT_DEFS) -DEVICE_O1_ENDPOINT_IDS = json_endpoint_ids(DEVICE_O1_ID, DEVICE_O1_ENDPOINT_DEFS) -ENDPOINT_ID_O1_EP1 = DEVICE_O1_ENDPOINT_IDS[0] -ENDPOINT_ID_O1_EP2 = DEVICE_O1_ENDPOINT_IDS[1] -ENDPOINT_ID_O1_EP3 = DEVICE_O1_ENDPOINT_IDS[2] -ENDPOINT_ID_O1_EP4 = DEVICE_O1_ENDPOINT_IDS[3] -DEVICE_O1_CONNECT_RULES = json_device_connect_rules(DEVICE_O1_ADDRESS, DEVICE_O1_PORT, { - 'timeout' : DEVICE_O1_TIMEOUT, -}) if USE_REAL_DEVICES else json_device_emulated_connect_rules(DEVICE_O1_ENDPOINT_DEFS) - - -# ----- Devices -------------------------------------------------------------------------------------------------------- -DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2']) -DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2']) -DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2']) -DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2']) - -# Transport Network -DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1']) -DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_router('TN-R2', ['1/1', '1/2', '2/1']) -DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1']) -DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_router('TN-R4', ['1/1', '1/2', '2/1']) - -#tols_ep_uuids = 
[str(uuid.uuid4()).split('-')[-1] for _ in range(4)] -tols_ep_uuids = ['afd8ffbb5403', '04b84e213e83', '3169ae676ac6', '93506f786270'] -DEV_TOLS_ID, DEV_TOLS_EPS, DEV_TOLS = compose_ols('TN-OLS', tols_ep_uuids) - - - -# ----- Links ---------------------------------------------------------------------------------------------------------- -LINK_R1_O1_UUID = get_link_uuid(DEVICE_R1_ID, ENDPOINT_ID_R1_13_0_0, DEVICE_O1_ID, ENDPOINT_ID_O1_EP1) -LINK_R1_O1_ID = json_link_id(LINK_R1_O1_UUID) -LINK_R1_O1 = json_link(LINK_R1_O1_UUID, [ENDPOINT_ID_R1_13_0_0, ENDPOINT_ID_O1_EP1]) - -LINK_R2_O1_UUID = get_link_uuid(DEVICE_R2_ID, ENDPOINT_ID_R2_13_0_0, DEVICE_O1_ID, ENDPOINT_ID_O1_EP2) -LINK_R2_O1_ID = json_link_id(LINK_R2_O1_UUID) -LINK_R2_O1 = json_link(LINK_R2_O1_UUID, [ENDPOINT_ID_R2_13_0_0, ENDPOINT_ID_O1_EP2]) - -LINK_R3_O1_UUID = get_link_uuid(DEVICE_R3_ID, ENDPOINT_ID_R3_13_0_0, DEVICE_O1_ID, ENDPOINT_ID_O1_EP3) -LINK_R3_O1_ID = json_link_id(LINK_R3_O1_UUID) -LINK_R3_O1 = json_link(LINK_R3_O1_UUID, [ENDPOINT_ID_R3_13_0_0, ENDPOINT_ID_O1_EP3]) - -LINK_R4_O1_UUID = get_link_uuid(DEVICE_R4_ID, ENDPOINT_ID_R4_13_0_0, DEVICE_O1_ID, ENDPOINT_ID_O1_EP4) -LINK_R4_O1_ID = json_link_id(LINK_R4_O1_UUID) -LINK_R4_O1 = json_link(LINK_R4_O1_UUID, [ENDPOINT_ID_R4_13_0_0, ENDPOINT_ID_O1_EP4]) - - -# ----- Links ---------------------------------------------------------------------------------------------------------- -# InterDomain DC-CSGW -LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW1 = compose_link(DEV_DC1GW_EPS[0], DEV_CS1GW1_EPS[0]) -LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1GW2_EPS[0]) -LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) -LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) - -# InterDomain CSGW-TN -LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) -LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) -LINK_CS1GW1_TNR2_ID, LINK_CS1GW1_TNR2 = compose_link(DEV_CS1GW1_EPS[2], DEV_TNR2_EPS[1]) -LINK_CS1GW2_TNR1_ID, LINK_CS1GW2_TNR1 = compose_link(DEV_CS1GW2_EPS[2], DEV_TNR1_EPS[1]) -LINK_CS2GW1_TNR3_ID, LINK_CS2GW1_TNR3 = compose_link(DEV_CS2GW1_EPS[1], DEV_TNR3_EPS[0]) -LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4_EPS[0]) -LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) -LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) - -# IntraDomain TN -LINK_TNR1_TOLS_ID, LINK_TNR1_TOLS = compose_link(DEV_TNR1_EPS[2], DEV_TOLS_EPS[0]) -LINK_TNR2_TOLS_ID, LINK_TNR2_TOLS = compose_link(DEV_TNR2_EPS[2], DEV_TOLS_EPS[1]) -LINK_TNR3_TOLS_ID, LINK_TNR3_TOLS = compose_link(DEV_TNR3_EPS[2], DEV_TOLS_EPS[2]) -LINK_TNR4_TOLS_ID, LINK_TNR4_TOLS = compose_link(DEV_TNR4_EPS[2], DEV_TOLS_EPS[3]) - - +from common.tools.object_factory.Device import json_device_id +from common.tools.object_factory.EndPoint import json_endpoint_id +from compute.tests.mock_osm.Tools import compose_service_endpoint_id, connection_point, wim_mapping # ----- WIM Service Settings ------------------------------------------------------------------------------------------- -WIM_USERNAME = 'admin' -WIM_PASSWORD = 'admin' - -def compose_service_endpoint_id(endpoint_id): - device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] - endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] - return ':'.join([device_uuid, endpoint_uuid]) -WIM_SEP_R1_ID = 
compose_service_endpoint_id(ENDPOINT_ID_R1_13_1_2) +DEV_R1_ID = json_device_id('R1-EMU') +DEV_R1_ENDPOINT_ID = compose_service_endpoint_id(json_endpoint_id(DEV_R1_ID, '13/1/2')) WIM_SEP_R1_SITE_ID = '1' -WIM_SEP_R1_BEARER = WIM_SEP_R1_ID -WIM_SRV_R1_VLAN_ID = 400 -WIM_SEP_R3_ID = compose_service_endpoint_id(ENDPOINT_ID_R3_13_1_2) +DEV_R3_ID = json_device_id('R3-EMU') +DEV_R3_ENDPOINT_ID = compose_service_endpoint_id(json_endpoint_id(DEV_R3_ID, '13/1/2')) WIM_SEP_R3_SITE_ID = '2' -WIM_SEP_R3_BEARER = WIM_SEP_R3_ID -WIM_SRV_R3_VLAN_ID = 500 -WIM_MAPPING = [ - {'device-id': DEVICE_R1_UUID, 'service_endpoint_id': WIM_SEP_R1_ID, - 'service_mapping_info': {'bearer': {'bearer-reference': WIM_SEP_R1_BEARER}, 'site-id': WIM_SEP_R1_SITE_ID}}, - {'device-id': DEVICE_R3_UUID, 'service_endpoint_id': WIM_SEP_R3_ID, - 'service_mapping_info': {'bearer': {'bearer-reference': WIM_SEP_R3_BEARER}, 'site-id': WIM_SEP_R3_SITE_ID}}, -] -WIM_SERVICE_TYPE = 'ELINE' -WIM_SERVICE_CONNECTION_POINTS = [ - {'service_endpoint_id': WIM_SEP_R1_ID, - 'service_endpoint_encapsulation_type': 'dot1q', - 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_R1_VLAN_ID}}, - {'service_endpoint_id': WIM_SEP_R3_ID, - 'service_endpoint_encapsulation_type': 'dot1q', - 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_R3_VLAN_ID}}, -] - -# New code: -def mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): - ce_endpoint_id = ce_endpoint_id['endpoint_id'] - ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] - ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] - pe_device_uuid = pe_device_id['device_uuid']['uuid'] - service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) - bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) - _mapping = { - 'service_endpoint_id': service_endpoint_id, - 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, - 'service_mapping_info': { - 'site-id': site_id, - 'bearer': {'bearer-reference': bearer}, - } - } - if priority is not None: _mapping['service_mapping_info']['priority'] = priority - if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant - return service_endpoint_id, _mapping - -WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = mapping('DC1', DEV_DC1GW_EPS[0], DEV_CS1GW1_ID, priority=10, redundant=['DC1:DC1-GW:eth2']) -WIM_SEP_DC1_SEC, WIM_MAP_DC1_SEC = mapping('DC1', DEV_DC1GW_EPS[1], DEV_CS1GW2_ID, priority=20, redundant=['DC1:DC1-GW:eth1']) -WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = mapping('DC2', DEV_DC2GW_EPS[0], DEV_CS2GW1_ID, priority=10, redundant=['DC2:DC2-GW:eth2']) -WIM_SEP_DC2_SEC, WIM_MAP_DC2_SEC = mapping('DC2', DEV_DC2GW_EPS[1], DEV_CS2GW2_ID, priority=20, redundant=['DC2:DC2-GW:eth1']) - -WIM_MAPPING = [WIM_MAP_DC1_PRI, WIM_MAP_DC1_SEC, WIM_MAP_DC2_PRI, WIM_MAP_DC2_SEC] +WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_SEP_R1_SITE_ID, DEV_DC1GW_EPS[0], DEV_CS1GW1_ID) +WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_SEP_R3_SITE_ID, DEV_DC2GW_EPS[0], DEV_CS2GW1_ID) +WIM_MAPPING = [WIM_MAP_DC1, WIM_MAP_DC2] WIM_SRV_VLAN_ID = 300 -WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_TYPE = 'ELINE' WIM_SERVICE_CONNECTION_POINTS = [ - {'service_endpoint_id': WIM_SEP_DC1_PRI, - 'service_endpoint_encapsulation_type': 'dot1q', - 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, - {'service_endpoint_id': WIM_SEP_DC2_PRI, - 'service_endpoint_encapsulation_type': 'dot1q', - 'service_endpoint_encapsulation_info': {'vlan': WIM_SRV_VLAN_ID}}, -] - - - - -# ----- 
Containers ----------------------------------------------------------------------------------------------------- -CONTEXTS = [CONTEXT ] -TOPOLOGIES = [TOPOLOGY] -DEVICES = [DEVICE_R1, DEVICE_R2, DEVICE_R3, DEVICE_R4, DEVICE_O1] -LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1] - -OBJECTS_PER_TOPOLOGY = [ - (TOPO_ADMIN_ID, - [ DEV_DC1GW_ID, DEV_DC2GW_ID, - DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, - DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - DEV_TOLS_ID, - ], - [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, - LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, - LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, - LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, - ], - ), - (TOPO_DC1_ID, - [DEV_DC1GW_ID], - []), - (TOPO_DC2_ID, - [DEV_DC2GW_ID], - []), - (TOPO_CS1_ID, - [DEV_CS1GW1_ID, DEV_CS1GW2_ID], - []), - (TOPO_CS2_ID, - [DEV_CS2GW1_ID, DEV_CS2GW2_ID], - []), - (TOPO_TN_ID, - [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - DEV_TOLS_ID, - ], - [ LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, - ]), + connection_point(WIM_SEP_R1_ID, 'dot1q', WIM_SRV_VLAN_ID), + connection_point(WIM_SEP_R3_ID, 'dot1q', WIM_SRV_VLAN_ID), ] diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index 0005ced0d..e6d61034b 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -15,9 +15,10 @@ # limitations under the License. import logging -from common.proto.context_pb2 import Empty +from common.proto.context_pb2 import ContextId, Empty from common.proto.monitoring_pb2 import KpiDescriptorList from common.tests.LoadScenario import load_scenario_from_descriptor +from common.tools.object_factory.Context import json_context_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient from monitoring.client.MonitoringClient import MonitoringClient @@ -44,27 +45,31 @@ def test_scenario_empty( # ----- Load Scenario ---------------------------------------------------------------------------------------------- - load_scenario_from_descriptor(DESCRIPTOR_FILE, context_client, device_client, None, None) + descriptor_loader = load_scenario_from_descriptor( + DESCRIPTOR_FILE, context_client, device_client, None, None) # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- - #response = context_client.ListContexts(Empty()) - #assert len(response.contexts) == len(CONTEXTS) - - #response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - #assert len(response.topologies) == len(TOPOLOGIES) + response = context_client.ListContexts(Empty()) + assert len(response.contexts) == descriptor_loader.num_contexts - #response = context_client.ListDevices(Empty()) - #assert len(response.devices) == len(DEVICES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies - #response = context_client.ListLinks(Empty()) - #assert len(response.links) == len(LINKS) + response = context_client.ListDevices(Empty()) + assert len(response.devices) == descriptor_loader.num_devices - #response = 
context_client.ListServices(ContextId(**CONTEXT_ID)) - #assert len(response.services) == 0 + response = context_client.ListLinks(Empty()) + assert len(response.links) == descriptor_loader.num_links + for context_uuid, _ in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 -def test_scenario_kpis_created(monitoring_client: MonitoringClient): +def test_scenario_kpis_created( + monitoring_client: MonitoringClient, # pylint: disable=redefined-outer-name +) -> None: """ This test validates that KPIs related to the service/device/endpoint were created during the service creation process. diff --git a/src/tests/ofc22/tests/test_functional_cleanup.py b/src/tests/ofc22/tests/test_functional_cleanup.py index b0dfe5490..925b75707 100644 --- a/src/tests/ofc22/tests/test_functional_cleanup.py +++ b/src/tests/ofc22/tests/test_functional_cleanup.py @@ -12,93 +12,83 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, pytest -from common.Settings import get_setting -from common.tests.EventTools import EVENT_REMOVE, check_events +import logging +from common.tools.descriptor.Loader import DescriptorLoader from common.tools.object_factory.Context import json_context_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Link import json_link_id -from common.tools.object_factory.Topology import json_topology_id -from context.client.ContextClient import ContextClient -from context.client.EventsCollector import EventsCollector from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId +from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient -from tests.Fixtures import context_client, device_client -from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES +from tests.Fixtures import context_client, device_client # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json' -def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name + +def test_services_removed( + context_client : ContextClient, # pylint: disable=redefined-outer-name + device_client : DeviceClient, # pylint: disable=redefined-outer-name +) -> None: # ----- List entities - Ensure service is removed ------------------------------------------------------------------ + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) + response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) - - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert 
len(response.services) == 0 - - -def test_scenario_cleanup( - context_client : ContextClient, device_client : DeviceClient): # pylint: disable=redefined-outer-name + assert len(response.links) == descriptor_loader.num_links - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - #events_collector = EventsCollector(context_client) - #events_collector.start() + for context_uuid, _ in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 - #expected_events = [] - # ----- Delete Links and Validate Collected Events ----------------------------------------------------------------- - for link in LINKS: + # ----- Delete Links ----------------------------------------------------------------------------------------------- + for link in descriptor_loader.links: link_id = link['link_id'] link_uuid = link_id['link_uuid']['uuid'] LOGGER.info('Deleting Link {:s}'.format(link_uuid)) context_client.RemoveLink(LinkId(**link_id)) - #expected_events.append(('LinkEvent', EVENT_REMOVE, json_link_id(link_uuid))) - # ----- Delete Devices and Validate Collected Events --------------------------------------------------------------- - for device, _ in DEVICES: + + # ----- Delete Devices --------------------------------------------------------------------------------------------- + for device, _ in descriptor_loader.devices: device_id = device['device_id'] device_uuid = device_id['device_uuid']['uuid'] LOGGER.info('Deleting Device {:s}'.format(device_uuid)) device_client.DeleteDevice(DeviceId(**device_id)) - #expected_events.append(('DeviceEvent', EVENT_REMOVE, json_device_id(device_uuid))) - - # ----- Delete Topologies and Validate Collected Events ------------------------------------------------------------ - for topology in TOPOLOGIES: - topology_id = topology['topology_id'] - context_uuid = topology_id['context_id']['context_uuid']['uuid'] - topology_uuid = topology_id['topology_uuid']['uuid'] - LOGGER.info('Deleting Topology {:s}/{:s}'.format(context_uuid, topology_uuid)) - context_client.RemoveTopology(TopologyId(**topology_id)) - context_id = json_context_id(context_uuid) - #expected_events.append(('TopologyEvent', EVENT_REMOVE, json_topology_id(topology_uuid, context_id=context_id))) - - # ----- Delete Contexts and Validate Collected Events -------------------------------------------------------------- - for context in CONTEXTS: + + + # ----- Delete Topologies ------------------------------------------------------------------------------------------ + for context_uuid, topology_list in descriptor_loader.topologies.items(): + for topology in topology_list: + topology_id = topology['topology_id'] + context_uuid = topology_id['context_id']['context_uuid']['uuid'] + topology_uuid = topology_id['topology_uuid']['uuid'] + LOGGER.info('Deleting Topology {:s}/{:s}'.format(context_uuid, topology_uuid)) + context_client.RemoveTopology(TopologyId(**topology_id)) + context_id = json_context_id(context_uuid) + + + # ----- Delete Contexts -------------------------------------------------------------------------------------------- + for context in descriptor_loader.contexts: context_id = context['context_id'] context_uuid = context_id['context_uuid']['uuid'] LOGGER.info('Deleting Context {:s}'.format(context_uuid)) context_client.RemoveContext(ContextId(**context_id)) - #expected_events.append(('ContextEvent', EVENT_REMOVE, 
json_context_id(context_uuid))) - - # ----- Validate Collected Events ---------------------------------------------------------------------------------- - #check_events(events_collector, expected_events) - - # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - #events_collector.stop() -def test_scenario_empty_again(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure database is empty again ------------------------------------------------------------- response = context_client.ListContexts(Empty()) assert len(response.contexts) == 0 diff --git a/src/tests/ofc22/tests/test_functional_create_service.py b/src/tests/ofc22/tests/test_functional_create_service.py index 5615f119b..ab32c01f4 100644 --- a/src/tests/ofc22/tests/test_functional_create_service.py +++ b/src/tests/ofc22/tests/test_functional_create_service.py @@ -12,24 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, pytest, random, time +import logging, random from common.DeviceTypes import DeviceTypeEnum -from common.Settings import get_setting -from common.tests.EventTools import EVENT_CREATE, EVENT_UPDATE, check_events -from common.tools.object_factory.Connection import json_connection_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Service import json_service_id +from common.proto.context_pb2 import ContextId, Empty +from common.tools.descriptor.Loader import DescriptorLoader from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.object_factory.Context import json_context_id from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from monitoring.client.MonitoringClient import MonitoringClient -from context.client.EventsCollector import EventsCollector -from common.proto.context_pb2 import ContextId, Empty -from tests.Fixtures import context_client, monitoring_client -from .Fixtures import osm_wim -from .Objects import ( - CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES, - WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +from tests.Fixtures import context_client, device_client, monitoring_client # pylint: disable=unused-import +from .Fixtures import osm_wim # pylint: disable=unused-import +from .Objects import WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) @@ -37,89 +31,69 @@ LOGGER.setLevel(logging.DEBUG) DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value +DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json' + +def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) -def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name - # ----- List entities - Ensure links are created ------------------------------------------------------------------- response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) 
== descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 -def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - # TODO: restablish the tests of the events - # events_collector = EventsCollector(context_client, log_events_received=True) - # events_collector.start() - # ----- Create Service --------------------------------------------------------------------------------------------- service_uuid = osm_wim.create_connectivity_service(WIM_SERVICE_TYPE, WIM_SERVICE_CONNECTION_POINTS) osm_wim.get_connectivity_service_status(service_uuid) - # ----- Validate collected events ---------------------------------------------------------------------------------- - - # packet_connection_uuid = '{:s}:{:s}'.format(service_uuid, DEVTYPE_EMU_PR) - # optical_connection_uuid = '{:s}:optical:{:s}'.format(service_uuid, DEVTYPE_EMU_OLS) - # optical_service_uuid = '{:s}:optical'.format(service_uuid) - - # expected_events = [ - # # Create packet service and add first endpoint - # ('ServiceEvent', EVENT_CREATE, json_service_id(service_uuid, context_id=CONTEXT_ID)), - # ('ServiceEvent', EVENT_UPDATE, json_service_id(service_uuid, context_id=CONTEXT_ID)), - - # # Configure OLS controller, create optical service, create optical connection - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_O1_UUID)), - # ('ServiceEvent', EVENT_CREATE, json_service_id(optical_service_uuid, context_id=CONTEXT_ID)), - # ('ConnectionEvent', EVENT_CREATE, json_connection_id(optical_connection_uuid)), - - # # Configure endpoint packet devices, add second endpoint to service, create connection - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_R1_UUID)), - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_R3_UUID)), - # ('ServiceEvent', EVENT_UPDATE, json_service_id(service_uuid, context_id=CONTEXT_ID)), - # ('ConnectionEvent', EVENT_CREATE, json_connection_id(packet_connection_uuid)), - # ] - # check_events(events_collector, expected_events) - - # # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - # events_collector.stop() - - -def test_scenario_service_created(context_client : ContextClient): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure service is created ------------------------------------------------------------------ response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) 
== descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links + + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + assert len(response.services) == 2*num_services # OLS & L3NM => (L3NM + TAPI) - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) - assert len(response.services) == 2 # L3NM + TAPI - for service in response.services: - service_id = service.service_id - response = context_client.ListConnections(service_id) - LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( - grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) - assert len(response.connections) == 1 # one connection per service + for service in response.services: + service_id = service.service_id + response = context_client.ListConnections(service_id) + LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( + grpc_message_to_json_string(service_id), len(response.connections), + grpc_message_to_json_string(response))) + assert len(response.connections) == 1 # one connection per service -def test_scenario_kpi_values_created(monitoring_client: MonitoringClient): +def test_scenario_kpi_values_created( + monitoring_client: MonitoringClient, # pylint: disable=redefined-outer-name +) -> None: """ This test validates that KPI values have been inserted into the monitoring database. We short k KPI descriptors to test. diff --git a/src/tests/ofc22/tests/test_functional_delete_service.py b/src/tests/ofc22/tests/test_functional_delete_service.py index 5d9568cd8..e9839f89b 100644 --- a/src/tests/ofc22/tests/test_functional_delete_service.py +++ b/src/tests/ofc22/tests/test_functional_delete_service.py @@ -12,23 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import logging, pytest +import logging from common.DeviceTypes import DeviceTypeEnum -from common.Settings import get_setting -from common.tests.EventTools import EVENT_REMOVE, EVENT_UPDATE, check_events -from common.tools.object_factory.Connection import json_connection_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Service import json_service_id +from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum +from common.tools.descriptor.Loader import DescriptorLoader +from common.tools.object_factory.Context import json_context_id from common.tools.grpc.Tools import grpc_message_to_json_string from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient -from context.client.EventsCollector import EventsCollector -from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum -from tests.Fixtures import context_client -from .Fixtures import osm_wim -from .Objects import ( - CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES, WIM_MAPPING, - WIM_PASSWORD, WIM_USERNAME) +from tests.Fixtures import context_client # pylint: disable=unused-import +from .Fixtures import osm_wim # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) @@ -37,86 +30,71 @@ LOGGER.setLevel(logging.DEBUG) DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value +DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json' + -def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name +def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name # ----- List entities - Ensure service is created ------------------------------------------------------------------ + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) + response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) - - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) - assert len(response.services) == 2 # L3NM + TAPI - for service in response.services: - service_id = service.service_id - response = context_client.ListConnections(service_id) - LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( - grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) - assert len(response.connections) == 1 # one connection per service + assert len(response.links) == descriptor_loader.num_links - -def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: 
disable=redefined-outer-name - # ----- Start the EventsCollector ---------------------------------------------------------------------------------- - #events_collector = EventsCollector(context_client, log_events_received=True) - #events_collector.start() - - # ----- Delete Service --------------------------------------------------------------------------------------------- - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) - assert len(response.services) == 2 # L3NM + TAPI service_uuids = set() - for service in response.services: - if service.service_type != ServiceTypeEnum.SERVICETYPE_L3NM: continue - service_uuid = service.service_id.service_uuid.uuid - service_uuids.add(service_uuid) - osm_wim.conn_info[service_uuid] = {} - + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + assert len(response.services) == 2*num_services # OLS & L3NM => (L3NM + TAPI) + + if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM: + service_uuid = service.service_id.service_uuid.uuid + service_uuids.add(service_uuid) + osm_wim.conn_info[service_uuid] = {} + + for service in response.services: + service_id = service.service_id + response = context_client.ListConnections(service_id) + LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( + grpc_message_to_json_string(service_id), len(response.connections), + grpc_message_to_json_string(response))) + assert len(response.connections) == 1 # one connection per service + + # Identify service to delete assert len(service_uuids) == 1 # assume a single L3NM service has been created service_uuid = set(service_uuids).pop() - osm_wim.delete_connectivity_service(service_uuid) - - # ----- Validate collected events ---------------------------------------------------------------------------------- - #packet_connection_uuid = '{:s}:{:s}'.format(service_uuid, DEVTYPE_EMU_PR) - #optical_connection_uuid = '{:s}:optical:{:s}'.format(service_uuid, DEVTYPE_EMU_OLS) - #optical_service_uuid = '{:s}:optical'.format(service_uuid) - - #expected_events = [ - # ('ConnectionEvent', EVENT_REMOVE, json_connection_id(packet_connection_uuid)), - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_R1_UUID)), - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_R3_UUID)), - # ('ServiceEvent', EVENT_REMOVE, json_service_id(service_uuid, context_id=CONTEXT_ID)), - # ('ConnectionEvent', EVENT_REMOVE, json_connection_id(optical_connection_uuid)), - # ('DeviceEvent', EVENT_UPDATE, json_device_id(DEVICE_O1_UUID)), - # ('ServiceEvent', EVENT_REMOVE, json_service_id(optical_service_uuid, context_id=CONTEXT_ID)), - #] - #check_events(events_collector, expected_events) - # ----- Stop the EventsCollector ----------------------------------------------------------------------------------- - #events_collector.stop() + # ----- Delete Service --------------------------------------------------------------------------------------------- + osm_wim.delete_connectivity_service(service_uuid) -def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure service is removed ------------------------------------------------------------------ response = context_client.ListContexts(Empty()) - 
assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 -- GitLab From 9fedfbf4a74a1523f9b48ffa0f7d07724e813c59 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 25 Nov 2022 19:56:59 +0000 Subject: [PATCH 12/13] OFC'22 test: - updated WIM service settings --- src/compute/tests/mock_osm/Tools.py | 11 ++++++++--- src/tests/ofc22/tests/Objects.py | 10 ++++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/compute/tests/mock_osm/Tools.py b/src/compute/tests/mock_osm/Tools.py index 292832bf1..d64ba9021 100644 --- a/src/compute/tests/mock_osm/Tools.py +++ b/src/compute/tests/mock_osm/Tools.py @@ -12,18 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Dict, Optional + def compose_service_endpoint_id(endpoint_id): device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] return ':'.join([device_uuid, endpoint_uuid]) -def wim_mapping(site_id, ce_endpoint_id, pe_device_id, priority=None, redundant=[]): +def wim_mapping(site_id, ce_endpoint_id, pe_device_id : Optional[Dict] = None, priority=None, redundant=[]): ce_endpoint_id = ce_endpoint_id['endpoint_id'] ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] - pe_device_uuid = pe_device_id['device_uuid']['uuid'] service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) - bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) + if pe_device_id is None: + bearer = '{:s}:{:s}'.format(ce_device_uuid, ce_endpoint_uuid) + else: + pe_device_uuid = pe_device_id['device_uuid']['uuid'] + bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) _mapping = { 'service_endpoint_id': service_endpoint_id, 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index 1a52f5636..e56b986bc 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -19,15 +19,17 @@ from compute.tests.mock_osm.Tools import compose_service_endpoint_id, connection # ----- WIM Service Settings ------------------------------------------------------------------------------------------- DEV_R1_ID = json_device_id('R1-EMU') -DEV_R1_ENDPOINT_ID = compose_service_endpoint_id(json_endpoint_id(DEV_R1_ID, '13/1/2')) +DEV_R1_ENDPOINT_ID = json_endpoint_id(DEV_R1_ID, '13/1/2') +WIM_SEP_R1_ID = 
compose_service_endpoint_id(DEV_R1_ENDPOINT_ID) WIM_SEP_R1_SITE_ID = '1' DEV_R3_ID = json_device_id('R3-EMU') -DEV_R3_ENDPOINT_ID = compose_service_endpoint_id(json_endpoint_id(DEV_R3_ID, '13/1/2')) +DEV_R3_ENDPOINT_ID = json_endpoint_id(DEV_R3_ID, '13/1/2') +WIM_SEP_R3_ID = compose_service_endpoint_id(DEV_R3_ENDPOINT_ID) WIM_SEP_R3_SITE_ID = '2' -WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_SEP_R1_SITE_ID, DEV_DC1GW_EPS[0], DEV_CS1GW1_ID) -WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_SEP_R3_SITE_ID, DEV_DC2GW_EPS[0], DEV_CS2GW1_ID) +WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_SEP_R1_SITE_ID, DEV_R1_ENDPOINT_ID) +WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_SEP_R3_SITE_ID, DEV_R3_ENDPOINT_ID) WIM_MAPPING = [WIM_MAP_DC1, WIM_MAP_DC2] WIM_SRV_VLAN_ID = 300 -- GitLab From acc602d7259cde790c37843bb03e216264553a13 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 28 Nov 2022 14:03:57 +0000 Subject: [PATCH 13/13] Common: - fixed show_logs_monitoring script - Context Queries: added get_service, get_slice, get_topology methods - Context Queries: corrected bug with get inter topology - Moved old Build/Load Descriptor scripts to common - Fixed retrieval of objects loaded from descriptors - Deactivated close of clients in descriptor loader - Moved MockOSM library to common tests/tools folder Compute component: - updated to use new context query methods - updated to use new MockOSM location OFC'22 tests: - updated MockOSM paths - updated fixtures - added loggers - corrected WIM service definitions - improved verification of KPIs created and values returned - optimized removal of entities - optimized service discovery in delete test - added on-line logging to tests --- scripts/show_logs_monitoring.sh | 2 +- .../tools/context_queries/InterDomain.py | 15 ++++--- src/common/tools/context_queries/Service.py | 39 +++++++++++++++++++ src/common/tools/context_queries/Slice.py | 39 +++++++++++++++++++ src/common/tools/context_queries/Topology.py | 26 ++++++++++++- src/common/tools/descriptor/Loader.py | 22 +++++------ .../tools/descriptor/old}/BuildDescriptors.py | 0 .../tools/descriptor/old}/LoadDescriptors.py | 2 +- .../nbi_plugins/ietf_l2vpn/L2VPN_Service.py | 4 +- .../ietf_l2vpn/L2VPN_SiteNetworkAccesses.py | 4 +- .../ietf_l2vpn/tools/ContextMethods.py | 39 ------------------- src/compute/tests/PrepareTestScenario.py | 2 +- src/compute/tests/test_unitary.py | 2 +- src/tests/Fixtures.py | 2 - src/tests/ofc22/run_test_01_bootstrap.sh | 2 +- src/tests/ofc22/run_test_02_create_service.sh | 2 +- src/tests/ofc22/run_test_03_delete_service.sh | 2 +- src/tests/ofc22/run_test_04_cleanup.sh | 2 +- src/tests/ofc22/tests/Fixtures.py | 10 +++-- src/tests/ofc22/tests/Objects.py | 24 ++++++------ .../ofc22/tests/test_functional_bootstrap.py | 31 +++++++++++---- .../ofc22/tests/test_functional_cleanup.py | 32 +++------------ .../tests/test_functional_create_service.py | 23 +++++++++-- .../tests/test_functional_delete_service.py | 35 ++++++++--------- .../mock_osm => tests/tools}/__init__.py | 0 .../tools}/mock_osm/Constants.py | 0 .../tests => tests/tools}/mock_osm/MockOSM.py | 0 .../tests => tests/tools}/mock_osm/Tools.py | 15 ++++--- .../tools}/mock_osm/WimconnectorIETFL2VPN.py | 0 src/tests/tools/mock_osm/__init__.py | 14 +++++++ .../tools}/mock_osm/acknowledgements.txt | 0 .../tests => tests/tools}/mock_osm/sdnconn.py | 0 32 files changed, 240 insertions(+), 150 deletions(-) create mode 100644 src/common/tools/context_queries/Service.py create mode 100644 src/common/tools/context_queries/Slice.py rename 
src/{tests/ofc22/tests => common/tools/descriptor/old}/BuildDescriptors.py (100%) rename src/{tests/ofc22/tests => common/tools/descriptor/old}/LoadDescriptors.py (100%) delete mode 100644 src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/ContextMethods.py rename src/{compute/tests/mock_osm => tests/tools}/__init__.py (100%) rename src/{compute/tests => tests/tools}/mock_osm/Constants.py (100%) rename src/{compute/tests => tests/tools}/mock_osm/MockOSM.py (100%) rename src/{compute/tests => tests/tools}/mock_osm/Tools.py (79%) rename src/{compute/tests => tests/tools}/mock_osm/WimconnectorIETFL2VPN.py (100%) create mode 100644 src/tests/tools/mock_osm/__init__.py rename src/{compute/tests => tests/tools}/mock_osm/acknowledgements.txt (100%) rename src/{compute/tests => tests/tools}/mock_osm/sdnconn.py (100%) diff --git a/scripts/show_logs_monitoring.sh b/scripts/show_logs_monitoring.sh index 520a9da1c..faa825fdf 100755 --- a/scripts/show_logs_monitoring.sh +++ b/scripts/show_logs_monitoring.sh @@ -24,4 +24,4 @@ export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"} # Automated steps start here ######################################################################################################################## -kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringserver +kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringservice server diff --git a/src/common/tools/context_queries/InterDomain.py b/src/common/tools/context_queries/InterDomain.py index c47db248e..0a202ccd8 100644 --- a/src/common/tools/context_queries/InterDomain.py +++ b/src/common/tools/context_queries/InterDomain.py @@ -16,13 +16,13 @@ import logging from typing import Dict, List, Set, Tuple from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID, INTERDOMAIN_TOPOLOGY_UUID from common.DeviceTypes import DeviceTypeEnum -from common.proto.context_pb2 import ContextId, Device, Empty, EndPointId, ServiceTypeEnum, Slice, TopologyId +from common.proto.context_pb2 import ContextId, Device, Empty, EndPointId, ServiceTypeEnum, Slice from common.proto.pathcomp_pb2 import PathCompRequest from common.tools.context_queries.CheckType import device_type_is_network -from common.tools.context_queries.Device import get_devices_in_topology, get_uuids_of_devices_in_topology +from common.tools.context_queries.Device import get_devices_in_topology +from common.tools.context_queries.Topology import get_topology from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Context import json_context_id -from common.tools.object_factory.Topology import json_topology_id from context.client.ContextClient import ContextClient from pathcomp.frontend.client.PathCompClient import PathCompClient @@ -60,8 +60,13 @@ def get_local_device_uuids(context_client : ContextClient) -> Set[str]: return local_device_uuids def get_interdomain_device_uuids(context_client : ContextClient) -> Set[str]: - interdomain_topology_id = TopologyId(**json_topology_id(INTERDOMAIN_TOPOLOGY_UUID, context_id=ADMIN_CONTEXT_ID)) - interdomain_topology = context_client.GetTopology(interdomain_topology_id) + context_uuid = DEFAULT_CONTEXT_UUID + topology_uuid = INTERDOMAIN_TOPOLOGY_UUID + interdomain_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid) + if interdomain_topology is None: + MSG = '[get_interdomain_device_uuids] {:s}/{:s} topology not found' + LOGGER.warning(MSG.format(context_uuid, topology_uuid)) + return set() # add abstracted devices in the 
interdomain topology interdomain_device_ids = interdomain_topology.device_ids diff --git a/src/common/tools/context_queries/Service.py b/src/common/tools/context_queries/Service.py new file mode 100644 index 000000000..15b201e73 --- /dev/null +++ b/src/common/tools/context_queries/Service.py @@ -0,0 +1,39 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc, logging +from typing import Optional +from common.Constants import DEFAULT_CONTEXT_UUID +from common.proto.context_pb2 import Service, ServiceId +from context.client.ContextClient import ContextClient + +LOGGER = logging.getLogger(__name__) + +def get_service( + context_client : ContextClient, service_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID, + rw_copy : bool = False + ) -> Optional[Service]: + try: + # pylint: disable=no-member + service_id = ServiceId() + service_id.context_id.context_uuid.uuid = context_uuid + service_id.service_uuid.uuid = service_uuid + ro_service = context_client.GetService(service_id) + if not rw_copy: return ro_service + rw_service = Service() + rw_service.CopyFrom(ro_service) + return rw_service + except grpc.RpcError: + #LOGGER.exception('Unable to get service({:s} / {:s})'.format(str(context_uuid), str(service_uuid))) + return None diff --git a/src/common/tools/context_queries/Slice.py b/src/common/tools/context_queries/Slice.py new file mode 100644 index 000000000..9f884aa94 --- /dev/null +++ b/src/common/tools/context_queries/Slice.py @@ -0,0 +1,39 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc, logging +from typing import Optional +from common.Constants import DEFAULT_CONTEXT_UUID +from common.proto.context_pb2 import Slice, SliceId +from context.client.ContextClient import ContextClient + +LOGGER = logging.getLogger(__name__) + +def get_slice( + context_client : ContextClient, slice_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID, + rw_copy : bool = False + ) -> Optional[Slice]: + try: + # pylint: disable=no-member + slice_id = SliceId() + slice_id.context_id.context_uuid.uuid = context_uuid + slice_id.slice_uuid.uuid = slice_uuid + ro_slice = context_client.GetSlice(slice_id) + if not rw_copy: return ro_slice + rw_slice = Slice() + rw_slice.CopyFrom(ro_slice) + return rw_slice + except grpc.RpcError: + #LOGGER.exception('Unable to get slice({:s} / {:s})'.format(str(context_uuid), str(slice_uuid))) + return None diff --git a/src/common/tools/context_queries/Topology.py b/src/common/tools/context_queries/Topology.py index fcf1b96bb..3d2077e96 100644 --- a/src/common/tools/context_queries/Topology.py +++ b/src/common/tools/context_queries/Topology.py @@ -12,12 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List -from common.proto.context_pb2 import ContextId, Topology +import grpc, logging +from typing import List, Optional +from common.Constants import DEFAULT_CONTEXT_UUID +from common.proto.context_pb2 import ContextId, Topology, TopologyId from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Topology import json_topology from context.client.ContextClient import ContextClient +LOGGER = logging.getLogger(__name__) + def create_topology( context_client : ContextClient, context_uuid : str, topology_uuid : str ) -> None: @@ -39,3 +43,21 @@ def create_missing_topologies( if topology_uuid in existing_topology_uuids: continue grpc_topology = Topology(**json_topology(topology_uuid, context_id=context_id)) context_client.SetTopology(grpc_topology) + +def get_topology( + context_client : ContextClient, topology_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID, + rw_copy : bool = False + ) -> Optional[Topology]: + try: + # pylint: disable=no-member + topology_id = TopologyId() + topology_id.context_id.context_uuid.uuid = context_uuid + topology_id.topology_uuid.uuid = topology_uuid + ro_topology = context_client.GetTopology(topology_id) + if not rw_copy: return ro_topology + rw_topology = Topology() + rw_topology.CopyFrom(ro_topology) + return rw_topology + except grpc.RpcError: + #LOGGER.exception('Unable to get topology({:s} / {:s})'.format(str(context_uuid), str(topology_uuid))) + return None diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py index 468cd4851..f14e2caf6 100644 --- a/src/common/tools/descriptor/Loader.py +++ b/src/common/tools/descriptor/Loader.py @@ -117,7 +117,7 @@ class DescriptorLoader: def topologies(self) -> Dict[str, List[Dict]]: _topologies = {} for topology in self.__topologies: - context_uuid = topology.topology_id.context_id.context_uuid.uuid + context_uuid = topology['topology_id']['context_id']['context_uuid']['uuid'] _topologies.setdefault(context_uuid, []).append(topology) return _topologies @@ -125,7 +125,7 @@ class DescriptorLoader: def num_topologies(self) -> Dict[str, int]: _num_topologies = {} for topology in self.__topologies: - context_uuid = topology.topology_id.context_id.context_uuid.uuid + context_uuid = 
topology['topology_id']['context_id']['context_uuid']['uuid'] _num_topologies[context_uuid] = _num_topologies.get(context_uuid, 0) + 1 return _num_topologies @@ -145,7 +145,7 @@ class DescriptorLoader: def services(self) -> Dict[str, List[Dict]]: _services = {} for service in self.__services: - context_uuid = service.service_id.context_id.context_uuid.uuid + context_uuid = service['service_id']['context_id']['context_uuid']['uuid'] _services.setdefault(context_uuid, []).append(service) return _services @@ -153,7 +153,7 @@ class DescriptorLoader: def num_services(self) -> Dict[str, int]: _num_services = {} for service in self.__services: - context_uuid = service.service_id.context_id.context_uuid.uuid + context_uuid = service['service_id']['context_id']['context_uuid']['uuid'] _num_services[context_uuid] = _num_services.get(context_uuid, 0) + 1 return _num_services @@ -161,7 +161,7 @@ class DescriptorLoader: def slices(self) -> Dict[str, List[Dict]]: _slices = {} for slice_ in self.__slices: - context_uuid = slice_.slice_id.context_id.context_uuid.uuid + context_uuid = slice_['slice_id']['context_id']['context_uuid']['uuid'] _slices.setdefault(context_uuid, []).append(slice_) return _slices @@ -169,7 +169,7 @@ class DescriptorLoader: def num_slices(self) -> Dict[str, int]: _num_slices = {} for slice_ in self.__slices: - context_uuid = slice_.slice_id.context_id.context_uuid.uuid + context_uuid = slice_['slice_id']['context_id']['context_uuid']['uuid'] _num_slices[context_uuid] = _num_slices.get(context_uuid, 0) + 1 return _num_slices @@ -209,7 +209,7 @@ class DescriptorLoader: self._process_descr('connection', 'add', self.__ctx_cli.SetConnection, Connection, self.__connections ) self._process_descr('context', 'update', self.__ctx_cli.SetContext, Context, self.__contexts ) self._process_descr('topology', 'update', self.__ctx_cli.SetTopology, Topology, self.__topologies ) - self.__ctx_cli.close() + #self.__ctx_cli.close() def _normal_mode(self) -> None: # Normal mode: follows the automated workflows in the different components @@ -237,10 +237,10 @@ class DescriptorLoader: self._process_descr('context', 'update', self.__ctx_cli.SetContext, Context, self.__contexts ) self._process_descr('topology', 'update', self.__ctx_cli.SetTopology, Topology, self.__topologies ) - self.__slc_cli.close() - self.__svc_cli.close() - self.__dev_cli.close() - self.__ctx_cli.close() + #self.__slc_cli.close() + #self.__svc_cli.close() + #self.__dev_cli.close() + #self.__ctx_cli.close() def _process_descr(self, entity_name, action_name, grpc_method, grpc_class, entities) -> None: num_ok, error_list = 0, [] diff --git a/src/tests/ofc22/tests/BuildDescriptors.py b/src/common/tools/descriptor/old/BuildDescriptors.py similarity index 100% rename from src/tests/ofc22/tests/BuildDescriptors.py rename to src/common/tools/descriptor/old/BuildDescriptors.py diff --git a/src/tests/ofc22/tests/LoadDescriptors.py b/src/common/tools/descriptor/old/LoadDescriptors.py similarity index 100% rename from src/tests/ofc22/tests/LoadDescriptors.py rename to src/common/tools/descriptor/old/LoadDescriptors.py index 33bc699af..f0b19196a 100644 --- a/src/tests/ofc22/tests/LoadDescriptors.py +++ b/src/common/tools/descriptor/old/LoadDescriptors.py @@ -14,8 +14,8 @@ import json, logging, sys from common.Settings import get_setting -from context.client.ContextClient import ContextClient from common.proto.context_pb2 import Context, Device, Link, Topology +from context.client.ContextClient import ContextClient from device.client.DeviceClient 
import DeviceClient LOGGER = logging.getLogger(__name__) diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py index 7e050289f..e3d120881 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py @@ -17,10 +17,10 @@ from flask import request from flask.json import jsonify from flask_restful import Resource from common.proto.context_pb2 import SliceStatusEnum +from common.tools.context_queries.Slice import get_slice from context.client.ContextClient import ContextClient from slice.client.SliceClient import SliceClient from .tools.Authentication import HTTP_AUTH -from .tools.ContextMethods import get_slice from .tools.HttpStatusCodes import HTTP_GATEWAYTIMEOUT, HTTP_NOCONTENT, HTTP_OK, HTTP_SERVERERROR LOGGER = logging.getLogger(__name__) @@ -34,7 +34,7 @@ class L2VPN_Service(Resource): try: context_client = ContextClient() - target = get_slice(context_client, vpn_id) + target = get_slice(context_client, vpn_id, rw_copy=True) if target is None: raise Exception('VPN({:s}) not found in database'.format(str(vpn_id))) diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py index 8aa410e9a..819d8995d 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py @@ -20,6 +20,7 @@ from flask.wrappers import Response from flask_restful import Resource from werkzeug.exceptions import UnsupportedMediaType from common.proto.context_pb2 import Slice +from common.tools.context_queries.Slice import get_slice from common.tools.grpc.ConfigRules import update_config_rule_custom from common.tools.grpc.Constraints import ( update_constraint_custom_dict, update_constraint_endpoint_location, update_constraint_endpoint_priority, @@ -30,7 +31,6 @@ from context.client.ContextClient import ContextClient from slice.client.SliceClient import SliceClient from .schemas.site_network_access import SCHEMA_SITE_NETWORK_ACCESS from .tools.Authentication import HTTP_AUTH -from .tools.ContextMethods import get_slice from .tools.HttpStatusCodes import HTTP_NOCONTENT, HTTP_SERVERERROR from .tools.Validator import validate_message from .Constants import ( @@ -69,7 +69,7 @@ def process_site_network_access(context_client : ContextClient, site_id : str, s address_ip, address_prefix, remote_router, circuit_id ) = mapping - target = get_slice(context_client, vpn_id) + target = get_slice(context_client, vpn_id, rw_copy=True) if target is None: raise Exception('VPN({:s}) not found in database'.format(str(vpn_id))) endpoint_ids = target.slice_endpoint_ids # pylint: disable=no-member diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/ContextMethods.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/ContextMethods.py deleted file mode 100644 index ac9e6fe4a..000000000 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/ContextMethods.py +++ /dev/null @@ -1,39 +0,0 @@ -import grpc, logging -from typing import Optional -from common.Constants import DEFAULT_CONTEXT_UUID -from common.proto.context_pb2 import Service, ServiceId, Slice, SliceId -from context.client.ContextClient import ContextClient - -LOGGER = logging.getLogger(__name__) - 
-def get_service( - context_client : ContextClient, service_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID - ) -> Optional[Service]: - try: - # pylint: disable=no-member - service_id = ServiceId() - service_id.context_id.context_uuid.uuid = context_uuid - service_id.service_uuid.uuid = service_uuid - service_readonly = context_client.GetService(service_id) - service = Service() - service.CopyFrom(service_readonly) - return service - except grpc.RpcError: - #LOGGER.exception('Unable to get service({:s} / {:s})'.format(str(context_uuid), str(service_uuid))) - return None - -def get_slice( - context_client : ContextClient, slice_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID - ) -> Optional[Slice]: - try: - # pylint: disable=no-member - slice_id = SliceId() - slice_id.context_id.context_uuid.uuid = context_uuid - slice_id.slice_uuid.uuid = slice_uuid - slice_readonly = context_client.GetSlice(slice_id) - slice_ = Slice() - slice_.CopyFrom(slice_readonly) - return slice_ - except grpc.RpcError: - #LOGGER.exception('Unable to get slice({:s} / {:s})'.format(str(context_uuid), str(slice_uuid))) - return None diff --git a/src/compute/tests/PrepareTestScenario.py b/src/compute/tests/PrepareTestScenario.py index d534a4a28..06fb34f9e 100644 --- a/src/compute/tests/PrepareTestScenario.py +++ b/src/compute/tests/PrepareTestScenario.py @@ -19,7 +19,7 @@ from common.Settings import ( from compute.service.rest_server.RestServer import RestServer from compute.service.rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn from compute.tests.MockService_Dependencies import MockService_Dependencies -from .mock_osm.MockOSM import MockOSM +from tests.tools.mock_osm.MockOSM import MockOSM from .Constants import WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD LOCAL_HOST = '127.0.0.1' diff --git a/src/compute/tests/test_unitary.py b/src/compute/tests/test_unitary.py index 05c45c1b3..acef6d4a6 100644 --- a/src/compute/tests/test_unitary.py +++ b/src/compute/tests/test_unitary.py @@ -13,7 +13,7 @@ # limitations under the License. import logging -from .mock_osm.MockOSM import MockOSM +from tests.tools.mock_osm.MockOSM import MockOSM from .Constants import SERVICE_CONNECTION_POINTS_1, SERVICE_CONNECTION_POINTS_2, SERVICE_TYPE from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! diff --git a/src/tests/Fixtures.py b/src/tests/Fixtures.py index aeead8448..25b73e1de 100644 --- a/src/tests/Fixtures.py +++ b/src/tests/Fixtures.py @@ -13,8 +13,6 @@ # limitations under the License. import pytest -from common.Settings import get_setting -from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient from monitoring.client.MonitoringClient import MonitoringClient diff --git a/src/tests/ofc22/run_test_01_bootstrap.sh b/src/tests/ofc22/run_test_01_bootstrap.sh index 4f0b6cd7d..61b49b251 100755 --- a/src/tests/ofc22/run_test_01_bootstrap.sh +++ b/src/tests/ofc22/run_test_01_bootstrap.sh @@ -14,4 +14,4 @@ # limitations under the License. 
source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ofc22/tests/test_functional_bootstrap.py +pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_bootstrap.py diff --git a/src/tests/ofc22/run_test_02_create_service.sh b/src/tests/ofc22/run_test_02_create_service.sh index 8b6c8658d..135a3f74f 100755 --- a/src/tests/ofc22/run_test_02_create_service.sh +++ b/src/tests/ofc22/run_test_02_create_service.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ofc22/tests/test_functional_create_service.py +pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_create_service.py diff --git a/src/tests/ofc22/run_test_03_delete_service.sh b/src/tests/ofc22/run_test_03_delete_service.sh index 51df41aee..cbe6714fe 100755 --- a/src/tests/ofc22/run_test_03_delete_service.sh +++ b/src/tests/ofc22/run_test_03_delete_service.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ofc22/tests/test_functional_delete_service.py +pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_delete_service.py diff --git a/src/tests/ofc22/run_test_04_cleanup.sh b/src/tests/ofc22/run_test_04_cleanup.sh index 2ba91684f..e88ddbd32 100755 --- a/src/tests/ofc22/run_test_04_cleanup.sh +++ b/src/tests/ofc22/run_test_04_cleanup.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ofc22/tests/test_functional_cleanup.py +pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_cleanup.py diff --git a/src/tests/ofc22/tests/Fixtures.py b/src/tests/ofc22/tests/Fixtures.py index b68f27460..3b35a12e2 100644 --- a/src/tests/ofc22/tests/Fixtures.py +++ b/src/tests/ofc22/tests/Fixtures.py @@ -12,13 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest +import pytest, logging from common.Settings import get_setting -from compute.tests.mock_osm.MockOSM import MockOSM -from .Objects import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME +from tests.tools.mock_osm.Constants import WIM_PASSWORD, WIM_USERNAME +from tests.tools.mock_osm.MockOSM import MockOSM +from .Objects import WIM_MAPPING + +LOGGER = logging.getLogger(__name__) @pytest.fixture(scope='session') def osm_wim(): wim_url = 'http://{:s}:{:s}'.format( get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP'))) + LOGGER.info('WIM_MAPPING = {:s}'.format(str(WIM_MAPPING))) return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD) diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index e56b986bc..7bfbe9fce 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -14,27 +14,25 @@ from common.tools.object_factory.Device import json_device_id from common.tools.object_factory.EndPoint import json_endpoint_id -from compute.tests.mock_osm.Tools import compose_service_endpoint_id, connection_point, wim_mapping +from tests.tools.mock_osm.Tools import connection_point, wim_mapping # ----- WIM Service Settings ------------------------------------------------------------------------------------------- -DEV_R1_ID = json_device_id('R1-EMU') -DEV_R1_ENDPOINT_ID = json_endpoint_id(DEV_R1_ID, '13/1/2') -WIM_SEP_R1_ID = compose_service_endpoint_id(DEV_R1_ENDPOINT_ID) -WIM_SEP_R1_SITE_ID = '1' +WIM_DC1_SITE_ID = '1' +WIM_DC1_DEVICE_ID = json_device_id('R1-EMU') +WIM_DC1_ENDPOINT_ID = json_endpoint_id(WIM_DC1_DEVICE_ID, '13/1/2') -DEV_R3_ID = json_device_id('R3-EMU') -DEV_R3_ENDPOINT_ID = json_endpoint_id(DEV_R3_ID, '13/1/2') -WIM_SEP_R3_ID = compose_service_endpoint_id(DEV_R3_ENDPOINT_ID) -WIM_SEP_R3_SITE_ID = '2' +WIM_DC2_SITE_ID = '2' +WIM_DC2_DEVICE_ID = json_device_id('R3-EMU') +WIM_DC2_ENDPOINT_ID = json_endpoint_id(WIM_DC2_DEVICE_ID, '13/1/2') -WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_SEP_R1_SITE_ID, DEV_R1_ENDPOINT_ID) -WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_SEP_R3_SITE_ID, DEV_R3_ENDPOINT_ID) +WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_DC1_SITE_ID, WIM_DC1_ENDPOINT_ID) +WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_DC2_SITE_ID, WIM_DC2_ENDPOINT_ID) WIM_MAPPING = [WIM_MAP_DC1, WIM_MAP_DC2] WIM_SRV_VLAN_ID = 300 WIM_SERVICE_TYPE = 'ELINE' WIM_SERVICE_CONNECTION_POINTS = [ - connection_point(WIM_SEP_R1_ID, 'dot1q', WIM_SRV_VLAN_ID), - connection_point(WIM_SEP_R3_ID, 'dot1q', WIM_SRV_VLAN_ID), + connection_point(WIM_SEP_DC1, 'dot1q', WIM_SRV_VLAN_ID), + connection_point(WIM_SEP_DC2, 'dot1q', WIM_SRV_VLAN_ID), ] diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index e6d61034b..71deb9d59 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -1,5 +1,3 @@ -##### LLUIS GIFRE (CTTC): CODE UNDER REARRANGEMENT ##### - # Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import logging +import logging, time from common.proto.context_pb2 import ContextId, Empty from common.proto.monitoring_pb2 import KpiDescriptorList from common.tests.LoadScenario import load_scenario_from_descriptor +from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Context import json_context_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient @@ -29,7 +28,7 @@ LOGGER.setLevel(logging.DEBUG) DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json' -def test_scenario_empty( +def test_scenario_bootstrap( context_client : ContextClient, # pylint: disable=redefined-outer-name device_client : DeviceClient, # pylint: disable=redefined-outer-name ) -> None: @@ -68,13 +67,29 @@ def test_scenario_empty( assert len(response.services) == 0 def test_scenario_kpis_created( + context_client : ContextClient, # pylint: disable=redefined-outer-name monitoring_client: MonitoringClient, # pylint: disable=redefined-outer-name ) -> None: """ This test validates that KPIs related to the service/device/endpoint were created during the service creation process. """ - response: KpiDescriptorList = monitoring_client.GetKpiDescriptorList(Empty()) - # TODO: replace the magic number `16` below for a formula that adapts to the number - # of links and devices - assert len(response.kpi_descriptor_list) >= 16 + response = context_client.ListDevices(Empty()) + kpis_expected = set() + for device in response.devices: + device_uuid = device.device_id.device_uuid.uuid + for endpoint in device.device_endpoints: + endpoint_uuid = endpoint.endpoint_id.endpoint_uuid.uuid + for kpi_sample_type in endpoint.kpi_sample_types: + kpis_expected.add((device_uuid, endpoint_uuid, kpi_sample_type)) + num_kpis_expected = len(kpis_expected) + LOGGER.info('Num KPIs expected: {:d}'.format(num_kpis_expected)) + + num_kpis_created, num_retry = 0, 0 + while (num_kpis_created != num_kpis_expected) and (num_retry < 5): + response: KpiDescriptorList = monitoring_client.GetKpiDescriptorList(Empty()) + num_kpis_created = len(response.kpi_descriptor_list) + LOGGER.info('Num KPIs created: {:d}'.format(num_kpis_created)) + time.sleep(0.5) + num_retry += 1 + assert num_kpis_created == num_kpis_expected diff --git a/src/tests/ofc22/tests/test_functional_cleanup.py b/src/tests/ofc22/tests/test_functional_cleanup.py index 925b75707..be807eaa0 100644 --- a/src/tests/ofc22/tests/test_functional_cleanup.py +++ b/src/tests/ofc22/tests/test_functional_cleanup.py @@ -54,39 +54,19 @@ def test_services_removed( assert len(response.services) == 0 - # ----- Delete Links ----------------------------------------------------------------------------------------------- + # ----- Delete Links, Devices, Topologies, Contexts ---------------------------------------------------------------- for link in descriptor_loader.links: - link_id = link['link_id'] - link_uuid = link_id['link_uuid']['uuid'] - LOGGER.info('Deleting Link {:s}'.format(link_uuid)) - context_client.RemoveLink(LinkId(**link_id)) + context_client.RemoveLink(LinkId(**link['link_id'])) + for device in descriptor_loader.devices: + device_client .DeleteDevice(DeviceId(**device['device_id'])) - # ----- Delete Devices --------------------------------------------------------------------------------------------- - for device, _ in descriptor_loader.devices: - device_id = device['device_id'] - device_uuid = device_id['device_uuid']['uuid'] - LOGGER.info('Deleting Device {:s}'.format(device_uuid)) - 
device_client.DeleteDevice(DeviceId(**device_id)) - - - # ----- Delete Topologies ------------------------------------------------------------------------------------------ for context_uuid, topology_list in descriptor_loader.topologies.items(): for topology in topology_list: - topology_id = topology['topology_id'] - context_uuid = topology_id['context_id']['context_uuid']['uuid'] - topology_uuid = topology_id['topology_uuid']['uuid'] - LOGGER.info('Deleting Topology {:s}/{:s}'.format(context_uuid, topology_uuid)) - context_client.RemoveTopology(TopologyId(**topology_id)) - context_id = json_context_id(context_uuid) - + context_client.RemoveTopology(TopologyId(**topology['topology_id'])) - # ----- Delete Contexts -------------------------------------------------------------------------------------------- for context in descriptor_loader.contexts: - context_id = context['context_id'] - context_uuid = context_id['context_uuid']['uuid'] - LOGGER.info('Deleting Context {:s}'.format(context_uuid)) - context_client.RemoveContext(ContextId(**context_id)) + context_client.RemoveContext(ContextId(**context['context_id'])) # ----- List entities - Ensure database is empty again ------------------------------------------------------------- diff --git a/src/tests/ofc22/tests/test_functional_create_service.py b/src/tests/ofc22/tests/test_functional_create_service.py index ab32c01f4..e606d060d 100644 --- a/src/tests/ofc22/tests/test_functional_create_service.py +++ b/src/tests/ofc22/tests/test_functional_create_service.py @@ -15,13 +15,14 @@ import logging, random from common.DeviceTypes import DeviceTypeEnum from common.proto.context_pb2 import ContextId, Empty +from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.tools.descriptor.Loader import DescriptorLoader from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Context import json_context_id -from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from monitoring.client.MonitoringClient import MonitoringClient from tests.Fixtures import context_client, device_client, monitoring_client # pylint: disable=unused-import +from tests.tools.mock_osm.MockOSM import MockOSM from .Fixtures import osm_wim # pylint: disable=unused-import from .Objects import WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE @@ -102,6 +103,22 @@ def test_scenario_kpi_values_created( kpi_descriptors = random.choices(response.kpi_descriptor_list, k=2) for kpi_descriptor in kpi_descriptors: + MSG = 'KPI(kpi_uuid={:s}, device_uuid={:s}, endpoint_uuid={:s}, service_uuid={:s}, kpi_sample_type={:s})...' 
+ LOGGER.info(MSG.format( + str(kpi_descriptor.kpi_id.kpi_id.uuid), str(kpi_descriptor.device_id.device_uuid.uuid), + str(kpi_descriptor.endpoint_id.endpoint_uuid.uuid), str(kpi_descriptor.service_id.service_uuid.uuid), + str(KpiSampleType.Name(kpi_descriptor.kpi_sample_type)))) response = monitoring_client.GetInstantKpi(kpi_descriptor.kpi_id) - assert response.kpi_id.kpi_id.uuid == kpi_descriptor.kpi_id.kpi_id.uuid - assert response.timestamp.timestamp > 0 + kpi_uuid = response.kpi_id.kpi_id.uuid + assert kpi_uuid == kpi_descriptor.kpi_id.kpi_id.uuid + kpi_value_type = response.kpi_value.WhichOneof('value') + if kpi_value_type is None: + MSG = ' KPI({:s}): No instant value found' + LOGGER.warning(MSG.format(str(kpi_uuid))) + else: + kpi_timestamp = response.timestamp.timestamp + assert kpi_timestamp > 0 + assert kpi_value_type == 'floatVal' + kpi_value = getattr(response.kpi_value, kpi_value_type) + MSG = ' KPI({:s}): timestamp={:s} value_type={:s} value={:s}' + LOGGER.info(MSG.format(str(kpi_uuid), str(kpi_timestamp), str(kpi_value_type), str(kpi_value))) diff --git a/src/tests/ofc22/tests/test_functional_delete_service.py b/src/tests/ofc22/tests/test_functional_delete_service.py index e9839f89b..0f8d08801 100644 --- a/src/tests/ofc22/tests/test_functional_delete_service.py +++ b/src/tests/ofc22/tests/test_functional_delete_service.py @@ -13,14 +13,15 @@ # limitations under the License. import logging +from common.Constants import DEFAULT_CONTEXT_UUID from common.DeviceTypes import DeviceTypeEnum from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum from common.tools.descriptor.Loader import DescriptorLoader from common.tools.object_factory.Context import json_context_id from common.tools.grpc.Tools import grpc_message_to_json_string -from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from tests.Fixtures import context_client # pylint: disable=unused-import +from tests.tools.mock_osm.MockOSM import MockOSM from .Fixtures import osm_wim # pylint: disable=unused-import @@ -53,32 +54,30 @@ def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # p response = context_client.ListLinks(Empty()) assert len(response.links) == descriptor_loader.num_links - service_uuids = set() - for context_uuid, num_services in descriptor_loader.num_services.items(): - response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) - LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) - assert len(response.services) == 2*num_services # OLS & L3NM => (L3NM + TAPI) + l3nm_service_uuids = set() + response = context_client.ListServices(ContextId(**json_context_id(DEFAULT_CONTEXT_UUID))) + assert len(response.services) == 2 # OLS & L3NM => (L3NM + TAPI) + for service in response.services: + service_id = service.service_id if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM: - service_uuid = service.service_id.service_uuid.uuid - service_uuids.add(service_uuid) + service_uuid = service_id.service_uuid.uuid + l3nm_service_uuids.add(service_uuid) osm_wim.conn_info[service_uuid] = {} - for service in response.services: - service_id = service.service_id - response = context_client.ListConnections(service_id) - LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( - grpc_message_to_json_string(service_id), len(response.connections), - grpc_message_to_json_string(response))) - assert len(response.connections) == 1 # one connection per 
service + response = context_client.ListConnections(service_id) + LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( + grpc_message_to_json_string(service_id), len(response.connections), + grpc_message_to_json_string(response))) + assert len(response.connections) == 1 # one connection per service # Identify service to delete - assert len(service_uuids) == 1 # assume a single L3NM service has been created - service_uuid = set(service_uuids).pop() + assert len(l3nm_service_uuids) == 1 # assume a single L3NM service has been created + l3nm_service_uuid = set(l3nm_service_uuids).pop() # ----- Delete Service --------------------------------------------------------------------------------------------- - osm_wim.delete_connectivity_service(service_uuid) + osm_wim.delete_connectivity_service(l3nm_service_uuid) # ----- List entities - Ensure service is removed ------------------------------------------------------------------ diff --git a/src/compute/tests/mock_osm/__init__.py b/src/tests/tools/__init__.py similarity index 100% rename from src/compute/tests/mock_osm/__init__.py rename to src/tests/tools/__init__.py diff --git a/src/compute/tests/mock_osm/Constants.py b/src/tests/tools/mock_osm/Constants.py similarity index 100% rename from src/compute/tests/mock_osm/Constants.py rename to src/tests/tools/mock_osm/Constants.py diff --git a/src/compute/tests/mock_osm/MockOSM.py b/src/tests/tools/mock_osm/MockOSM.py similarity index 100% rename from src/compute/tests/mock_osm/MockOSM.py rename to src/tests/tools/mock_osm/MockOSM.py diff --git a/src/compute/tests/mock_osm/Tools.py b/src/tests/tools/mock_osm/Tools.py similarity index 79% rename from src/compute/tests/mock_osm/Tools.py rename to src/tests/tools/mock_osm/Tools.py index d64ba9021..25a8b6111 100644 --- a/src/compute/tests/mock_osm/Tools.py +++ b/src/tests/tools/mock_osm/Tools.py @@ -14,22 +14,21 @@ from typing import Dict, Optional -def compose_service_endpoint_id(endpoint_id): +def compose_service_endpoint_id(site_id : str, endpoint_id : Dict): device_uuid = endpoint_id['device_id']['device_uuid']['uuid'] endpoint_uuid = endpoint_id['endpoint_uuid']['uuid'] - return ':'.join([device_uuid, endpoint_uuid]) + return ':'.join([site_id, device_uuid, endpoint_uuid]) def wim_mapping(site_id, ce_endpoint_id, pe_device_id : Optional[Dict] = None, priority=None, redundant=[]): - ce_endpoint_id = ce_endpoint_id['endpoint_id'] ce_device_uuid = ce_endpoint_id['device_id']['device_uuid']['uuid'] ce_endpoint_uuid = ce_endpoint_id['endpoint_uuid']['uuid'] - service_endpoint_id = '{:s}:{:s}:{:s}'.format(site_id, ce_device_uuid, ce_endpoint_uuid) + service_endpoint_id = compose_service_endpoint_id(site_id, ce_endpoint_id) if pe_device_id is None: bearer = '{:s}:{:s}'.format(ce_device_uuid, ce_endpoint_uuid) else: pe_device_uuid = pe_device_id['device_uuid']['uuid'] bearer = '{:s}:{:s}'.format(ce_device_uuid, pe_device_uuid) - _mapping = { + mapping = { 'service_endpoint_id': service_endpoint_id, 'datacenter_id': site_id, 'device_id': ce_device_uuid, 'device_interface_id': ce_endpoint_uuid, 'service_mapping_info': { @@ -37,9 +36,9 @@ def wim_mapping(site_id, ce_endpoint_id, pe_device_id : Optional[Dict] = None, p 'bearer': {'bearer-reference': bearer}, } } - if priority is not None: _mapping['service_mapping_info']['priority'] = priority - if len(redundant) > 0: _mapping['service_mapping_info']['redundant'] = redundant - return service_endpoint_id, _mapping + if priority is not None: mapping['service_mapping_info']['priority'] = priority + if 
len(redundant) > 0: mapping['service_mapping_info']['redundant'] = redundant + return service_endpoint_id, mapping def connection_point(service_endpoint_id : str, encapsulation_type : str, vlan_id : int): return { diff --git a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py b/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py similarity index 100% rename from src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py rename to src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py diff --git a/src/tests/tools/mock_osm/__init__.py b/src/tests/tools/mock_osm/__init__.py new file mode 100644 index 000000000..70a332512 --- /dev/null +++ b/src/tests/tools/mock_osm/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/compute/tests/mock_osm/acknowledgements.txt b/src/tests/tools/mock_osm/acknowledgements.txt similarity index 100% rename from src/compute/tests/mock_osm/acknowledgements.txt rename to src/tests/tools/mock_osm/acknowledgements.txt diff --git a/src/compute/tests/mock_osm/sdnconn.py b/src/tests/tools/mock_osm/sdnconn.py similarity index 100% rename from src/compute/tests/mock_osm/sdnconn.py rename to src/tests/tools/mock_osm/sdnconn.py -- GitLab
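
Note on the get_slice(..., rw_copy=True) calls introduced above: the deleted ietf_l2vpn/tools/ContextMethods.py helper returned a detached, writable copy of the Slice fetched from the Context component, and the rw_copy=True flag of the shared common.tools.context_queries.Slice.get_slice is expected to preserve that behaviour. A minimal sketch of that copy pattern, assuming a reachable ContextClient and the default context UUID (the function name below is illustrative, not part of the patch):

    import grpc
    from typing import Optional
    from common.Constants import DEFAULT_CONTEXT_UUID
    from common.proto.context_pb2 import Slice, SliceId
    from context.client.ContextClient import ContextClient

    def get_writable_slice(
        context_client : ContextClient, slice_uuid : str, context_uuid : str = DEFAULT_CONTEXT_UUID
    ) -> Optional[Slice]:
        # Compose the SliceId and fetch the Slice from the Context component.
        slice_id = SliceId()
        slice_id.context_id.context_uuid.uuid = context_uuid
        slice_id.slice_uuid.uuid = slice_uuid
        try:
            slice_readonly = context_client.GetSlice(slice_id)
        except grpc.RpcError:
            return None  # slice not found, or Context component unreachable
        # Return a detached copy that callers can modify before pushing an update.
        slice_rw = Slice()
        slice_rw.CopyFrom(slice_readonly)
        return slice_rw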
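
The relocated mock_osm helpers (now under src/tests/tools/mock_osm/) also change how service endpoint identifiers are composed: compose_service_endpoint_id() now prefixes the site id, and wim_mapping() takes the endpoint_id dict directly instead of unwrapping endpoint_id['endpoint_id']. A small usage sketch, assuming the repository's src/ directory is on PYTHONPATH; the endpoint dict is hand-written here for illustration and mirrors what json_endpoint_id() produces, with the VLAN id matching WIM_SRV_VLAN_ID in Objects.py:

    from tests.tools.mock_osm.Tools import connection_point, wim_mapping

    # CE endpoint identifier in the same dict form produced by json_endpoint_id().
    ce_endpoint_id = {
        'device_id': {'device_uuid': {'uuid': 'R1-EMU'}},
        'endpoint_uuid': {'uuid': '13/1/2'},
    }

    # The returned service endpoint id is now '<site>:<device>:<endpoint>'.
    service_endpoint_id, mapping = wim_mapping('1', ce_endpoint_id)
    print(service_endpoint_id)                                             # 1:R1-EMU:13/1/2
    print(mapping['service_mapping_info']['bearer']['bearer-reference'])   # R1-EMU:13/1/2

    # Connection points reference the composed service endpoint id.
    print(connection_point(service_endpoint_id, 'dot1q', 300))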