diff --git a/my_deploy.sh b/my_deploy.sh index 4c5475da58ed8ab19e7e023c76d92a21d6d4f6dd..f909e9ebd8a2889dbf8924f899cada56eec65c01 100644 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -75,19 +75,19 @@ export NATS_REDEPLOY="" # ----- QuestDB ---------------------------------------------------------------- # If not already set, set the namespace where QuestDB will be deployed. -export QDB_NAMESPACE=${QDB_NAMESPACE:-"qdb"} +export QDB_NAMESPACE="qdb" # If not already set, set the database username to be used by Monitoring. -export QDB_USERNAME=${QDB_USERNAME:-"admin"} +export QDB_USERNAME="admin" # If not already set, set the database user's password to be used by Monitoring. -export QDB_PASSWORD=${QDB_PASSWORD:-"quest"} +export QDB_PASSWORD="quest" # If not already set, set the table name to be used by Monitoring. -export QDB_TABLE=${QDB_TABLE:-"tfs_monitoring"} +export QDB_TABLE="tfs_monitoring" ## If not already set, disable flag for dropping table if exists. -#export QDB_DROP_TABLE_IF_EXISTS=${QDB_DROP_TABLE_IF_EXISTS:-""} +#export QDB_DROP_TABLE_IF_EXISTS="" # If not already set, disable flag for re-deploying QuestDB from scratch. -export QDB_REDEPLOY=${QDB_REDEPLOY:-""} +export QDB_REDEPLOY="" diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index 76b49bc8bd4a5ded840ccad13f0941d05070d344..6a80fe698d04aa1e74ef501e6ca97b3d80bb5ad0 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -63,7 +63,9 @@ class KDisjointPathAlgorithm(_Algorithm): if constraint.WhichOneof('constraint') == 'endpoint_location': endpoint_id = constraint.endpoint_location.endpoint_id device_uuid = endpoint_id.device_id.device_uuid.uuid + device_uuid = self.device_name_mapping.get(device_uuid, device_uuid) endpoint_uuid = endpoint_id.endpoint_uuid.uuid + endpoint_uuid = self.endpoint_name_mapping.get((device_uuid, endpoint_uuid), endpoint_uuid) location_kind = constraint.endpoint_location.location.WhichOneof('location') if location_kind != 'region': MSG = 'Unsupported LocationType({:s}) in Constraint({:s})' @@ -74,7 +76,9 @@ class KDisjointPathAlgorithm(_Algorithm): if constraint.WhichOneof('constraint') == 'endpoint_priority': endpoint_id = constraint.endpoint_priority.endpoint_id device_uuid = endpoint_id.device_id.device_uuid.uuid + device_uuid = self.device_name_mapping.get(device_uuid, device_uuid) endpoint_uuid = endpoint_id.endpoint_uuid.uuid + endpoint_uuid = self.endpoint_name_mapping.get((device_uuid, endpoint_uuid), endpoint_uuid) priority = constraint.endpoint_priority.priority endpoints.setdefault((device_uuid, endpoint_uuid), dict())['priority'] = priority @@ -116,8 +120,10 @@ class KDisjointPathAlgorithm(_Algorithm): algorithm = KShortestPathAlgorithm(Algorithm_KShortestPath(k_inspection=0, k_return=1)) algorithm.sync_paths = True algorithm.device_list = self.device_list + algorithm.device_name_mapping = self.device_name_mapping algorithm.device_dict = self.device_dict algorithm.endpoint_dict = self.endpoint_dict + algorithm.endpoint_name_mapping = self.endpoint_name_mapping algorithm.link_list = self.link_list algorithm.link_dict = self.link_dict algorithm.endpoint_to_link_dict = self.endpoint_to_link_dict @@ -139,6 +145,7 @@ class KDisjointPathAlgorithm(_Algorithm): _service.service_id.context_id.context_uuid.uuid = service_key[0] _service.service_id.service_uuid.uuid = service_key[1] _service.service_type = 
service_type + for constraint_type, constraint_value in constraints.items(): constraint = _service.service_constraints.add() constraint.custom.constraint_type = constraint_type diff --git a/src/pathcomp/frontend/service/algorithms/_Algorithm.py b/src/pathcomp/frontend/service/algorithms/_Algorithm.py index bf19ed3e10affd707b5032428efce154e05d4169..c4c8efcd7dce4524764ca8fd251f0d9bf2b99d10 100644 --- a/src/pathcomp/frontend/service/algorithms/_Algorithm.py +++ b/src/pathcomp/frontend/service/algorithms/_Algorithm.py @@ -40,7 +40,9 @@ class _Algorithm: self.device_list : List[Dict] = list() self.device_dict : Dict[str, Tuple[Dict, Device]] = dict() + self.device_name_mapping : Dict[str, str] = dict() self.endpoint_dict : Dict[str, Dict[str, Tuple[Dict, EndPointId]]] = dict() + self.endpoint_name_mapping : Dict[Tuple[str, str], str] = dict() self.link_list : List[Dict] = list() self.link_dict : Dict[str, Tuple[Dict, Link]] = dict() self.endpoint_to_link_dict : Dict[Tuple[str, str], Tuple[Dict, Link]] = dict() @@ -56,12 +58,21 @@ class _Algorithm: device_uuid = json_device['device_Id'] self.device_dict[device_uuid] = (json_device, grpc_device) + _device_uuid = grpc_device.device_id.device_uuid.uuid + _device_name = grpc_device.name + self.device_name_mapping[_device_name] = _device_uuid + device_endpoint_dict : Dict[str, Tuple[Dict, EndPointId]] = dict() for json_endpoint,grpc_endpoint in zip(json_device['device_endpoints'], grpc_device.device_endpoints): endpoint_uuid = json_endpoint['endpoint_id']['endpoint_uuid'] endpoint_tuple = (json_endpoint['endpoint_id'], grpc_endpoint.endpoint_id) device_endpoint_dict[endpoint_uuid] = endpoint_tuple + _endpoint_uuid = grpc_endpoint.endpoint_id.endpoint_uuid.uuid + _endpoint_name = grpc_endpoint.name + self.endpoint_name_mapping[(_device_uuid, _endpoint_name)] = _endpoint_uuid + self.endpoint_name_mapping[(_device_name, _endpoint_name)] = _endpoint_uuid + self.endpoint_dict[device_uuid] = device_endpoint_dict def add_links(self, grpc_links : Union[List[Link], LinkList]) -> None: diff --git a/src/tests/ecoc22/deploy_specs.sh b/src/tests/ecoc22/deploy_specs.sh index 8afd683843d4882e75c3cbca8363aa3d63edda7f..1318aefa632a51a035bd6ad99f7725040e0983f4 100644 --- a/src/tests/ecoc22/deploy_specs.sh +++ b/src/tests/ecoc22/deploy_specs.sh @@ -1,17 +1,94 @@ -# Set the URL of your local Docker registry where the images will be uploaded to. -export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# ----- TeraFlowSDN ------------------------------------------------------------ + +# Set the URL of the internal MicroK8s Docker registry where the images will be uploaded to. +export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. 
+#export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" -# Set the name of the Kubernetes namespace to deploy to. +# Set the name of the Kubernetes namespace to deploy TFS to. export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" -# Set the neew Grafana admin password +# Set the new Grafana admin password export TFS_GRAFANA_PASSWORD="admin123+" + +# Disable skip-build flag to rebuild the Docker images. +export TFS_SKIP_BUILD="" + + +# ----- CockroachDB ------------------------------------------------------------ + +# Set the namespace where CockroackDB will be deployed. +export CRDB_NAMESPACE="crdb" + +# Set the database username to be used by Context. +export CRDB_USERNAME="tfs" + +# Set the database user's password to be used by Context. +export CRDB_PASSWORD="tfs123" + +# Set the database name to be used by Context. +export CRDB_DATABASE="tfs" + +# Set CockroachDB installation mode to 'single'. This option is convenient for development and testing. +# See ./deploy/all.sh or ./deploy/crdb.sh for additional details +export CRDB_DEPLOY_MODE="single" + +# Disable flag for dropping database, if exists. +export CRDB_DROP_DATABASE_IF_EXISTS="" + +# Disable flag for re-deploying CockroachDB from scratch. +export CRDB_REDEPLOY="" + + +# ----- NATS ------------------------------------------------------------------- + +# Set the namespace where NATS will be deployed. +export NATS_NAMESPACE="nats" + +# Disable flag for re-deploying NATS from scratch. +export NATS_REDEPLOY="" + + +# ----- QuestDB ---------------------------------------------------------------- + +# If not already set, set the namespace where QuestDB will be deployed. +export QDB_NAMESPACE="qdb" + +# If not already set, set the database username to be used by Monitoring. +export QDB_USERNAME="admin" + +# If not already set, set the database user's password to be used by Monitoring. +export QDB_PASSWORD="quest" + +# If not already set, set the table name to be used by Monitoring. +export QDB_TABLE="tfs_monitoring" + +## If not already set, disable flag for dropping table if exists. +#export QDB_DROP_TABLE_IF_EXISTS="" + +# If not already set, disable flag for re-deploying QuestDB from scratch. 
+export QDB_REDEPLOY="" diff --git a/src/tests/ecoc22/descriptors_emulated.json b/src/tests/ecoc22/descriptors_emulated.json index 46e518b246f76472e978bbe5841b9ca53c7aaa46..f55954d92fbe3cf75b3464286f897c3f931c0c39 100644 --- a/src/tests/ecoc22/descriptors_emulated.json +++ b/src/tests/ecoc22/descriptors_emulated.json @@ -1,15 +1,9 @@ { "contexts": [ - { - "context_id": {"context_uuid": {"uuid": "admin"}}, - "topology_ids": [], "service_ids": [] - } + {"context_id": {"context_uuid": {"uuid": "admin"}}} ], "topologies": [ - { - "topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}}, - "device_ids": [], "link_ids": [] - } + {"topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}}} ], "devices": [ { @@ -17,7 +11,11 @@ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [], "type": "copper", "uuid": "eth1"}, + {"sample_types": [], "type": "copper", "uuid": "eth2"}, + {"sample_types": [], "type": "copper", "uuid": "int"} + ]}}} ]} }, { @@ -25,39 +23,55 @@ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [], "type": "copper", "uuid": "eth1"}, + {"sample_types": [], "type": "copper", "uuid": "eth2"}, + {"sample_types": [], "type": "copper", "uuid": "int"} + ]}}} ]} }, { - "device_id": {"device_uuid": {"uuid": "CS1-GW1"}}, "device_type": "packet-router", "device_drivers": [1], + "device_id": {"device_uuid": {"uuid": "CS1-GW1"}}, "device_type": "emu-packet-router", "device_drivers": [1], "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"} + ]}}} ]} }, { - "device_id": 
{"device_uuid": {"uuid": "CS1-GW2"}}, "device_type": "packet-router", "device_drivers": [1], + "device_id": {"device_uuid": {"uuid": "CS1-GW2"}}, "device_type": "emu-packet-router", "device_drivers": [1], "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"} + ]}}} ]} }, { - "device_id": {"device_uuid": {"uuid": "CS2-GW1"}}, "device_type": "packet-router", "device_drivers": [1], + "device_id": {"device_uuid": {"uuid": "CS2-GW1"}}, "device_type": "emu-packet-router", "device_drivers": [1], "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"} + ]}}} ]} }, { - "device_id": {"device_uuid": {"uuid": "CS2-GW2"}}, "device_type": "packet-router", "device_drivers": [1], + "device_id": {"device_uuid": {"uuid": "CS2-GW2"}}, "device_type": "emu-packet-router", "device_drivers": [1], "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"} + ]}}} ]} }, { @@ -65,7 +79,12 @@ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], 
\"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [], "type": "optical", "uuid": "aade6001-f00b-5e2f-a357-6a0a9d3de870"}, + {"sample_types": [], "type": "optical", "uuid": "eb287d83-f05e-53ec-ab5a-adf6bd2b5418"}, + {"sample_types": [], "type": "optical", "uuid": "0ef74f99-1acc-57bd-ab9d-4b958b06c513"}, + {"sample_types": [], "type": "optical", "uuid": "50296d99-58cc-5ce7-82f5-fc8ee4eec2ec"} + ]}}} ]} } ], diff --git a/src/tests/ecoc22/redeploy.sh b/src/tests/ecoc22/redeploy.sh index 3f3986debb9aec57e7bc7f67b549b960679a987f..7933284633b5b2fc2b69ed9cabf4ca610175bf02 100755 --- a/src/tests/ecoc22/redeploy.sh +++ b/src/tests/ecoc22/redeploy.sh @@ -1,4 +1,18 @@ #!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + source ecoc22/deploy_specs.sh -./deploy.sh +./deploy/all.sh source tfs_runtime_env_vars.sh diff --git a/src/tests/ecoc22/run_test_01_bootstrap.sh b/src/tests/ecoc22/run_test_01_bootstrap.sh index 819991d78a499c6d6e4a10e96f6439ee5b56ed8d..73ebfcae8fdeb94acf8350cd3168c5876669a1ac 100755 --- a/src/tests/ecoc22/run_test_01_bootstrap.sh +++ b/src/tests/ecoc22/run_test_01_bootstrap.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ecoc22/tests/test_functional_bootstrap.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_bootstrap.py diff --git a/src/tests/ecoc22/run_test_02_create_service.sh b/src/tests/ecoc22/run_test_02_create_service.sh index 5a54d39d496e203ee669efda636067dcc1aa27a9..36190e911e121875271862c006aa7243e681d2f2 100755 --- a/src/tests/ecoc22/run_test_02_create_service.sh +++ b/src/tests/ecoc22/run_test_02_create_service.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ecoc22/tests/test_functional_create_service.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_create_service.py diff --git a/src/tests/ecoc22/run_test_03_delete_service.sh b/src/tests/ecoc22/run_test_03_delete_service.sh index 900e09b658c1a73664dd28dc60ef6a50a9e68570..cde05d6fc5f225f2889c563b2f860722f0bc826b 100755 --- a/src/tests/ecoc22/run_test_03_delete_service.sh +++ b/src/tests/ecoc22/run_test_03_delete_service.sh @@ -14,4 +14,4 @@ # limitations under the License. 
source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ecoc22/tests/test_functional_delete_service.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_delete_service.py diff --git a/src/tests/ecoc22/run_test_04_cleanup.sh b/src/tests/ecoc22/run_test_04_cleanup.sh index 4e0622e6b22d470d842d99bb4202e23e88b72982..bede609627119b407a6cbbca813bdc26c734e4cb 100755 --- a/src/tests/ecoc22/run_test_04_cleanup.sh +++ b/src/tests/ecoc22/run_test_04_cleanup.sh @@ -14,4 +14,4 @@ # limitations under the License. source tfs_runtime_env_vars.sh -pytest --verbose src/tests/ecoc22/tests/test_functional_cleanup.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_cleanup.py diff --git a/src/tests/ecoc22/run_tests.sh b/src/tests/ecoc22/run_tests.sh new file mode 100755 index 0000000000000000000000000000000000000000..38a11390235f607571b00bb84d6b3a973d181c28 --- /dev/null +++ b/src/tests/ecoc22/run_tests.sh @@ -0,0 +1,21 @@ +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Run functional tests +source tfs_runtime_env_vars.sh +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_bootstrap.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_create_service.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_delete_service.py +pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_cleanup.py diff --git a/src/tests/ecoc22/run_tests_and_coverage.sh b/src/tests/ecoc22/run_tests_and_coverage.sh deleted file mode 100755 index 4517cc1ea7eec7027219517720c99bfea3b4250b..0000000000000000000000000000000000000000 --- a/src/tests/ecoc22/run_tests_and_coverage.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -RCFILE=$PROJECTDIR/coverage/.coveragerc -COVERAGEFILE=$PROJECTDIR/coverage/.coverage - -# Configure the correct folder on the .coveragerc file -cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PROJECTDIR+g > $RCFILE - -# Destroy old coverage file -rm -f $COVERAGEFILE - -source tfs_runtime_env_vars.sh - -# Force a flush of Context database -kubectl --namespace $TFS_K8S_NAMESPACE exec -it deployment/contextservice --container redis -- redis-cli FLUSHALL - -# Run functional tests and analyze code coverage at the same time -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - src/tests/ecoc22/tests/test_functional_bootstrap.py - -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - src/tests/ecoc22/tests/test_functional_create_service.py - -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - src/tests/ecoc22/tests/test_functional_delete_service.py - -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - src/tests/ecoc22/tests/test_functional_cleanup.py diff --git a/src/tests/ecoc22/tests/.gitignore b/src/tests/ecoc22/tests/.gitignore index 6b97d6fe3ad32f39097745229ab7f547f26ecb12..76cb708d1b532c9b69166e55f36bcb912fd5e370 100644 --- a/src/tests/ecoc22/tests/.gitignore +++ b/src/tests/ecoc22/tests/.gitignore @@ -1 +1,2 @@ # Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc. +Credentials.py diff --git a/src/tests/ecoc22/tests/Credentials.py b/src/tests/ecoc22/tests/Credentials.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/src/tests/ecoc22/tests/Fixtures.py b/src/tests/ecoc22/tests/Fixtures.py index 0e5c7fbe3107ea55ba8243be18e9b100571d1c4b..3b35a12e299ba776e909fbdd2739e971431083a6 100644 --- a/src/tests/ecoc22/tests/Fixtures.py +++ b/src/tests/ecoc22/tests/Fixtures.py @@ -12,15 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest +import pytest, logging from common.Settings import get_setting -from compute.tests.mock_osm.MockOSM import MockOSM -#from .Objects_BigNet import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME -from .Objects_DC_CSGW_TN import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME -#from .Objects_DC_CSGW_TN_OLS import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME +from tests.tools.mock_osm.Constants import WIM_PASSWORD, WIM_USERNAME +from tests.tools.mock_osm.MockOSM import MockOSM +from .Objects import WIM_MAPPING + +LOGGER = logging.getLogger(__name__) @pytest.fixture(scope='session') def osm_wim(): wim_url = 'http://{:s}:{:s}'.format( get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP'))) + LOGGER.info('WIM_MAPPING = {:s}'.format(str(WIM_MAPPING))) return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD) diff --git a/src/tests/ecoc22/tests/Objects.py b/src/tests/ecoc22/tests/Objects.py new file mode 100644 index 0000000000000000000000000000000000000000..cbed872f78cf99598e551da56b5f78566c00b40c --- /dev/null +++ b/src/tests/ecoc22/tests/Objects.py @@ -0,0 +1,51 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.tools.object_factory.Device import json_device_id +from common.tools.object_factory.EndPoint import json_endpoint_id +from tests.tools.mock_osm.Tools import connection_point, wim_mapping + +# ----- WIM Service Settings ------------------------------------------------------------------------------------------- +# PRI = primary // BKP = backup + +SITE_ID_DC1 = 'DC1' +DEV_ID_DC1 = json_device_id('DC1-GW') +EP_ID_DC1_PRI = json_endpoint_id(DEV_ID_DC1, 'eth1') +EP_ID_DC1_BKP = json_endpoint_id(DEV_ID_DC1, 'eth2') +DEV_ID_CS1GW1 = json_device_id('CS1-GW1') +DEV_ID_CS1GW2 = json_device_id('CS1-GW2') + +SITE_ID_DC2 = 'DC2' +DEV_ID_DC2 = json_device_id('DC2-GW') +EP_ID_DC2_PRI = json_endpoint_id(DEV_ID_DC2, 'eth1') +EP_ID_DC2_BKP = json_endpoint_id(DEV_ID_DC2, 'eth2') +DEV_ID_CS2GW1 = json_device_id('CS2-GW1') +DEV_ID_CS2GW2 = json_device_id('CS2-GW2') + +WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = wim_mapping(SITE_ID_DC1, EP_ID_DC1_PRI, DEV_ID_CS1GW1, priority=10, redundant=['DC1:DC1-GW:eth2']) +WIM_SEP_DC1_BKP, WIM_MAP_DC1_BKP = wim_mapping(SITE_ID_DC1, EP_ID_DC1_BKP, DEV_ID_CS1GW2, priority=20, redundant=['DC1:DC1-GW:eth1']) +WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = wim_mapping(SITE_ID_DC2, EP_ID_DC2_PRI, DEV_ID_CS2GW1, priority=10, redundant=['DC2:DC2-GW:eth2']) +WIM_SEP_DC2_BKP, WIM_MAP_DC2_BKP = wim_mapping(SITE_ID_DC2, EP_ID_DC2_BKP, DEV_ID_CS2GW2, priority=20, redundant=['DC2:DC2-GW:eth1']) + +WIM_MAPPING = [ + WIM_MAP_DC1_PRI, WIM_MAP_DC1_BKP, + WIM_MAP_DC2_PRI, WIM_MAP_DC2_BKP, +] + +WIM_SRV_VLAN_ID = 300 +WIM_SERVICE_TYPE = 'ELAN' +WIM_SERVICE_CONNECTION_POINTS = [ + connection_point(WIM_SEP_DC1_PRI, 'dot1q', WIM_SRV_VLAN_ID), + connection_point(WIM_SEP_DC2_PRI, 'dot1q', WIM_SRV_VLAN_ID), +] diff --git a/src/tests/ecoc22/tests/BuildDescriptors.py b/src/tests/ecoc22/tests/old_code/BuildDescriptors.py similarity index 100% rename from src/tests/ecoc22/tests/BuildDescriptors.py rename to src/tests/ecoc22/tests/old_code/BuildDescriptors.py diff --git a/src/tests/ecoc22/tests/LoadDescriptors.py b/src/tests/ecoc22/tests/old_code/LoadDescriptors.py similarity index 100% rename from src/tests/ecoc22/tests/LoadDescriptors.py rename to src/tests/ecoc22/tests/old_code/LoadDescriptors.py diff --git a/src/tests/ecoc22/tests/Objects_BigNet.py b/src/tests/ecoc22/tests/old_code/Objects_BigNet.py similarity index 100% rename from src/tests/ecoc22/tests/Objects_BigNet.py rename to src/tests/ecoc22/tests/old_code/Objects_BigNet.py diff --git a/src/tests/ecoc22/tests/Objects_DC_CSGW_OLS.py b/src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_OLS.py similarity index 100% rename from src/tests/ecoc22/tests/Objects_DC_CSGW_OLS.py rename to src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_OLS.py diff --git a/src/tests/ecoc22/tests/Objects_DC_CSGW_TN.py b/src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_TN.py similarity index 100% rename from src/tests/ecoc22/tests/Objects_DC_CSGW_TN.py rename to src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_TN.py diff --git a/src/tests/ecoc22/tests/Objects_DC_CSGW_TN_OLS.py b/src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_TN_OLS.py similarity index 100% rename from 
src/tests/ecoc22/tests/Objects_DC_CSGW_TN_OLS.py rename to src/tests/ecoc22/tests/old_code/Objects_DC_CSGW_TN_OLS.py diff --git a/src/tests/ecoc22/tests/test_functional_bootstrap.py b/src/tests/ecoc22/tests/test_functional_bootstrap.py index 75f2bddf2c3bb21084efb6be3f5957df122da429..aa260d727b7606c20c60c68a4bc9df4c31b14f95 100644 --- a/src/tests/ecoc22/tests/test_functional_bootstrap.py +++ b/src/tests/ecoc22/tests/test_functional_bootstrap.py @@ -13,21 +13,24 @@ # limitations under the License. import logging -from common.proto.context_pb2 import Context, ContextId, Device, DeviceId, Empty, Link, LinkId, Topology, TopologyId +from common.Constants import DEFAULT_CONTEXT_NAME +from common.proto.context_pb2 import ContextId, Empty +from common.tests.LoadScenario import load_scenario_from_descriptor +from common.tools.object_factory.Context import json_context_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient -from tests.Fixtures import context_client, device_client -#from .Objects_BigNet import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES -#from .Objects_DC_CSGW_TN import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, OBJECTS_PER_TOPOLOGY -#from .Objects_DC_CSGW_TN_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, OBJECTS_PER_TOPOLOGY -from .Objects_DC_CSGW_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, OBJECTS_PER_TOPOLOGY - +from tests.Fixtures import context_client, device_client # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +DESCRIPTOR_FILE = 'ecoc22/descriptors_emulated.json' +ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) -def test_scenario_empty(context_client : ContextClient): # pylint: disable=redefined-outer-name +def test_scenario_bootstrap( + context_client : ContextClient, # pylint: disable=redefined-outer-name + device_client : DeviceClient, # pylint: disable=redefined-outer-name +) -> None: # ----- List entities - Ensure database is empty ------------------------------------------------------------------- response = context_client.ListContexts(Empty()) assert len(response.contexts) == 0 @@ -39,53 +42,34 @@ def test_scenario_empty(context_client : ContextClient): # pylint: disable=rede assert len(response.links) == 0 -def test_prepare_environment( - context_client : ContextClient, # pylint: disable=redefined-outer-name - device_client : DeviceClient): # pylint: disable=redefined-outer-name - - for context in CONTEXTS : context_client.SetContext (Context (**context )) - for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology)) - - for device in DEVICES : device_client .AddDevice (Device (**device )) - for topology_id, device_ids, _ in OBJECTS_PER_TOPOLOGY: - topology = Topology() - topology.CopyFrom(context_client.GetTopology(TopologyId(**topology_id))) - - device_ids_in_topology = {device_id.device_uuid.uuid for device_id in topology.device_ids} - func_device_id_not_added = lambda device_id: device_id['device_uuid']['uuid'] not in device_ids_in_topology - func_device_id_json_to_grpc = lambda device_id: DeviceId(**device_id) - device_ids_to_add = list(map(func_device_id_json_to_grpc, filter(func_device_id_not_added, device_ids))) - topology.device_ids.extend(device_ids_to_add) + # ----- Load Scenario ---------------------------------------------------------------------------------------------- + descriptor_loader = load_scenario_from_descriptor( + DESCRIPTOR_FILE, context_client, 
device_client, None, None) - context_client.SetTopology(topology) - for link in LINKS : context_client.SetLink (Link (**link )) - for topology_id, _, link_ids in OBJECTS_PER_TOPOLOGY: - topology = Topology() - topology.CopyFrom(context_client.GetTopology(TopologyId(**topology_id))) - - link_ids_in_topology = {link_id.link_uuid.uuid for link_id in topology.link_ids} - func_link_id_not_added = lambda link_id: link_id['link_uuid']['uuid'] not in link_ids_in_topology - func_link_id_json_to_grpc = lambda link_id: LinkId(**link_id) - link_ids_to_add = list(map(func_link_id_json_to_grpc, filter(func_link_id_not_added, link_ids))) - topology.link_ids.extend(link_ids_to_add) - - context_client.SetTopology(topology) - - -def test_scenario_ready(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links + + for context_uuid, _ in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 + + for context_uuid, _ in descriptor_loader.num_slices.items(): + response = context_client.ListSlices(ContextId(**json_context_id(context_uuid))) + assert len(response.slices) == 0 - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + # This scenario assumes no services are created beforehand + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 diff --git a/src/tests/ecoc22/tests/test_functional_cleanup.py b/src/tests/ecoc22/tests/test_functional_cleanup.py index 017cc991dd5bb49f6f02f178fc4354653b7bea43..34333fb4d84442edf68ed0d75c459263e2bcf964 100644 --- a/src/tests/ecoc22/tests/test_functional_cleanup.py +++ b/src/tests/ecoc22/tests/test_functional_cleanup.py @@ -13,49 +13,72 @@ # limitations under the License. 
import logging +from common.Constants import DEFAULT_CONTEXT_NAME from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId +from common.tools.descriptor.Loader import DescriptorLoader from common.tools.object_factory.Context import json_context_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient -from tests.Fixtures import context_client, device_client -#from .Objects_BigNet import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES -#from .Objects_DC_CSGW_TN import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES -#from .Objects_DC_CSGW_TN_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES -from .Objects_DC_CSGW_OLS import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES - +from tests.Fixtures import context_client, device_client # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +DESCRIPTOR_FILE = 'ecoc22/descriptors_emulated.json' +ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) -def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name +def test_services_removed( + context_client : ContextClient, # pylint: disable=redefined-outer-name + device_client : DeviceClient, # pylint: disable=redefined-outer-name +) -> None: # ----- List entities - Ensure service is removed ------------------------------------------------------------------ + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) + response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links + + for context_uuid, _ in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == 0 + + for context_uuid, _ in descriptor_loader.num_slices.items(): + response = context_client.ListSlices(ContextId(**json_context_id(context_uuid))) + assert len(response.slices) == 0 + + # This scenario assumes no services are created beforehand + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 + - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + # ----- Delete Links, Devices, Topologies, Contexts ---------------------------------------------------------------- + for link in descriptor_loader.links: + context_client.RemoveLink(LinkId(**link['link_id'])) + for device in descriptor_loader.devices: + device_client .DeleteDevice(DeviceId(**device['device_id'])) -def test_scenario_cleanup( - context_client : ContextClient, device_client : DeviceClient): # pylint: disable=redefined-outer-name + for context_uuid, 
topology_list in descriptor_loader.topologies.items(): + for topology in topology_list: + context_client.RemoveTopology(TopologyId(**topology['topology_id'])) - for link in LINKS : context_client.RemoveLink (LinkId (**link ['link_id' ])) - for device in DEVICES : device_client .DeleteDevice (DeviceId (**device ['device_id' ])) - for topology in TOPOLOGIES: context_client.RemoveTopology(TopologyId(**topology['topology_id'])) - for context in CONTEXTS : context_client.RemoveContext (ContextId (**context ['context_id' ])) + for context in descriptor_loader.contexts: + context_client.RemoveContext(ContextId(**context['context_id'])) -def test_scenario_empty_again(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure database is empty again ------------------------------------------------------------- response = context_client.ListContexts(Empty()) assert len(response.contexts) == 0 diff --git a/src/tests/ecoc22/tests/test_functional_create_service.py b/src/tests/ecoc22/tests/test_functional_create_service.py index 8c9ca36a96d161f10ed69c1a86794abf78555571..42fc886c229437de7b738f2f1d76dfbbb11cb330 100644 --- a/src/tests/ecoc22/tests/test_functional_create_service.py +++ b/src/tests/ecoc22/tests/test_functional_create_service.py @@ -13,73 +13,90 @@ # limitations under the License. import logging +from common.Constants import DEFAULT_CONTEXT_NAME from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum +from common.tools.descriptor.Loader import DescriptorLoader from common.tools.grpc.Tools import grpc_message_to_json_string -from compute.tests.mock_osm.MockOSM import MockOSM +from common.tools.object_factory.Context import json_context_id from context.client.ContextClient import ContextClient -from tests.Fixtures import context_client -from .Fixtures import osm_wim -#from .Objects_BigNet import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -#from .Objects_DC_CSGW_TN import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -#from .Objects_DC_CSGW_TN_OLS import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -from .Objects_DC_CSGW_OLS import ( - CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) - +from tests.Fixtures import context_client # pylint: disable=unused-import +from tests.tools.mock_osm.MockOSM import MockOSM +from .Fixtures import osm_wim # pylint: disable=unused-import +from .Objects import WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) +DESCRIPTOR_FILE = 'ecoc22/descriptors_emulated.json' +ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) + +def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name + # ----- List entities - Ensure scenario is ready ------------------------------------------------------------------- + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) -def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name - # ----- List entities - Ensure links are created ------------------------------------------------------------------- response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + 
assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == num_services + + for context_uuid, num_slices in descriptor_loader.num_slices.items(): + response = context_client.ListSlices(ContextId(**json_context_id(context_uuid))) + assert len(response.slices) == num_slices + + # This scenario assumes no services are created beforehand + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 -def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name # ----- Create Service --------------------------------------------------------------------------------------------- service_uuid = osm_wim.create_connectivity_service(WIM_SERVICE_TYPE, WIM_SERVICE_CONNECTION_POINTS) osm_wim.get_connectivity_service_status(service_uuid) -def test_scenario_service_created(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure service is created ------------------------------------------------------------------ response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links - response = context_client.ListServices(ContextId(**CONTEXT_ID)) + response = context_client.ListServices(ADMIN_CONTEXT_ID) LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) assert len(response.services) == 3 # 1xL2NM + 2xTAPI + for service in response.services: service_id = service.service_id response = context_client.ListConnections(service_id) LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) + if service.service_type == ServiceTypeEnum.SERVICETYPE_L2NM: assert len(response.connections) == 2 # 2 connections per service 
(primary + backup) elif service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: assert len(response.connections) == 1 # 1 connection per service + else: + str_service = grpc_message_to_json_string(service) + raise Exception('Unexpected ServiceType: {:s}'.format(str_service)) diff --git a/src/tests/ecoc22/tests/test_functional_delete_service.py b/src/tests/ecoc22/tests/test_functional_delete_service.py index de152ebb71111c9201dfde18262586b242b04083..43241f40e893fa629ea88587894a6850c7af1465 100644 --- a/src/tests/ecoc22/tests/test_functional_delete_service.py +++ b/src/tests/ecoc22/tests/test_functional_delete_service.py @@ -12,92 +12,100 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, pytest -from common.DeviceTypes import DeviceTypeEnum -from common.Settings import get_setting +import logging +from common.Constants import DEFAULT_CONTEXT_NAME from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum -from common.tests.EventTools import EVENT_REMOVE, EVENT_UPDATE, check_events -from common.tools.object_factory.Connection import json_connection_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Service import json_service_id +from common.tools.descriptor.Loader import DescriptorLoader +from common.tools.object_factory.Context import json_context_id from common.tools.grpc.Tools import grpc_message_to_json_string -from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient -from tests.Fixtures import context_client -from .Fixtures import osm_wim -#from .Objects_BigNet import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -#from .Objects_DC_CSGW_TN import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -#from .Objects_DC_CSGW_TN_OLS import ( -# CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) -from .Objects_DC_CSGW_OLS import ( - CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE) +from tests.Fixtures import context_client # pylint: disable=unused-import +from tests.tools.mock_osm.MockOSM import MockOSM +from .Fixtures import osm_wim # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) -DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value -DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value +DESCRIPTOR_FILE = 'ecoc22/descriptors_emulated.json' +ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) -def test_scenario_is_correct(context_client : ContextClient): # pylint: disable=redefined-outer-name +def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name # ----- List entities - Ensure service is created ------------------------------------------------------------------ + with open(DESCRIPTOR_FILE, 'r', encoding='UTF-8') as f: + descriptors = f.read() + + descriptor_loader = DescriptorLoader(descriptors) + response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in 
descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links - response = context_client.ListServices(ContextId(**CONTEXT_ID)) + service_uuids = set() + response = context_client.ListServices(ADMIN_CONTEXT_ID) LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) assert len(response.services) == 3 # 1xL2NM + 2xTAPI + for service in response.services: service_id = service.service_id + + if service.service_type == ServiceTypeEnum.SERVICETYPE_L2NM: + service_uuid = service_id.service_uuid.uuid + service_uuids.add(service_uuid) + osm_wim.conn_info[service_uuid] = {} + response = context_client.ListConnections(service_id) LOGGER.info(' ServiceId[{:s}] => Connections[{:d}] = {:s}'.format( grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response))) + if service.service_type == ServiceTypeEnum.SERVICETYPE_L2NM: assert len(response.connections) == 2 # 2 connections per service (primary + backup) elif service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: assert len(response.connections) == 1 # 1 connection per service + else: + str_service = grpc_message_to_json_string(service) + raise Exception('Unexpected ServiceType: {:s}'.format(str_service)) - -def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name - # ----- Delete Service --------------------------------------------------------------------------------------------- - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) - service_uuids = set() - for service in response.services: - if service.service_type != ServiceTypeEnum.SERVICETYPE_L2NM: continue - service_uuid = service.service_id.service_uuid.uuid - service_uuids.add(service_uuid) - osm_wim.conn_info[service_uuid] = {} - - assert len(service_uuids) == 1 # assume a single service has been created + # Identify service to delete + assert len(service_uuids) == 1 # assume a single L2NM service has been created service_uuid = set(service_uuids).pop() + + # ----- Delete Service --------------------------------------------------------------------------------------------- osm_wim.delete_connectivity_service(service_uuid) -def test_services_removed(context_client : ContextClient): # pylint: disable=redefined-outer-name # ----- List entities - Ensure service is removed ------------------------------------------------------------------ response = context_client.ListContexts(Empty()) - assert len(response.contexts) == len(CONTEXTS) + assert len(response.contexts) == descriptor_loader.num_contexts - response = context_client.ListTopologies(ContextId(**CONTEXT_ID)) - assert len(response.topologies) == len(TOPOLOGIES) + for context_uuid, num_topologies in descriptor_loader.num_topologies.items(): + response = context_client.ListTopologies(ContextId(**json_context_id(context_uuid))) + assert len(response.topologies) == num_topologies response = context_client.ListDevices(Empty()) - assert 
len(response.devices) == len(DEVICES) + assert len(response.devices) == descriptor_loader.num_devices response = context_client.ListLinks(Empty()) - assert len(response.links) == len(LINKS) + assert len(response.links) == descriptor_loader.num_links + + for context_uuid, num_services in descriptor_loader.num_services.items(): + response = context_client.ListServices(ContextId(**json_context_id(context_uuid))) + assert len(response.services) == num_services + + for context_uuid, num_slices in descriptor_loader.num_slices.items(): + response = context_client.ListSlices(ContextId(**json_context_id(context_uuid))) + assert len(response.slices) == num_slices - response = context_client.ListServices(ContextId(**CONTEXT_ID)) - assert len(response.services) == 0 + # This scenario assumes no services are created beforehand + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 diff --git a/src/tests/ofc22/deploy_specs.sh b/src/tests/ofc22/deploy_specs.sh index ffd91da35186fe21f418950493ef797a9af1b522..1318aefa632a51a035bd6ad99f7725040e0983f4 100644 --- a/src/tests/ofc22/deploy_specs.sh +++ b/src/tests/ofc22/deploy_specs.sh @@ -1,18 +1,32 @@ -# Set the URL of your local Docker registry where the images will be uploaded to. -export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" +#!/bin/bash +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# ----- TeraFlowSDN ------------------------------------------------------------ + +# Set the URL of the internal MicroK8s Docker registry where the images will be uploaded to. +export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -# Supported components are: -# context device automation policy service compute monitoring webui -# interdomain slice pathcomp dlt -# dbscanserving opticalattackmitigator opticalattackdetector -# l3_attackmitigator l3_centralizedattackdetector l3_distributedattackdetector +#export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" -# Set the name of the Kubernetes namespace to deploy to. +# Set the name of the Kubernetes namespace to deploy TFS to. export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment @@ -21,6 +35,60 @@ export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" # Set the new Grafana admin password export TFS_GRAFANA_PASSWORD="admin123+" -# If not already set, disable skip-build flag. -# If TFS_SKIP_BUILD is "YES", the containers are not rebuilt-retagged-repushed and existing ones are used. -export TFS_SKIP_BUILD=${TFS_SKIP_BUILD:-""} +# Disable skip-build flag to rebuild the Docker images. 
+export TFS_SKIP_BUILD=""
+
+
+# ----- CockroachDB ------------------------------------------------------------
+
+# Set the namespace where CockroachDB will be deployed.
+export CRDB_NAMESPACE="crdb"
+
+# Set the database username to be used by Context.
+export CRDB_USERNAME="tfs"
+
+# Set the database user's password to be used by Context.
+export CRDB_PASSWORD="tfs123"
+
+# Set the database name to be used by Context.
+export CRDB_DATABASE="tfs"
+
+# Set CockroachDB installation mode to 'single'. This option is convenient for development and testing.
+# See ./deploy/all.sh or ./deploy/crdb.sh for additional details.
+export CRDB_DEPLOY_MODE="single"
+
+# Disable flag for dropping the database, if it exists.
+export CRDB_DROP_DATABASE_IF_EXISTS=""
+
+# Disable flag for re-deploying CockroachDB from scratch.
+export CRDB_REDEPLOY=""
+
+
+# ----- NATS -------------------------------------------------------------------
+
+# Set the namespace where NATS will be deployed.
+export NATS_NAMESPACE="nats"
+
+# Disable flag for re-deploying NATS from scratch.
+export NATS_REDEPLOY=""
+
+
+# ----- QuestDB ----------------------------------------------------------------
+
+# Set the namespace where QuestDB will be deployed.
+export QDB_NAMESPACE="qdb"
+
+# Set the database username to be used by Monitoring.
+export QDB_USERNAME="admin"
+
+# Set the database user's password to be used by Monitoring.
+export QDB_PASSWORD="quest"
+
+# Set the table name to be used by Monitoring.
+export QDB_TABLE="tfs_monitoring"
+
+## Disable flag for dropping the table, if it exists.
+#export QDB_DROP_TABLE_IF_EXISTS=""
+
+# Disable flag for re-deploying QuestDB from scratch.
+export QDB_REDEPLOY=""
diff --git a/src/tests/ofc22/redeploy.sh b/src/tests/ofc22/redeploy.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f0262c0b8a0beabacf0effc28d95737bdac853d1
--- /dev/null
+++ b/src/tests/ofc22/redeploy.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source ofc22/deploy_specs.sh
+./deploy/all.sh
+source tfs_runtime_env_vars.sh
diff --git a/src/tests/ofc22/run_test_01_bootstrap.sh b/src/tests/ofc22/run_test_01_bootstrap.sh
index 61b49b251f927ffb2e845f0c9094d30ea597abc6..1b5bed63ed957d4c6d3f3939386fb9b21000e785 100755
--- a/src/tests/ofc22/run_test_01_bootstrap.sh
+++ b/src/tests/ofc22/run_test_01_bootstrap.sh
@@ -14,4 +14,4 @@
 # limitations under the License.
 
 source tfs_runtime_env_vars.sh
-pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_bootstrap.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_bootstrap.py
diff --git a/src/tests/ofc22/run_test_02_create_service.sh b/src/tests/ofc22/run_test_02_create_service.sh
index 135a3f74fe93d0d7a4da6ef0e02371a040fc1eb3..c82606cc14e423104ad191bb50ca2c05b79decba 100755
--- a/src/tests/ofc22/run_test_02_create_service.sh
+++ b/src/tests/ofc22/run_test_02_create_service.sh
@@ -14,4 +14,4 @@
 # limitations under the License.
 
 source tfs_runtime_env_vars.sh
-pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_create_service.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_create_service.py
diff --git a/src/tests/ofc22/run_test_03_delete_service.sh b/src/tests/ofc22/run_test_03_delete_service.sh
index cbe6714fe91cf1758f62e697e667568d35578181..19540de05dee9dc70e3c2a2ad1c8c9ef1615366f 100755
--- a/src/tests/ofc22/run_test_03_delete_service.sh
+++ b/src/tests/ofc22/run_test_03_delete_service.sh
@@ -14,4 +14,4 @@
 # limitations under the License.
 
 source tfs_runtime_env_vars.sh
-pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_delete_service.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_delete_service.py
diff --git a/src/tests/ofc22/run_test_04_cleanup.sh b/src/tests/ofc22/run_test_04_cleanup.sh
index e88ddbd3227b3f29dfc7f126d5853e0b1d0e06f1..c84d656ed1d5259239c673d812bb60f5e2193823 100755
--- a/src/tests/ofc22/run_test_04_cleanup.sh
+++ b/src/tests/ofc22/run_test_04_cleanup.sh
@@ -14,4 +14,4 @@
 # limitations under the License.
 
 source tfs_runtime_env_vars.sh
-pytest --verbose --log-level=INFO -o log_cli=true -o log_cli_level=INFO src/tests/ofc22/tests/test_functional_cleanup.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_cleanup.py
diff --git a/src/tests/ofc22/run_tests.sh b/src/tests/ofc22/run_tests.sh
index 0ad4be313987b8b5069808873f94840521d4284e..8edc1a431efdcd17e6408a3389f5fc73925ff2b4 100755
--- a/src/tests/ofc22/run_tests.sh
+++ b/src/tests/ofc22/run_tests.sh
@@ -13,32 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-PROJECTDIR=`pwd`
-
-RCFILE=$PROJECTDIR/coverage/.coveragerc
-COVERAGEFILE=$PROJECTDIR/coverage/.coverage
-
-# Configure the correct folder on the .coveragerc file
-cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PROJECTDIR/src+g > $RCFILE
-
-# Destroy old coverage file
-rm -f $COVERAGEFILE
-
-source tfs_runtime_env_vars.sh
-
-# Force a flush of Context database
-kubectl --namespace $TFS_K8S_NAMESPACE exec -it deployment/contextservice --container redis -- redis-cli FLUSHALL
-
 # Run functional tests
-pytest --log-level=INFO --verbose \
-    src/tests/ofc22/tests/test_functional_bootstrap.py
-
-pytest --log-level=INFO --verbose \
-    src/tests/ofc22/tests/test_functional_create_service.py
-
-pytest --log-level=INFO --verbose \
-    src/tests/ofc22/tests/test_functional_delete_service.py
-
-pytest --log-level=INFO --verbose \
-    src/tests/ofc22/tests/test_functional_cleanup.py
+source tfs_runtime_env_vars.sh
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_bootstrap.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_create_service.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_delete_service.py
+pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_cleanup.py
diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py
index 7bfbe9fce558d6a86d965ecb6421369d7f544d4d..79536dc6c5bdeef8b3307080997215362171bd56 100644
--- a/src/tests/ofc22/tests/Objects.py
+++ b/src/tests/ofc22/tests/Objects.py
@@ -18,17 +18,21 @@
 from tests.tools.mock_osm.Tools import connection_point, wim_mapping
 
 # ----- WIM Service Settings -------------------------------------------------------------------------------------------
-WIM_DC1_SITE_ID = '1'
-WIM_DC1_DEVICE_ID = json_device_id('R1-EMU')
-WIM_DC1_ENDPOINT_ID = json_endpoint_id(WIM_DC1_DEVICE_ID, '13/1/2')
+SITE_ID_DC1 = '1'
+DEV_ID_DC1 = json_device_id('R1-EMU')
+EP_ID_DC1 = json_endpoint_id(DEV_ID_DC1, '13/1/2')
 
-WIM_DC2_SITE_ID = '2'
-WIM_DC2_DEVICE_ID = json_device_id('R3-EMU')
-WIM_DC2_ENDPOINT_ID = json_endpoint_id(WIM_DC2_DEVICE_ID, '13/1/2')
+SITE_ID_DC2 = '2'
+DEV_ID_DC2 = json_device_id('R3-EMU')
+EP_ID_DC2 = json_endpoint_id(DEV_ID_DC2, '13/1/2')
 
-WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(WIM_DC1_SITE_ID, WIM_DC1_ENDPOINT_ID)
-WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(WIM_DC2_SITE_ID, WIM_DC2_ENDPOINT_ID)
-WIM_MAPPING = [WIM_MAP_DC1, WIM_MAP_DC2]
+WIM_SEP_DC1, WIM_MAP_DC1 = wim_mapping(SITE_ID_DC1, EP_ID_DC1)
+WIM_SEP_DC2, WIM_MAP_DC2 = wim_mapping(SITE_ID_DC2, EP_ID_DC2)
+
+WIM_MAPPING = [
+    WIM_MAP_DC1,
+    WIM_MAP_DC2,
+]
 
 WIM_SRV_VLAN_ID = 300
 WIM_SERVICE_TYPE = 'ELINE'
diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py
index 71deb9d596b1494e148b140902ca927e5d664dd3..0a104679f6a5f2815c4d1b12d6b00d099b5ae79c 100644
--- a/src/tests/ofc22/tests/test_functional_bootstrap.py
+++ b/src/tests/ofc22/tests/test_functional_bootstrap.py
@@ -13,10 +13,10 @@
 # limitations under the License.
 
 import logging, time
+from common.Constants import DEFAULT_CONTEXT_NAME
 from common.proto.context_pb2 import ContextId, Empty
 from common.proto.monitoring_pb2 import KpiDescriptorList
 from common.tests.LoadScenario import load_scenario_from_descriptor
-from common.tools.grpc.Tools import grpc_message_to_json_string
 from common.tools.object_factory.Context import json_context_id
 from context.client.ContextClient import ContextClient
 from device.client.DeviceClient import DeviceClient
@@ -27,6 +27,7 @@ LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
 DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json'
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
 
 def test_scenario_bootstrap(
     context_client : ContextClient,     # pylint: disable=redefined-outer-name
@@ -66,6 +67,16 @@
         response = context_client.ListServices(ContextId(**json_context_id(context_uuid)))
         assert len(response.services) == 0
 
+    for context_uuid, _ in descriptor_loader.num_slices.items():
+        response = context_client.ListSlices(ContextId(**json_context_id(context_uuid)))
+        assert len(response.slices) == 0
+
+    # This scenario assumes no services are created beforehand
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
+
+
 def test_scenario_kpis_created(
     context_client : ContextClient,         # pylint: disable=redefined-outer-name
     monitoring_client: MonitoringClient,    # pylint: disable=redefined-outer-name
diff --git a/src/tests/ofc22/tests/test_functional_cleanup.py b/src/tests/ofc22/tests/test_functional_cleanup.py
index be807eaa0242f2363b5b6c189ce4de264528a54c..381a88a6a289808d0ff57dd3fe1b1e5d922ae6cb 100644
--- a/src/tests/ofc22/tests/test_functional_cleanup.py
+++ b/src/tests/ofc22/tests/test_functional_cleanup.py
@@ -13,9 +13,10 @@
 # limitations under the License.
 
 import logging
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId
 from common.tools.descriptor.Loader import DescriptorLoader
 from common.tools.object_factory.Context import json_context_id
-from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId
 from context.client.ContextClient import ContextClient
 from device.client.DeviceClient import DeviceClient
 from tests.Fixtures import context_client, device_client    # pylint: disable=unused-import
@@ -24,7 +25,7 @@ LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
 DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json'
-
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
 
 def test_services_removed(
     context_client : ContextClient,     # pylint: disable=redefined-outer-name
@@ -53,6 +54,15 @@
         response = context_client.ListServices(ContextId(**json_context_id(context_uuid)))
         assert len(response.services) == 0
 
+    for context_uuid, _ in descriptor_loader.num_slices.items():
+        response = context_client.ListSlices(ContextId(**json_context_id(context_uuid)))
+        assert len(response.slices) == 0
+
+    # This scenario assumes no services are created beforehand
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
+
 
     # ----- Delete Links, Devices, Topologies, Contexts ----------------------------------------------------------------
     for link in descriptor_loader.links:
diff --git a/src/tests/ofc22/tests/test_functional_create_service.py b/src/tests/ofc22/tests/test_functional_create_service.py
index e606d060d52631ba72e191d7c025bd7b43048b39..2ff8f34eb7b521c19b405e90535fad0e3ef52f2a 100644
--- a/src/tests/ofc22/tests/test_functional_create_service.py
+++ b/src/tests/ofc22/tests/test_functional_create_service.py
@@ -13,26 +13,24 @@
 # limitations under the License.
 
 import logging, random
-from common.DeviceTypes import DeviceTypeEnum
-from common.proto.context_pb2 import ContextId, Empty
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
 from common.tools.descriptor.Loader import DescriptorLoader
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from common.tools.object_factory.Context import json_context_id
 from context.client.ContextClient import ContextClient
 from monitoring.client.MonitoringClient import MonitoringClient
-from tests.Fixtures import context_client, device_client, monitoring_client # pylint: disable=unused-import
+from tests.Fixtures import context_client, device_client, monitoring_client    # pylint: disable=unused-import
 from tests.tools.mock_osm.MockOSM import MockOSM
-from .Fixtures import osm_wim # pylint: disable=unused-import
+from .Fixtures import osm_wim    # pylint: disable=unused-import
 from .Objects import WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value
-DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value
-
 DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json'
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
 
 def test_service_creation(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name
     # ----- List entities - Ensure scenario is ready -------------------------------------------------------------------
@@ -56,7 +54,16 @@
 
     for context_uuid, num_services in descriptor_loader.num_services.items():
         response = context_client.ListServices(ContextId(**json_context_id(context_uuid)))
-        assert len(response.services) == 0
+        assert len(response.services) == num_services
+
+    for context_uuid, num_slices in descriptor_loader.num_slices.items():
+        response = context_client.ListSlices(ContextId(**json_context_id(context_uuid)))
+        assert len(response.slices) == num_slices
+
+    # This scenario assumes no services are created beforehand
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
 
 
     # ----- Create Service ---------------------------------------------------------------------------------------------
@@ -78,18 +85,24 @@
     response = context_client.ListLinks(Empty())
     assert len(response.links) == descriptor_loader.num_links
 
-    for context_uuid, num_services in descriptor_loader.num_services.items():
-        response = context_client.ListServices(ContextId(**json_context_id(context_uuid)))
-        LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response)))
-        assert len(response.services) == 2*num_services # OLS & L3NM => (L3NM + TAPI)
-
-        for service in response.services:
-            service_id = service.service_id
-            response = context_client.ListConnections(service_id)
-            LOGGER.info('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
-                grpc_message_to_json_string(service_id), len(response.connections),
-                grpc_message_to_json_string(response)))
-            assert len(response.connections) == 1 # one connection per service
+    response = context_client.ListServices(ADMIN_CONTEXT_ID)
+    LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response)))
+    assert len(response.services) == 2 # OLS & L3NM => (L3NM + TAPI)
+
+    for service in response.services:
+        service_id = service.service_id
+        response = context_client.ListConnections(service_id)
+        LOGGER.info('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
+            grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response)))
+
+        if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM:
+            assert len(response.connections) == 1 # 1 connection per service
+        elif service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE:
+            assert len(response.connections) == 1 # 1 connection per service
+        else:
+            str_service = grpc_message_to_json_string(service)
+            raise Exception('Unexpected ServiceType: {:s}'.format(str_service))
+
 
 
 def test_scenario_kpi_values_created(
diff --git a/src/tests/ofc22/tests/test_functional_delete_service.py b/src/tests/ofc22/tests/test_functional_delete_service.py
index 48c2a0d5a16db038ac35c1226c33989d31a23e74..1c47f8b310f5388ef28356da66e637f209fa148b 100644
--- a/src/tests/ofc22/tests/test_functional_delete_service.py
+++ b/src/tests/ofc22/tests/test_functional_delete_service.py
@@ -14,7 +14,6 @@
 import logging
 from common.Constants import DEFAULT_CONTEXT_NAME
-from common.DeviceTypes import DeviceTypeEnum
 from common.proto.context_pb2 import ContextId, Empty, ServiceTypeEnum
 from common.tools.descriptor.Loader import DescriptorLoader
 from common.tools.object_factory.Context import json_context_id
@@ -22,17 +21,13 @@
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from context.client.ContextClient import ContextClient
 from tests.Fixtures import context_client    # pylint: disable=unused-import
 from tests.tools.mock_osm.MockOSM import MockOSM
-from .Fixtures import osm_wim # pylint: disable=unused-import
-
+from .Fixtures import osm_wim    # pylint: disable=unused-import
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-DEVTYPE_EMU_PR = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value
-DEVTYPE_EMU_OLS = DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value
-
 DESCRIPTOR_FILE = 'ofc22/descriptors_emulated.json'
-
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
 def test_service_removal(context_client : ContextClient, osm_wim : MockOSM): # pylint: disable=redefined-outer-name
     # ----- List entities - Ensure service is created ------------------------------------------------------------------
@@ -54,30 +49,38 @@
     response = context_client.ListLinks(Empty())
     assert len(response.links) == descriptor_loader.num_links
 
-    l3nm_service_uuids = set()
-    response = context_client.ListServices(ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)))
+    service_uuids = set()
+    response = context_client.ListServices(ADMIN_CONTEXT_ID)
+    LOGGER.info('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response)))
     assert len(response.services) == 2 # OLS & L3NM => (L3NM + TAPI)
+
     for service in response.services:
         service_id = service.service_id
 
         if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM:
             service_uuid = service_id.service_uuid.uuid
-            l3nm_service_uuids.add(service_uuid)
+            service_uuids.add(service_uuid)
             osm_wim.conn_info[service_uuid] = {}
 
         response = context_client.ListConnections(service_id)
         LOGGER.info('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
-            grpc_message_to_json_string(service_id), len(response.connections),
-            grpc_message_to_json_string(response)))
-        assert len(response.connections) == 1 # one connection per service
+            grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response)))
+
+        if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM:
+            assert len(response.connections) == 1 # 1 connection per service
+        elif service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE:
+            assert len(response.connections) == 1 # 1 connection per service
+        else:
+            str_service = grpc_message_to_json_string(service)
+            raise Exception('Unexpected ServiceType: {:s}'.format(str_service))
 
     # Identify service to delete
-    assert len(l3nm_service_uuids) == 1 # assume a single L3NM service has been created
-    l3nm_service_uuid = set(l3nm_service_uuids).pop()
+    assert len(service_uuids) == 1 # assume a single L3NM service has been created
+    service_uuid = set(service_uuids).pop()
 
 
     # ----- Delete Service ---------------------------------------------------------------------------------------------
-    osm_wim.delete_connectivity_service(l3nm_service_uuid)
+    osm_wim.delete_connectivity_service(service_uuid)
 
 
     # ----- List entities - Ensure service is removed ------------------------------------------------------------------
@@ -96,4 +99,13 @@
 
     for context_uuid, num_services in descriptor_loader.num_services.items():
         response = context_client.ListServices(ContextId(**json_context_id(context_uuid)))
-        assert len(response.services) == 0
+        assert len(response.services) == num_services
+
+    for context_uuid, num_slices in descriptor_loader.num_slices.items():
+        response = context_client.ListSlices(ContextId(**json_context_id(context_uuid)))
+        assert len(response.slices) == num_slices
+
+    # This scenario assumes no services are created beforehand
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
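
For reference, the end-to-end OFC22 flow that the scripts touched above implement reduces to the shell sequence below. This is a minimal sketch assembled from redeploy.sh and run_tests.sh in this changeset; the working directory, and the availability of MicroK8s and the ./deploy scripts, are assumptions and not part of the diff itself.

# Deploy/redeploy TeraFlowSDN with the OFC22 settings (what redeploy.sh does).
source ofc22/deploy_specs.sh        # redeploy.sh uses this relative path; adjust it to src/tests/ofc22/deploy_specs.sh if run from the repo root
./deploy/all.sh                     # deploys the components selected in deploy_specs.sh (see the CRDB/NATS/QDB variables above)
source tfs_runtime_env_vars.sh      # load the environment variables generated by the deployment

# Run the functional tests in order (what run_tests.sh does after this change).
pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_bootstrap.py
pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_create_service.py
pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_delete_service.py
pytest --verbose --log-level=INFO src/tests/ofc22/tests/test_functional_cleanup.py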