diff --git a/manifests/nginx_ingress_http.yaml b/manifests/nginx_ingress_http.yaml index 58925dab8e18df6532fc72ae9165a65e6c0b3771..f8546eaa82daeefea00c3d0619d413f31fa593a5 100644 --- a/manifests/nginx_ingress_http.yaml +++ b/manifests/nginx_ingress_http.yaml @@ -92,6 +92,13 @@ spec: name: nbiservice port: number: 8080 + - path: /()(camara/.*) + pathType: Prefix + backend: + service: + name: nbiservice + port: + number: 8080 - path: /()(agent-probes/.*) pathType: Prefix backend: diff --git a/my_deploy.sh b/my_deploy.sh old mode 100755 new mode 100644 diff --git a/proto/qos_profile.proto b/proto/qos_profile.proto index e0b61ff17af3dc0758788d5f71b0b93ef1549e72..71137fe9dc5115ba9a8575547ab2c50bdc77e6ea 100644 --- a/proto/qos_profile.proto +++ b/proto/qos_profile.proto @@ -47,12 +47,19 @@ message QoSProfile { int32 packetErrorLossRate = 16; } +message QoSProfileList { + repeated QoSProfile qos_profiles = 1; +} + +message ConstraintList { + repeated context.Constraint constraints = 1; +} service QoSProfileService { - rpc CreateQoSProfile (QoSProfile ) returns ( QoSProfile ) {} - rpc UpdateQoSProfile (QoSProfile ) returns ( QoSProfile ) {} - rpc DeleteQoSProfile (context.QoSProfileId ) returns ( context.Empty ) {} - rpc GetQoSProfile (context.QoSProfileId ) returns ( QoSProfile ) {} - rpc GetQoSProfiles (context.Empty ) returns (stream QoSProfile ) {} - rpc GetConstraintListFromQoSProfile (QoDConstraintsRequest) returns (stream context.Constraint) {} + rpc CreateQoSProfile (QoSProfile ) returns (QoSProfile ) {} + rpc UpdateQoSProfile (QoSProfile ) returns (QoSProfile ) {} + rpc DeleteQoSProfile (context.QoSProfileId ) returns (context.Empty ) {} + rpc GetQoSProfile (context.QoSProfileId ) returns (QoSProfile ) {} + rpc GetQoSProfiles (context.Empty ) returns (QoSProfileList) {} + rpc GetConstraintsFromQoSProfile(QoDConstraintsRequest) returns (ConstraintList) {} } diff --git a/scripts/run_tests_locally-nbi-all.sh b/scripts/run_tests_locally-nbi-all.sh index 
a8b01b31aa89214cfc5108ddc0f245a171fd59b1..81bff623bdf8643467a37134fe503657ad2b1c5e 100755 --- a/scripts/run_tests_locally-nbi-all.sh +++ b/scripts/run_tests_locally-nbi-all.sh @@ -47,9 +47,7 @@ KAFKA_IP=$(docker inspect kafka --format "{{.NetworkSettings.Networks.teraflowbr echo "Kafka IP: $KAFKA_IP" docker run --name mock_tfs_nbi_dependencies -d -p 10000:10000 \ - --network=teraflowbridge \ - --env BIND_ADDRESS=0.0.0.0 \ - --env BIND_PORT=10000 \ + --network=teraflowbridge --env BIND_ADDRESS=0.0.0.0 --env BIND_PORT=10000 \ --env LOG_LEVEL=INFO \ mock_tfs_nbi_dependencies:test @@ -69,23 +67,25 @@ printf "\n" sleep 5 # Give extra time to NBI to get ready docker ps -a -docker logs kafka +#docker logs kafka docker logs mock_tfs_nbi_dependencies docker logs nbi # helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_core.py --junitxml=/opt/results/${IMAGE_NAME}_report_core.xml" -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_tfs_api.py --junitxml=/opt/results/${IMAGE_NAME}_report_tfs_api.xml" -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_l2vpn.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_l2vpn.xml" -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_network.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_network.xml" -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_l3vpn.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_l3vpn.xml" -docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_etsi_bwm.py --junitxml=/opt/results/${IMAGE_NAME}_report_etsi_bwm.xml" -docker exec -i nbi bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" 
+docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_core.py --junitxml=/opt/results/nbi_report_core.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_tfs_api.py --junitxml=/opt/results/nbi_report_tfs_api.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_l2vpn.py --junitxml=/opt/results/nbi_report_ietf_l2vpn.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_network.py --junitxml=/opt/results/nbi_report_ietf_network.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_ietf_l3vpn.py --junitxml=/opt/results/nbi_report_ietf_l3vpn.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_etsi_bwm.py --junitxml=/opt/results/nbi_report_etsi_bwm.xml" +docker exec -i nbi bash -c "coverage run --append -m pytest --log-level=INFO --verbose nbi/tests/test_camara_qod.py --junitxml=/opt/results/nbi_report_camara_qod.xml" +docker exec -i nbi bash -c "coverage report --include='nbi/*' --show-missing" #docker logs mock_tfs_nbi_dependencies -#docker logs nbi +docker logs nbi #docker logs kafka -docker rm -f mock_tfs_nbi_dependencies nbi +docker rm -f nbi +docker rm -f mock_tfs_nbi_dependencies docker rm -f kafka docker network rm teraflowbridge diff --git a/scripts/run_tests_locally-nbi-core.sh b/scripts/run_tests_locally-nbi-core.sh deleted file mode 100755 index e6eb06a622a864283dfdbda6d70062ce3d35f1d5..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-core.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_core.py diff --git a/scripts/run_tests_locally-nbi-etsi-bwm.sh b/scripts/run_tests_locally-nbi-etsi-bwm.sh deleted file mode 100755 index a335fbe4fd5f0157307535752bd03f73311e20b7..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-etsi-bwm.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_etsi_bwm.py diff --git a/scripts/run_tests_locally-nbi-ietf-l2vpn.sh b/scripts/run_tests_locally-nbi-ietf-l2vpn.sh deleted file mode 100755 index 19556ddf0f610b5b8866cb035d33e106ac6dd1fc..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-ietf-l2vpn.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_ietf_l2vpn.py diff --git a/scripts/run_tests_locally-nbi-ietf-l3vpn.sh b/scripts/run_tests_locally-nbi-ietf-l3vpn.sh deleted file mode 100755 index 01cf5d975bc106212b69a9ca71b90f2a6e0b57ea..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-ietf-l3vpn.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_ietf_l3vpn.py diff --git a/scripts/run_tests_locally-nbi-ietf-network.sh b/scripts/run_tests_locally-nbi-ietf-network.sh deleted file mode 100755 index 401e2615c0efe7b083fde5afd3bf8c3c9e0bac34..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-ietf-network.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_ietf_network.py diff --git a/scripts/run_tests_locally-nbi-ietf-slice.sh b/scripts/run_tests_locally-nbi-ietf-slice.sh deleted file mode 100755 index bf53f18b9a37f9248b072ae2c699b5874fa2c869..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-ietf-slice.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_slice_2.py diff --git a/scripts/run_tests_locally-nbi-tfs-api.sh b/scripts/run_tests_locally-nbi-tfs-api.sh deleted file mode 100755 index e27faa8c271617dcc67eb23c61f96a29144f56d2..0000000000000000000000000000000000000000 --- a/scripts/run_tests_locally-nbi-tfs-api.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -PROJECTDIR=`pwd` - -cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc - -# Run unitary tests and analyze coverage of code at same time -# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0 -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - nbi/tests/test_tfs_api.py diff --git a/src/common/tests/MockServicerImpl_QoSProfile.py b/src/common/tests/MockServicerImpl_QoSProfile.py new file mode 100644 index 0000000000000000000000000000000000000000..8769472e6d5b7c8b62b1ccdb967dad1bb3a2997d --- /dev/null +++ b/src/common/tests/MockServicerImpl_QoSProfile.py @@ -0,0 +1,75 @@ +# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc, logging +from typing import Iterator +from common.proto.context_pb2 import Constraint, Empty, QoSProfileId +from common.proto.qos_profile_pb2 import ConstraintList, QoDConstraintsRequest, QoSProfile, QoSProfileList +from common.proto.qos_profile_pb2_grpc import QoSProfileServiceServicer +from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.object_factory.Constraint import json_constraint_qos_profile, json_constraint_schedule +from .InMemoryObjectDatabase import InMemoryObjectDatabase + +LOGGER = logging.getLogger(__name__) + +class MockServicerImpl_QoSProfile(QoSProfileServiceServicer): + def __init__(self): + LOGGER.debug('[__init__] Creating Servicer...') + self.obj_db = InMemoryObjectDatabase() + LOGGER.debug('[__init__] Servicer Created') + + def GetQoSProfiles(self, request : Empty, context : grpc.ServicerContext) -> QoSProfileList: + LOGGER.debug('[GetQoSProfiles] request={:s}'.format(grpc_message_to_json_string(request))) + reply = QoSProfileList(qos_profiles=self.obj_db.get_entries('qos_profile')) + LOGGER.debug('[GetQoSProfiles] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply + + def GetQoSProfile(self, request : QoSProfileId, context : grpc.ServicerContext) -> QoSProfile: + LOGGER.debug('[GetQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) + reply = self.obj_db.get_entry('qos_profile', request.qos_profile_id.uuid, context) + LOGGER.debug('[GetQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply + + def CreateQoSProfile(self, request : QoSProfile, context : grpc.ServicerContext) -> QoSProfile: + LOGGER.debug('[CreateQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) + reply = self.obj_db.set_entry('qos_profile', request.qos_profile_id.qos_profile_id.uuid, request) + LOGGER.debug('[CreateQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply + + def UpdateQoSProfile(self, request : 
QoSProfile, context : grpc.ServicerContext) -> QoSProfile: + LOGGER.debug('[UpdateQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) + reply = self.obj_db.set_entry('qos_profile', request.qos_profile_id.qos_profile_id.uuid, request) + LOGGER.debug('[UpdateQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply + + def DeleteQoSProfile(self, request : QoSProfileId, context : grpc.ServicerContext) -> Empty: + LOGGER.debug('[DeleteQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) + self.obj_db.del_entry('qos_profile', request.qos_profile_id.uuid, context) + reply = Empty() + LOGGER.debug('[DeleteQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply + + def GetConstraintsFromQoSProfile( + self, request: QoDConstraintsRequest, context: grpc.ServicerContext + ) -> ConstraintList: + LOGGER.debug('[GetConstraintsFromQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) + qos_profile = self.obj_db.get_entry( + 'qos_profile', request.qos_profile_id.qos_profile_id.uuid, context + ) + reply = ConstraintList(constraints=[ + Constraint(**json_constraint_qos_profile(qos_profile.qos_profile_id, qos_profile.name)), + Constraint(**json_constraint_schedule(request.start_timestamp, request.duration / 86400)), + ]) + LOGGER.debug('[GetConstraintsFromQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply diff --git a/src/common/tools/grpc/Constraints.py b/src/common/tools/grpc/Constraints.py index b650662191cacf24a0d939fe4ff9445c07469e8d..a395f4cc303185181094fbf5aa434cf3ccc075dd 100644 --- a/src/common/tools/grpc/Constraints.py +++ b/src/common/tools/grpc/Constraints.py @@ -16,9 +16,8 @@ # Ref: https://datatracker.ietf.org/doc/html/rfc8466 -import json from typing import Any, Dict, List, Optional, Tuple -from common.proto.context_pb2 import Constraint, ConstraintActionEnum, EndPointId +from common.proto.context_pb2 import Constraint, 
ConstraintActionEnum, EndPointId, QoSProfileId from common.tools.grpc.Tools import grpc_message_to_json_string def update_constraint_custom_scalar( @@ -81,6 +80,23 @@ def update_constraint_custom_dict( constraint.custom.constraint_value = json.dumps(json_constraint_value, sort_keys=True) return constraint +def update_constraint_schedule( + constraints, start_timestamp : float, duration_days : float, + new_action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Constraint: + for constraint in constraints: + if constraint.WhichOneof('constraint') != 'schedule': continue + break # found, end loop + else: + # not found, add it + constraint = constraints.add() # pylint: disable=no-member + + constraint.action = new_action + + constraint.schedule.start_timestamp = start_timestamp + constraint.schedule.duration_days = duration_days + return constraint + def update_constraint_endpoint_location( constraints, endpoint_id : EndPointId, region : Optional[str] = None, gps_position : Optional[Tuple[float, float]] = None, @@ -221,6 +237,23 @@ def update_constraint_sla_isolation( constraint.sla_isolation.isolation_level.append(isolation_level) return constraint +def update_constraint_qos_profile( + constraints, qos_profile_id : QoSProfileId, qos_profile_name : str, + new_action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Constraint: + for constraint in constraints: + if constraint.WhichOneof('constraint') != 'qos_profile': continue + break # found, end loop + else: + # not found, add it + constraint = constraints.add() # pylint: disable=no-member + + constraint.action = new_action + + constraint.qos_profile.qos_profile_id.qos_profile_id.uuid = qos_profile_id.qos_profile_id.uuid + constraint.qos_profile.qos_profile_name = qos_profile_name + return constraint + def copy_constraints(source_constraints, target_constraints): for source_constraint in source_constraints: constraint_kind = source_constraint.WhichOneof('constraint') @@ 
-240,6 +273,11 @@ def copy_constraints(source_constraints, target_constraints): update_constraint_custom_scalar( target_constraints, constraint_type, constraint_value, raise_if_differs=raise_if_differs) + elif constraint_kind == 'schedule': + start_timestamp = source_constraint.schedule.start_timestamp + duration_days = source_constraint.schedule.duration_days + update_constraint_schedule(target_constraints, start_timestamp, duration_days) + elif constraint_kind == 'endpoint_location': endpoint_id = source_constraint.endpoint_location.endpoint_id location = source_constraint.endpoint_location.location @@ -282,5 +320,10 @@ def copy_constraints(source_constraints, target_constraints): isolation_levels = sla_isolation.isolation_level update_constraint_sla_isolation(target_constraints, isolation_levels) + elif constraint_kind == 'qos_profile': + qos_profile_id = source_constraint.qos_profile.qos_profile_id + qos_profile_name = source_constraint.qos_profile.qos_profile_name + update_constraint_qos_profile(target_constraints, qos_profile_id, qos_profile_name) + else: raise NotImplementedError('Constraint({:s})'.format(grpc_message_to_json_string(source_constraint))) diff --git a/src/common/tools/object_factory/ConfigRule.py b/src/common/tools/object_factory/ConfigRule.py index 9ba7066bd9f53963d715b2d34cc58227ddee0b05..df3d24eba4bff772b523cfb5e1dff98d1192067e 100644 --- a/src/common/tools/object_factory/ConfigRule.py +++ b/src/common/tools/object_factory/ConfigRule.py @@ -16,12 +16,14 @@ import json from typing import Any, Dict, Union from common.proto.context_pb2 import ConfigActionEnum -def json_config_rule(action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]]): +def json_config_rule( + action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]] +) -> Dict: if not isinstance(resource_value, str): resource_value = json.dumps(resource_value, sort_keys=True) return {'action': action, 'custom': 
{'resource_key': resource_key, 'resource_value': resource_value}} -def json_config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]): +def json_config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]) -> Dict: return json_config_rule(ConfigActionEnum.CONFIGACTION_SET, resource_key, resource_value) -def json_config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]): +def json_config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]) -> Dict: return json_config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value) diff --git a/src/common/tools/object_factory/Constraint.py b/src/common/tools/object_factory/Constraint.py index dd7ed93070fb7153683b5064460f3f46373abc99..112441fec44faa0e2f763f7c89f144ace521d887 100644 --- a/src/common/tools/object_factory/Constraint.py +++ b/src/common/tools/object_factory/Constraint.py @@ -15,44 +15,96 @@ import json from typing import Any, Dict, List, Union -def json_constraint_custom(constraint_type : str, constraint_value : Union[str, Dict[str, Any]]) -> Dict: +from common.proto.context_pb2 import ConstraintActionEnum + + +def json_constraint_custom( + constraint_type : str, constraint_value : Union[str, Dict[str, Any]], + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: if not isinstance(constraint_value, str): constraint_value = json.dumps(constraint_value, sort_keys=True) - return {'custom': {'constraint_type': constraint_type, 'constraint_value': constraint_value}} + return {'action': action, 'custom': { + 'constraint_type': constraint_type, 'constraint_value': constraint_value + }} -def json_constraint_schedule(start_timestamp : float, duration_days : float) -> Dict: - return {'schedule': {'start_timestamp': start_timestamp, 'duration_days': duration_days}} +def json_constraint_schedule( + start_timestamp : float, duration_days : float, + action : ConstraintActionEnum = 
ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'schedule': { + 'start_timestamp': start_timestamp, 'duration_days': duration_days + }} -def json_constraint_endpoint_location_region(endpoint_id : Dict, region : str) -> Dict: - return {'endpoint_location': {'endpoint_id': endpoint_id, 'location': {'region': region}}} +def json_constraint_endpoint_location_region( + endpoint_id : Dict, region : str, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'endpoint_location': { + 'endpoint_id': endpoint_id, 'location': {'region': region} + }} -def json_constraint_endpoint_location_gps(endpoint_id : Dict, latitude : float, longitude : float) -> Dict: +def json_constraint_endpoint_location_gps( + endpoint_id : Dict, latitude : float, longitude : float, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: gps_position = {'latitude': latitude, 'longitude': longitude} - return {'endpoint_location': {'endpoint_id': endpoint_id, 'location': {'gps_position': gps_position}}} + return {'action': action, 'endpoint_location': { + 'endpoint_id': endpoint_id, 'location': {'gps_position': gps_position} + }} -def json_constraint_endpoint_priority(endpoint_id : Dict, priority : int) -> Dict: - return {'endpoint_priority': {'endpoint_id': endpoint_id, 'priority': priority}} +def json_constraint_endpoint_priority( + endpoint_id : Dict, priority : int, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'endpoint_priority': { + 'endpoint_id': endpoint_id, 'priority': priority + }} -def json_constraint_sla_capacity(capacity_gbps : float) -> Dict: - return {'sla_capacity': {'capacity_gbps': capacity_gbps}} +def json_constraint_sla_capacity( + capacity_gbps : float, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'sla_capacity': { + 
'capacity_gbps': capacity_gbps + }} -def json_constraint_sla_latency(e2e_latency_ms : float) -> Dict: - return {'sla_latency': {'e2e_latency_ms': e2e_latency_ms}} +def json_constraint_sla_latency( + e2e_latency_ms : float, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'sla_latency': { + 'e2e_latency_ms': e2e_latency_ms + }} -def json_constraint_sla_availability(num_disjoint_paths : int, all_active : bool, availability : float) -> Dict: - return {'sla_availability': { +def json_constraint_sla_availability( + num_disjoint_paths : int, all_active : bool, availability : float, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'sla_availability': { 'num_disjoint_paths': num_disjoint_paths, 'all_active': all_active, 'availability': availability }} -def json_constraint_sla_isolation(isolation_levels : List[int]) -> Dict: - return {'sla_isolation': {'isolation_level': isolation_levels}} +def json_constraint_sla_isolation( + isolation_levels : List[int], + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'sla_isolation': { + 'isolation_level': isolation_levels + }} def json_constraint_exclusions( is_permanent : bool = False, device_ids : List[Dict] = [], endpoint_ids : List[Dict] = [], - link_ids : List[Dict] = [] + link_ids : List[Dict] = [], + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET +) -> Dict: + return {'action': action, 'exclusions': { + 'is_permanent': is_permanent, 'device_ids': device_ids, 'endpoint_ids': endpoint_ids, 'link_ids': link_ids + }} + +def json_constraint_qos_profile( + qos_profile_id : Dict, qos_profile_name : int, + action : ConstraintActionEnum = ConstraintActionEnum.CONSTRAINTACTION_SET ) -> Dict: - return {'exclusions': { - 'is_permanent' : is_permanent, - 'device_ids' : device_ids, - 'endpoint_ids' : endpoint_ids, - 
'link_ids' : link_ids, + return {'action': action, 'qos_profile': { + 'qos_profile_id': qos_profile_id, 'qos_profile_name': qos_profile_name }} diff --git a/src/nbi/.gitlab-ci.yml b/src/nbi/.gitlab-ci.yml index 58a21a2cf84ab8736f9c0fd17fa1a33cb5b2ae24..8456844b7823207a6d7b92306e8f47f59de5b5b1 100644 --- a/src/nbi/.gitlab-ci.yml +++ b/src/nbi/.gitlab-ci.yml @@ -115,6 +115,7 @@ unit_test nbi: - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_ietf_network.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_network.xml" - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_ietf_l3vpn.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_l3vpn.xml" - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_etsi_bwm.py --junitxml=/opt/results/${IMAGE_NAME}_report_etsi_bwm.xml" + - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_camara_qod.py --junitxml=/opt/results/${IMAGE_NAME}_report_camara_qod.xml" - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' after_script: diff --git a/src/nbi/Dockerfile b/src/nbi/Dockerfile index a1487572743856a5a30fa03aedaa6ef1c6a4bf9e..fa0c6f1c3adf7c61fae770e8fad5116226b1e11e 100644 --- a/src/nbi/Dockerfile +++ b/src/nbi/Dockerfile @@ -83,6 +83,8 @@ COPY src/slice/__init__.py slice/__init__.py COPY src/slice/client/. slice/client/ COPY src/qkd_app/__init__.py qkd_app/__init__.py COPY src/qkd_app/client/. qkd_app/client/ +COPY src/qos_profile/__init__.py qos_profile/__init__.py +COPY src/qos_profile/client/. qos_profile/client/ COPY src/vnt_manager/__init__.py vnt_manager/__init__.py COPY src/vnt_manager/client/. 
vnt_manager/client/ RUN mkdir -p /var/teraflow/tests/tools diff --git a/src/nbi/service/_tools/HttpStatusCodes.py b/src/nbi/service/_tools/HttpStatusCodes.py index 56ea475c7657c80957f0d34ec5c8a3560d68d20a..811cf4e757960706020ad44d5de66910df1efa14 100644 --- a/src/nbi/service/_tools/HttpStatusCodes.py +++ b/src/nbi/service/_tools/HttpStatusCodes.py @@ -14,7 +14,10 @@ HTTP_OK = 200 HTTP_CREATED = 201 +HTTP_ACCEPTED = 202 HTTP_NOCONTENT = 204 HTTP_BADREQUEST = 400 +HTTP_NOTFOUND = 404 +HTTP_UNSUPMEDIATYPE = 415 HTTP_SERVERERROR = 500 -HTTP_GATEWAYTIMEOUT = 504 \ No newline at end of file +HTTP_GATEWAYTIMEOUT = 504 diff --git a/src/nbi/service/app.py b/src/nbi/service/app.py index 99f66a94cbe85983d6de9cb9247f0a551d5a8da3..e992666fd062f862a1a3d94fbd9099757e6091bd 100644 --- a/src/nbi/service/app.py +++ b/src/nbi/service/app.py @@ -29,6 +29,7 @@ from common.Settings import ( wait_for_environment_variables ) from .NbiApplication import NbiApplication +from .camara_qod import register_camara_qod from .etsi_bwm import register_etsi_bwm_api from .health_probes import register_health_probes from .ietf_acl import register_ietf_acl @@ -96,6 +97,7 @@ register_ietf_acl (nbi_app) register_qkd_app (nbi_app) #register_topology_updates(nbi_app) # does not work; check if eventlet-grpc side effects register_vntm_recommend (nbi_app) +register_camara_qod (nbi_app) LOGGER.info('All connectors registered') nbi_app.dump_configuration() diff --git a/src/nbi/service/camara_qod/Resources.py b/src/nbi/service/camara_qod/Resources.py new file mode 100644 index 0000000000000000000000000000000000000000..a33d7942f041df8e0a99fc44360cde419141a1ad --- /dev/null +++ b/src/nbi/service/camara_qod/Resources.py @@ -0,0 +1,322 @@ +# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import copy, grpc, grpc._channel, logging
from typing import Dict
from uuid import uuid4
from flask_restful import Resource, request
from common.proto.context_pb2 import Empty, QoSProfileId, Uuid
from common.Constants import DEFAULT_CONTEXT_NAME
from common.tools.context_queries.Service import get_service_by_uuid
from common.tools.grpc.Tools import grpc_message_to_json_string
from context.client.ContextClient import ContextClient
from qos_profile.client.QoSProfileClient import QoSProfileClient
from service.client.ServiceClient import ServiceClient
from .Tools import (
    create_qos_profile_from_json, grpc_context_id, grpc_service_id,
    grpc_message_to_qos_table_data, QOD_2_service, service_2_qod
)
from nbi.service._tools.HttpStatusCodes import (
    HTTP_ACCEPTED, HTTP_BADREQUEST, HTTP_CREATED, HTTP_NOCONTENT, HTTP_NOTFOUND,
    HTTP_OK, HTTP_SERVERERROR, HTTP_UNSUPMEDIATYPE
)

LOGGER = logging.getLogger(__name__)

class _Resource(Resource):
    """Base class for CAMARA QoD REST resources; instantiates the shared gRPC clients."""
    def __init__(self) -> None:
        super().__init__()
        self.context_client = ContextClient()
        self.qos_profile_client = QoSProfileClient()
        self.service_client = ServiceClient()

def compose_error(msg_error, http_status_code):
    """Log msg_error (with traceback when called inside an except block) and build
    the (json_body, http_status_code) tuple Flask-RESTful returns to the client."""
    LOGGER.exception(msg_error)
    return {"error": msg_error}, http_status_code

def compose_internal_server_error(msg_error):
    """Shortcut for a 500 Internal Server Error response."""
    return compose_error(msg_error, HTTP_SERVERERROR)

def compose_bad_request_error(msg_error):
    """Shortcut for a 400 Bad Request response."""
    return compose_error(msg_error, HTTP_BADREQUEST)

def compose_not_found_error(msg_error):
    """Shortcut for a 404 Not Found response."""
    return compose_error(msg_error, HTTP_NOTFOUND)

def compose_unsupported_media_type_error():
    """Shortcut for a 415 Unsupported Media Type response (non-JSON payload)."""
    msg_error = "JSON payload is required to proceed"
    return compose_error(msg_error, HTTP_UNSUPMEDIATYPE)



##### PROFILES #########################################################################################################

class ProfileList(_Resource):
    """REST resource for the QoS profile collection: create (POST) and list (GET)."""

    def post(self):
        """Create a new QoS profile; a fresh UUID is always assigned server-side."""
        if not request.is_json: return compose_unsupported_media_type_error()

        request_data : Dict = request.get_json()
        request_data_with_id = copy.deepcopy(request_data)
        request_data_with_id["qos_profile_id"] = str(uuid4())

        try:
            qos_profile = create_qos_profile_from_json(request_data_with_id)
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error parsing QoSProfile({:s})".format(str(request_data))
            )

        try:
            qos_profile_created = self.qos_profile_client.CreateQoSProfile(qos_profile)
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error creating QoSProfile({:s}) QoSProfileWithUuid({:s})".format(
                    str(request_data), str(request_data_with_id)
                )
            )

        return grpc_message_to_qos_table_data(qos_profile_created), HTTP_CREATED

    def get(self):
        """List all QoS profiles."""
        # GetQoSProfiles is a unary RPC returning a QoSProfileList message (see
        # qos_profile.proto); iterate its repeated 'qos_profiles' field rather than
        # the reply itself, mirroring how QodInfo.get iterates 'services'.
        reply = self.qos_profile_client.GetQoSProfiles(Empty())
        list_qos_profiles = [
            grpc_message_to_qos_table_data(qos_profile)
            for qos_profile in reply.qos_profiles
        ]
        return list_qos_profiles, HTTP_OK

class ProfileDetail(_Resource):
    """REST resource for a single QoS profile: fetch (GET), update (PUT), delete (DELETE)."""

    def get(self, qos_profile_id : str):
        """Retrieve the QoS profile identified by qos_profile_id."""
        _qos_profile_id = QoSProfileId(qos_profile_id=Uuid(uuid=qos_profile_id))

        try:
            qos_profile = self.qos_profile_client.GetQoSProfile(_qos_profile_id)
            return grpc_message_to_qos_table_data(qos_profile), HTTP_OK
        except grpc._channel._InactiveRpcError as e:
            # Map gRPC NOT_FOUND to HTTP 404; any other gRPC failure is a 500.
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoSProfileId({:s}) not found".format(str(qos_profile_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error fetching QoSProfileId({:s})".format(str(qos_profile_id))
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error fetching QoSProfileId({:s})".format(str(qos_profile_id))
            )

    def put(self, qos_profile_id : str):
        """Update the QoS profile identified by qos_profile_id with the JSON payload."""
        if not request.is_json: return compose_unsupported_media_type_error()

        request_data : Dict = request.get_json()
        request_data_orig = copy.deepcopy(request_data)

        # If the payload carries an id, it must match the one in the URL.
        if "qos_profile_id" in request_data:
            if request_data["qos_profile_id"] != qos_profile_id:
                return compose_bad_request_error(
                    "qos_profile_id({:s}) in JSON payload mismatches qos_profile_id({:s}) in URL".format(
                        str(request_data["qos_profile_id"]), str(qos_profile_id)
                    )
                )
        else:
            request_data["qos_profile_id"] = qos_profile_id

        try:
            qos_profile = create_qos_profile_from_json(request_data)
            qos_profile_updated = self.qos_profile_client.UpdateQoSProfile(qos_profile)
            return grpc_message_to_qos_table_data(qos_profile_updated), HTTP_ACCEPTED
        except grpc._channel._InactiveRpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoSProfileId({:s}) not found".format(str(qos_profile_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error updating QoSProfileId({:s}) with content QosProfile({:s})".format(
                        str(qos_profile_id), str(request_data_orig)
                    )
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error updating QoSProfileId({:s}) with content QosProfile({:s})".format(
                    str(qos_profile_id), str(request_data_orig)
                )
            )

    def delete(self, qos_profile_id : str):
        """Delete the QoS profile identified by qos_profile_id."""
        _qos_profile_id = QoSProfileId(qos_profile_id=Uuid(uuid=qos_profile_id))

        try:
            self.qos_profile_client.DeleteQoSProfile(_qos_profile_id)
            return {}, HTTP_NOCONTENT
        except grpc._channel._InactiveRpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoSProfileId({:s}) not found".format(str(qos_profile_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error deleting QoSProfileId({:s})".format(str(qos_profile_id))
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error deleting QoSProfileId({:s})".format(str(qos_profile_id))
            )


##### SESSIONS #########################################################################################################

class QodInfo(_Resource):
    """REST resource for the QoD session collection: create (POST) and list (GET)."""

    def post(self):
        """Create a QoD session; the session is materialized as a TFS Service."""
        if not request.is_json: return compose_unsupported_media_type_error()

        request_data : Dict = request.get_json()
        request_data_orig = copy.deepcopy(request_data)

        session_id = request_data.get("session_id")
        if session_id is not None:
            return compose_bad_request_error("session_id is not allowed in creation")

        qos_profile_id = request_data.get("qos_profile_id")
        if qos_profile_id is None:
            return compose_bad_request_error("qos_profile_id is required")

        try:
            service = QOD_2_service(self.context_client, self.qos_profile_client, request_data)
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error parsing QoDSession({:s})".format(str(request_data_orig))
            )

        # Two-step creation: first create a bare service (no endpoints/constraints/
        # config), then update it with the full content so the Service component
        # processes it as an update over an existing entity.
        stripped_service = copy.deepcopy(service)
        stripped_service.ClearField("service_endpoint_ids")
        stripped_service.ClearField("service_constraints")
        stripped_service.ClearField("service_config")
        try:
            self.service_client.CreateService(stripped_service)
            self.service_client.UpdateService(service)

            service_uuid = service.service_id.service_uuid.uuid
            updated_service = get_service_by_uuid(self.context_client, service_uuid, rw_copy=False)
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error creating Service({:s}) for QoDSession({:s})".format(
                    grpc_message_to_json_string(service), str(request_data_orig)
                )
            )

        return service_2_qod(updated_service), HTTP_CREATED

    def get(self):
        """List all QoD sessions (i.e., all services in the default context)."""
        list_services = self.context_client.ListServices(grpc_context_id(DEFAULT_CONTEXT_NAME))
        list_services = [service_2_qod(service) for service in list_services.services]
        return list_services, HTTP_OK


class QodInfoID(_Resource):
    """REST resource for a single QoD session: fetch (GET), update (PUT), delete (DELETE)."""

    def get(self, session_id: str):
        """Retrieve the QoD session identified by session_id."""
        try:
            service = get_service_by_uuid(self.context_client, session_id, rw_copy=True)
            return service_2_qod(service), HTTP_OK
        except grpc._channel._InactiveRpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoDSessionId({:s}) not found".format(str(session_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error fetching QoDSessionId({:s})".format(str(session_id))
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error fetching QoDSessionId({:s})".format(str(session_id))
            )

    def put(self, session_id : str):
        """Update the duration of the QoD session identified by session_id."""
        if not request.is_json: return compose_unsupported_media_type_error()

        request_data : Dict = request.get_json()
        request_data_orig = copy.deepcopy(request_data)

        if "session_id" in request_data:
            if request_data["session_id"] != session_id:
                return compose_bad_request_error(
                    "session_id({:s}) in JSON payload mismatches session_id({:s}) in URL".format(
                        str(request_data["session_id"]), str(session_id)
                    )
                )
        else:
            request_data["session_id"] = session_id

        qos_profile_id = request_data.get("qos_profile_id")
        if qos_profile_id is None:
            return compose_bad_request_error("qos_profile_id is required")

        duration = request_data.get("duration")
        if duration is None:
            return compose_bad_request_error("duration is required")

        try:
            service = get_service_by_uuid(self.context_client, session_id, rw_copy=True)
        except grpc._channel._InactiveRpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoDSessionId({:s}) not found".format(str(session_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error fetching QoDSessionId({:s})".format(str(session_id))
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error fetching QoDSessionId({:s})".format(str(session_id))
            )

        # Only the schedule constraint carries the session duration; patch it in place.
        for constraint in service.service_constraints:
            if constraint.WhichOneof("constraint") == "schedule":
                constraint.schedule.duration_days = duration

        try:
            self.service_client.UpdateService(service)

            service_uuid = service.service_id.service_uuid.uuid
            updated_service = get_service_by_uuid(self.context_client, service_uuid, rw_copy=False)
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error updating Service({:s}) for QoDSession({:s})".format(
                    grpc_message_to_json_string(service), str(request_data_orig)
                )
            )

        return service_2_qod(updated_service), HTTP_ACCEPTED

    def delete(self, session_id: str):
        """Delete the QoD session (underlying service) identified by session_id."""
        try:
            self.service_client.DeleteService(grpc_service_id(DEFAULT_CONTEXT_NAME, session_id))
            return {}, HTTP_NOCONTENT
        except grpc._channel._InactiveRpcError as e:
            if e.code() == grpc.StatusCode.NOT_FOUND:
                return compose_not_found_error(
                    "QoDSessionId({:s}) not found".format(str(session_id))
                )
            else:
                return compose_internal_server_error(
                    "gRPC error deleting QoDSessionId({:s})".format(str(session_id))
                )
        except: # pylint: disable=bare-except
            return compose_internal_server_error(
                "Error deleting QoDSessionId({:s})".format(str(session_id))
            )
# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json, logging, re, time
from netaddr import IPAddress, IPNetwork
from typing import Dict, Tuple
from uuid import uuid4
from common.Constants import DEFAULT_CONTEXT_NAME
from common.proto.context_pb2 import (
    ContextId, Empty, EndPointId, QoSProfileId, Service, ServiceId,
    ServiceStatusEnum, ServiceTypeEnum, Uuid
)
from common.proto.qos_profile_pb2 import (
    QoSProfileValueUnitPair, QoSProfile, QoDConstraintsRequest
)
from common.tools.grpc.ConfigRules import update_config_rule_custom
from common.tools.grpc.Constraints import copy_constraints
from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
from common.tools.object_factory.Context import json_context_id
from common.tools.object_factory.Service import json_service_id
from context.client.ContextClient import ContextClient
from qos_profile.client.QoSProfileClient import QoSProfileClient


LOGGER = logging.getLogger(__name__)

ENDPOINT_SETTINGS_KEY = "/device[{:s}]/endpoint[{:s}]/vlan[{:d}]/settings"
DEVICE_SETTINGS_KEY = "/device[{:s}]/settings"
# Matches device config-rule keys such as "/interface[eth0]/subinterface[0]".
RE_CONFIG_RULE_IF_SUBIF = re.compile(r"^\/interface\[([^\]]+)\]\/subinterface\[([^\]]+)\]$")
# CAMARA QoD session fields mirrored verbatim into the service's '/request' config rule.
MEC_FIELDS = [
    "device", "applicationServer", "qosProfile", "sessionId", "duration",
    "startedAt", "expiresAt", "qosStatus"
]

def grpc_context_id(context_uuid):
    """Build a gRPC ContextId message from a context UUID/name."""
    return ContextId(**json_context_id(context_uuid))

def grpc_service_id(context_uuid, service_uuid):
    """Build a gRPC ServiceId message from a context UUID/name and service UUID."""
    return ServiceId(**json_service_id(service_uuid, context_id=json_context_id(context_uuid)))

def grpc_message_to_qos_table_data(message : QoSProfile) -> dict:
    """Convert a QoSProfile gRPC message into the flat dict exposed by the REST API."""
    return {
        "qos_profile_id"          : message.qos_profile_id.qos_profile_id.uuid,
        "name"                    : message.name,
        "description"             : message.description,
        "status"                  : message.status,
        "targetMinUpstreamRate"   : grpc_message_to_json(message.targetMinUpstreamRate),
        "maxUpstreamRate"         : grpc_message_to_json(message.maxUpstreamRate),
        "maxUpstreamBurstRate"    : grpc_message_to_json(message.maxUpstreamBurstRate),
        "targetMinDownstreamRate" : grpc_message_to_json(message.targetMinDownstreamRate),
        "maxDownstreamRate"       : grpc_message_to_json(message.maxDownstreamRate),
        "maxDownstreamBurstRate"  : grpc_message_to_json(message.maxDownstreamBurstRate),
        "minDuration"             : grpc_message_to_json(message.minDuration),
        "maxDuration"             : grpc_message_to_json(message.maxDuration),
        "priority"                : message.priority,
        "packetDelayBudget"       : grpc_message_to_json(message.packetDelayBudget),
        "jitter"                  : grpc_message_to_json(message.jitter),
        "packetErrorLossRate"     : message.packetErrorLossRate,
    }

def create_value_unit(data) -> QoSProfileValueUnitPair:
    """Build a QoSProfileValueUnitPair from a {"value": ..., "unit": ...} dict."""
    return QoSProfileValueUnitPair(value=data["value"], unit=data["unit"])

def create_qos_profile_from_json(qos_profile_data : dict) -> QoSProfile:
    """Build a QoSProfile gRPC message from the REST-API JSON representation.

    Raises KeyError if any required field is missing from qos_profile_data.
    """
    qos_profile = QoSProfile()
    qos_profile.qos_profile_id.CopyFrom(QoSProfileId(qos_profile_id=Uuid(uuid=qos_profile_data["qos_profile_id"])))
    qos_profile.name = qos_profile_data["name"]
    qos_profile.description = qos_profile_data["description"]
    qos_profile.status = qos_profile_data["status"]
    qos_profile.targetMinUpstreamRate.CopyFrom(create_value_unit(qos_profile_data["targetMinUpstreamRate"]))
    qos_profile.maxUpstreamRate.CopyFrom(create_value_unit(qos_profile_data["maxUpstreamRate"]))
    qos_profile.maxUpstreamBurstRate.CopyFrom(create_value_unit(qos_profile_data["maxUpstreamBurstRate"]))
    qos_profile.targetMinDownstreamRate.CopyFrom(create_value_unit(qos_profile_data["targetMinDownstreamRate"]))
    qos_profile.maxDownstreamRate.CopyFrom(create_value_unit(qos_profile_data["maxDownstreamRate"]))
    qos_profile.maxDownstreamBurstRate.CopyFrom(create_value_unit(qos_profile_data["maxDownstreamBurstRate"]))
    qos_profile.minDuration.CopyFrom(create_value_unit(qos_profile_data["minDuration"]))
    qos_profile.maxDuration.CopyFrom(create_value_unit(qos_profile_data["maxDuration"]))
    qos_profile.priority = qos_profile_data["priority"]
    qos_profile.packetDelayBudget.CopyFrom(create_value_unit(qos_profile_data["packetDelayBudget"]))
    qos_profile.jitter.CopyFrom(create_value_unit(qos_profile_data["jitter"]))
    qos_profile.packetErrorLossRate = qos_profile_data["packetErrorLossRate"]
    return qos_profile

def ip_withoutsubnet(ip_withsubnet, target_ip_address):
    """Return True if target_ip_address belongs to the ip_withsubnet CIDR network."""
    network = IPNetwork(ip_withsubnet)
    return IPAddress(target_ip_address) in network

def map_ip_addresses_to_endpoint_ids(
    context_client : ContextClient, a_ip : str, z_ip : str
) -> Tuple[EndPointId, EndPointId]:
    """Resolve the device endpoints whose configured subnets contain a_ip and z_ip.

    Scans all devices' sub-interface config rules, matching each rule's 'address_ip'
    network against the given IPs. Returns (a_endpoint_id, z_endpoint_id); either may
    be None when no matching endpoint is found.
    """
    a_ep_id = None
    z_ep_id = None

    devices = context_client.ListDevices(Empty()).devices
    for device in devices:
        # Index endpoints both by UUID and by name: config-rule keys may use either.
        endpoint_mappings = dict()
        for endpoint in device.device_endpoints:
            endpoint_id = endpoint.endpoint_id
            endpoint_uuid = endpoint_id.endpoint_uuid.uuid
            endpoint_name = endpoint.name
            endpoint_mappings[endpoint_uuid] = endpoint_id
            endpoint_mappings[endpoint_name] = endpoint_id

        for config_rule in device.device_config.config_rules:
            if config_rule.WhichOneof("config_rule") != "custom": continue
            match_subif = RE_CONFIG_RULE_IF_SUBIF.match(config_rule.custom.resource_key)
            if not match_subif: continue

            short_port_name = match_subif.groups()[0]
            # Skip rules referencing interfaces not exposed as endpoints, rather
            # than failing the whole lookup with a KeyError.
            endpoint_id = endpoint_mappings.get(short_port_name)
            if endpoint_id is None: continue

            address_ip = json.loads(config_rule.custom.resource_value).get("address_ip")
            if address_ip is None: continue  # rule carries no IP configuration
            if ip_withoutsubnet(a_ip, address_ip): a_ep_id = endpoint_id
            if ip_withoutsubnet(z_ip, address_ip): z_ep_id = endpoint_id

    return a_ep_id, z_ep_id

def QOD_2_service(
    context_client : ContextClient, qos_profile_client : QoSProfileClient,
    qod_info : Dict
) -> Service:
    """Translate a CAMARA QoD session request into a TFS L3NM Service message.

    Generates a session_id when qod_info does not carry one, resolves device/app
    endpoints from their IPv4 addresses, stores the original request fields in the
    '/request' config rule, and expands the QoS profile into service constraints.
    """
    # Fix: bind session_id on both paths; the original only assigned it when the
    # key was absent, raising NameError for callers that pre-set a session_id.
    session_id = qod_info.setdefault("session_id", str(uuid4()))

    service = Service()
    service.service_id.service_uuid.uuid = session_id
    service.service_id.context_id.context_uuid.uuid = DEFAULT_CONTEXT_NAME
    service.name = session_id
    service.service_type = ServiceTypeEnum.SERVICETYPE_L3NM
    service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED

    if 'device' in qod_info and 'applicationServer' in qod_info:
        a_ip = qod_info['device'].get('ipv4Address')
        z_ip = qod_info['applicationServer'].get('ipv4Address')
        if a_ip and z_ip:
            a_ep_id, z_ep_id = map_ip_addresses_to_endpoint_ids(context_client, a_ip, z_ip)
            if a_ep_id is not None: service.service_endpoint_ids.append(a_ep_id)
            if z_ep_id is not None: service.service_endpoint_ids.append(z_ep_id)

    service_config_rules = service.service_config.config_rules
    update_config_rule_custom(service_config_rules, '/settings', {})
    update_config_rule_custom(service_config_rules, '/request', {
        k : (qod_info[k], True) for k in MEC_FIELDS if k in qod_info
    })

    qos_profile_id = qod_info.get('qos_profile_id')
    qos_profile_id = QoSProfileId(qos_profile_id=Uuid(uuid=qos_profile_id))
    current_time = time.time()
    duration_days = qod_info.get('duration')
    request = QoDConstraintsRequest(
        qos_profile_id=qos_profile_id, start_timestamp=current_time, duration=duration_days
    )
    qos_profile_constraints = qos_profile_client.GetConstraintsFromQoSProfile(request)
    # Trace-level diagnostics; logged at DEBUG instead of WARNING since they report
    # normal operation, not anomalies.
    LOGGER.debug('qos_profile_constraints = {:s}'.format(grpc_message_to_json_string(qos_profile_constraints)))
    copy_constraints(qos_profile_constraints.constraints, service.service_constraints)
    LOGGER.debug('service.service_constraints = {:s}'.format(grpc_message_to_json_string(service.service_constraints)))

    return service

def service_2_qod(service : Service) -> Dict:
    """Translate a TFS Service message back into a CAMARA QoD session dict."""
    response = {}
    for config_rule in service.service_config.config_rules:
        if config_rule.WhichOneof("config_rule") != "custom": continue
        if config_rule.custom.resource_key != '/request': continue
        resource_value_json = json.loads(config_rule.custom.resource_value)

        if 'device' in resource_value_json and 'ipv4Address' in resource_value_json['device']:
            response['device'] = {'ipv4Address': resource_value_json['device']['ipv4Address']}

        if 'applicationServer' in resource_value_json and 'ipv4Address' in resource_value_json['applicationServer']:
            response['applicationServer'] = {'ipv4Address': resource_value_json['applicationServer']['ipv4Address']}

    # NOTE(review): protobuf sub-messages are always truthy, so this condition never
    # filters anything; kept for source compatibility.
    if service.service_id:
        response['session_id'] = service.service_id.service_uuid.uuid

    for constraint in service.service_constraints:
        if constraint.WhichOneof('constraint') == 'schedule':
            response['duration' ] = float(constraint.schedule.duration_days)
            response['startedAt'] = int(constraint.schedule.start_timestamp)
            response['expiresAt'] = response['startedAt'] + response['duration']

        if constraint.WhichOneof('constraint') == 'qos_profile':
            response['qos_profile_id'] = constraint.qos_profile.qos_profile_id.qos_profile_id.uuid

    return response
# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from nbi.service.NbiApplication import NbiApplication
from .Resources import ProfileList, ProfileDetail, QodInfo, QodInfoID

URL_PREFIX = '/camara/qod/v0'

def register_camara_qod(nbi_app : NbiApplication):
    """Register the CAMARA QoD REST endpoints (sessions and profiles) in the NBI app.

    The detail endpoints need URL variable converters so Flask can route
    '/sessions/<id>' and '/profiles/<id>' and pass the id to the resource methods
    (QodInfoID and ProfileDetail take 'session_id' / 'qos_profile_id' parameters).
    """
    nbi_app.add_rest_api_resource(
        QodInfo,
        URL_PREFIX + '/sessions',
        endpoint='camara.qod.session_list'
    )
    nbi_app.add_rest_api_resource(
        QodInfoID,
        URL_PREFIX + '/sessions/<string:session_id>',
        endpoint='camara.qod.session_detail'
    )
    nbi_app.add_rest_api_resource(
        ProfileList,
        URL_PREFIX + '/profiles',
        endpoint='camara.qod.profile_list'
    )
    nbi_app.add_rest_api_resource(
        ProfileDetail,
        URL_PREFIX + '/profiles/<string:qos_profile_id>',
        endpoint='camara.qod.profile_detail'
    )
-# See the License for the specific language governing permissions and -# limitations under the License. - -import logging, signal, sys, threading -from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server -from common.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server -from common.proto.slice_pb2_grpc import add_SliceServiceServicer_to_server -from common.tests.MockServicerImpl_Context import MockServicerImpl_Context -from common.tests.MockServicerImpl_Service import MockServicerImpl_Service -from common.tests.MockServicerImpl_Slice import MockServicerImpl_Slice -from common.tools.service.GenericGrpcService import GenericGrpcService -from .Constants import LOCAL_HOST, MOCKSERVICE_PORT - - -logging.basicConfig( - level=logging.DEBUG, - format='[%(asctime)s] %(levelname)s:%(name)s:%(message)s', -) -LOGGER = logging.getLogger(__name__) - -class MockService_Dependencies(GenericGrpcService): - # Mock Service implementing Context, Service and Slice to simplify unitary tests of NBI - - def __init__(self) -> None: - super().__init__( - MOCKSERVICE_PORT, LOCAL_HOST, - enable_health_servicer=False, - cls_name='MockService' - ) - - # pylint: disable=attribute-defined-outside-init - def install_servicers(self): - self.context_servicer = MockServicerImpl_Context() - add_ContextServiceServicer_to_server(self.context_servicer, self.server) - - self.service_servicer = MockServicerImpl_Service() - add_ServiceServiceServicer_to_server(self.service_servicer, self.server) - - self.slice_servicer = MockServicerImpl_Slice() - add_SliceServiceServicer_to_server(self.slice_servicer, self.server) - -TERMINATE = threading.Event() - -def signal_handler(signal, frame): # pylint: disable=redefined-outer-name,unused-argument - LOGGER.warning('Terminate signal received') - TERMINATE.set() - -def main(): - LOGGER.info('Starting...') - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - grpc_service = 
MockService_Dependencies() - grpc_service.start() - - # Wait for Ctrl+C or termination signal - while not TERMINATE.wait(timeout=1.0): pass - - LOGGER.info('Terminating...') - grpc_service.stop() - - LOGGER.info('Bye') - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/src/nbi/tests/MockWebServer.py b/src/nbi/tests/MockWebServer.py index 086b611e40319e526c60838cf0cd89654965b4b5..43734e64c7c47e15b263a8f6c6f643bb595d0285 100644 --- a/src/nbi/tests/MockWebServer.py +++ b/src/nbi/tests/MockWebServer.py @@ -15,6 +15,7 @@ import logging, threading from nbi.service.NbiApplication import NbiApplication +from nbi.service.camara_qod import register_camara_qod from nbi.service.etsi_bwm import register_etsi_bwm_api from nbi.service.health_probes import register_health_probes from nbi.service.ietf_l2vpn import register_ietf_l2vpn @@ -45,6 +46,7 @@ class MockWebServer(threading.Thread): #register_ietf_nss (self.nbi_app) #register_ietf_acl (self.nbi_app) #register_qkd_app (self.nbi_app) + register_camara_qod (self.nbi_app) self.nbi_app.dump_configuration() def run(self): diff --git a/src/nbi/tests/PrepareTestScenario.py b/src/nbi/tests/PrepareTestScenario.py index 1510dd29891b309ad42e9c843ffaaef3804ba6f2..57f1476a8b9ebef5d26725c0822284e307ebecec 100644 --- a/src/nbi/tests/PrepareTestScenario.py +++ b/src/nbi/tests/PrepareTestScenario.py @@ -36,63 +36,25 @@ from .OSM_Constants import WIM_MAPPING from .MockWebServer import MockWebServer -os.environ[get_env_var_name(ServiceNameEnum.NBI, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) -os.environ[get_env_var_name(ServiceNameEnum.NBI, ENVVAR_SUFIX_SERVICE_PORT_HTTP)] = str(NBI_SERVICE_PORT) -os.environ[get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST )] = str('mock_tfs_nbi_dependencies') -os.environ[get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(MOCKSERVICE_PORT) -os.environ[get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST )] = 
str('mock_tfs_nbi_dependencies') -os.environ[get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(MOCKSERVICE_PORT) -os.environ[get_env_var_name(ServiceNameEnum.SERVICE, ENVVAR_SUFIX_SERVICE_HOST )] = str('mock_tfs_nbi_dependencies') -os.environ[get_env_var_name(ServiceNameEnum.SERVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(MOCKSERVICE_PORT) -os.environ[get_env_var_name(ServiceNameEnum.SLICE, ENVVAR_SUFIX_SERVICE_HOST )] = str('mock_tfs_nbi_dependencies') -os.environ[get_env_var_name(ServiceNameEnum.SLICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(MOCKSERVICE_PORT) - - -## MockService_Dependencies executed as a standalone container during -# tests to prevent apparent dead locks and issues. -#@pytest.fixture(scope='session') -#def mock_service(): -# # NOTE: Starting MockServer in a separate process to prevent -# # issues with eventlet monkey-patched libraries. -# -# cmd = ['python', '-m', 'nbi.tests.MockService_Dependencies'] -# custom_env = os.environ.copy() -# mock_service_process = subprocess.Popen( -# cmd, -# env=custom_env, -# stdout=subprocess.PIPE, -# stderr=subprocess.STDOUT, -# stdin=subprocess.DEVNULL, -# text=True, -# bufsize=1 -# ) -# -# mock_service_logger = logging.getLogger('MockService_Dependencies') -# mock_service_logger.info('Started') -# -# def stream_stdout(): -# for line in iter(mock_service_process.stdout.readline, ''): -# mock_service_logger.info(line.strip()) -# -# stream_stdout_thread = threading.Thread(target=stream_stdout, daemon=True) -# stream_stdout_thread.start() -# -# yield True -# -# # Check if process is still running -# if mock_service_process.poll() is None: -# mock_service_process.terminate() # Try to terminate gracefully -# time.sleep(2) # Give it time to exit -# if mock_service_process.poll() is None: -# mock_service_process.kill() # Force kill if still running -# -# mock_service_logger.info('Terminated') -# stream_stdout_thread.join() +os.environ[get_env_var_name(ServiceNameEnum.NBI, 
ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.NBI, ENVVAR_SUFIX_SERVICE_PORT_HTTP)] = str(NBI_SERVICE_PORT) + +MOCK_SERVICES = [ + ServiceNameEnum.CONTEXT, + ServiceNameEnum.DEVICE, + ServiceNameEnum.QOSPROFILE, + ServiceNameEnum.SERVICE, + ServiceNameEnum.SLICE, +] +for mock_service in MOCK_SERVICES: + mock_service_host_env_var = get_env_var_name(mock_service, ENVVAR_SUFIX_SERVICE_HOST) + os.environ[mock_service_host_env_var] = str('mock_tfs_nbi_dependencies') + mock_service_port_env_var = get_env_var_name(mock_service, ENVVAR_SUFIX_SERVICE_PORT_GRPC) + os.environ[mock_service_port_env_var] = str(MOCKSERVICE_PORT) + @pytest.fixture(scope='session') -def nbi_application( -# mock_service # pylint: disable=redefined-outer-name, unused-argument -) -> NbiApplication: +def nbi_application() -> NbiApplication: mock_web_server = MockWebServer() mock_web_server.start() time.sleep(1) # bring time for the server to start @@ -116,33 +78,25 @@ def osm_wim( return MockOSM(wim_url, WIM_MAPPING, USERNAME, PASSWORD) @pytest.fixture(scope='session') -def context_client( -# mock_service # pylint: disable=redefined-outer-name, unused-argument -) -> ContextClient: +def context_client() -> ContextClient: _client = ContextClient() yield _client _client.close() @pytest.fixture(scope='session') -def device_client( -# mock_service # pylint: disable=redefined-outer-name, unused-argument -) -> DeviceClient: +def device_client() -> DeviceClient: _client = DeviceClient() yield _client _client.close() @pytest.fixture(scope='session') -def service_client( -# mock_service # pylint: disable=redefined-outer-name, unused-argument -) -> ServiceClient: +def service_client() -> ServiceClient: _client = ServiceClient() yield _client _client.close() @pytest.fixture(scope='session') -def slice_client( -# mock_service # pylint: disable=redefined-outer-name, unused-argument -) -> SliceClient: +def slice_client() -> SliceClient: _client = SliceClient() yield 
_client _client.close() diff --git a/src/nbi/tests/test_camara_qod.py b/src/nbi/tests/test_camara_qod.py new file mode 100644 index 0000000000000000000000000000000000000000..de78b1bbd28a9e50f0b56b697dc3f33045343045 --- /dev/null +++ b/src/nbi/tests/test_camara_qod.py @@ -0,0 +1,326 @@ +# Copyright 2022-2024 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Enable eventlet for async networking +# NOTE: monkey_patch needs to be executed before importing any other module. +import eventlet +eventlet.monkey_patch() + +#pylint: disable=wrong-import-position +import deepdiff, logging, pytest +from decimal import ROUND_HALF_UP, Decimal +from typing import Dict +from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME +from common.proto.context_pb2 import ContextId +from common.tools.descriptor.Loader import ( + DescriptorLoader, check_descriptor_load_results, validate_empty_scenario +) +from common.tools.object_factory.Context import json_context_id +from context.client.ContextClient import ContextClient +from nbi.service.NbiApplication import NbiApplication +from .PrepareTestScenario import ( # pylint: disable=unused-import + # be careful, order of symbols is important here! 
+ nbi_application, context_client, + do_rest_delete_request, do_rest_get_request, do_rest_post_request, do_rest_put_request, +) + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +DESCRIPTOR_FILE = 'nbi/tests/data/tfs_api_dummy.json' + +JSON_ADMIN_CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_NAME) +ADMIN_CONTEXT_ID = ContextId(**JSON_ADMIN_CONTEXT_ID) + + +@pytest.fixture(scope='session') +def storage() -> Dict: + yield dict() + + +# ----- Prepare Environment -------------------------------------------------------------------------------------------- + +def test_prepare_environment(context_client : ContextClient) -> None: # pylint: disable=redefined-outer-name + validate_empty_scenario(context_client) + descriptor_loader = DescriptorLoader(descriptors_file=DESCRIPTOR_FILE, context_client=context_client) + results = descriptor_loader.process() + check_descriptor_load_results(results, descriptor_loader) + descriptor_loader.validate() + + # Verify the scenario has no services/slices + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.topology_ids) == 1 + assert len(response.service_ids ) == 3 + assert len(response.slice_ids ) == 1 + + +# ----- Run tests ------------------------------------------------------------------------------------------------------ + +def test_create_profile( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile_data = { + "name" : "QCI_2_voice", + "description" : "QoS profile for video streaming", + "status" : "ACTIVE", + "priority" : 20, + "targetMinUpstreamRate" : {"value": 10, "unit": "bps"}, + "maxUpstreamRate" : {"value": 10, "unit": "bps"}, + "maxUpstreamBurstRate" : {"value": 10, "unit": "bps"}, + "targetMinDownstreamRate": {"value": 10, "unit": "bps"}, + "maxDownstreamRate" : {"value": 10, "unit": "bps"}, + "maxDownstreamBurstRate" : {"value": 10, "unit": "bps"}, + 
"minDuration" : {"value": 12, "unit": "Minutes"}, + "maxDuration" : {"value": 12, "unit": "Minutes"}, + "packetDelayBudget" : {"value": 12, "unit": "Minutes"}, + "jitter" : {"value": 12, "unit": "Minutes"}, + "packetErrorLossRate" : 3, + } + post_response = do_rest_post_request( + '/camara/qod/v0/profiles', body=qos_profile_data, + expected_status_codes={201} + ) + assert 'qos_profile_id' in post_response + qos_profile_data['qos_profile_id'] = post_response['qos_profile_id'] + + diff_data = deepdiff.DeepDiff(qos_profile_data, post_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + + storage['qos_profile'] = post_response + +def test_get_profile_before_update( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + + get_response = do_rest_get_request( + '/camara/qod/v0/profiles/{:s}'.format(str(qos_profile_id)), + expected_status_codes={200} + ) + + diff_data = deepdiff.DeepDiff(qos_profile, get_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + +def test_update_profile( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + + qos_profile_update = { + "qos_profile_id" : qos_profile_id, + "name" : "Updated Name", + "description" : "NEW GAMING PROFILE", + "status" : "ACTIVE", + "targetMinUpstreamRate" : {"value": 20, "unit": "bps"}, + "maxUpstreamRate" : {"value": 50, "unit": "bps"}, + "maxUpstreamBurstRate" : {"value": 60, "unit": "bps"}, + "targetMinDownstreamRate": {"value": 30, "unit": 
"bps"}, + "maxDownstreamRate" : {"value": 100, "unit": "bps"}, + "maxDownstreamBurstRate" : {"value": 70, "unit": "bps"}, + "minDuration" : {"value": 15, "unit": "Minutes"}, + "maxDuration" : {"value": 25, "unit": "Minutes"}, + "priority" : 15, + "packetDelayBudget" : {"value": 10, "unit": "Minutes"}, + "jitter" : {"value": 10, "unit": "Minutes"}, + "packetErrorLossRate" : 1 + } + put_response = do_rest_put_request( + '/camara/qod/v0/profiles/{:s}'.format(str(qos_profile_id)), body=qos_profile_update, + expected_status_codes={202} + ) + + diff_data = deepdiff.DeepDiff(qos_profile_update, put_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + + storage['qos_profile'] = put_response + +def test_get_profile_after_update( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + + get_response = do_rest_get_request( + '/camara/qod/v0/profiles/{:s}'.format(str(qos_profile_id)), + expected_status_codes={200} + ) + + diff_data = deepdiff.DeepDiff(qos_profile, get_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + +def test_create_session( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + + session_data = { + "device" : {"ipv4Address": "84.75.11.12/25"}, + "applicationServer": {"ipv4Address": "192.168.0.1/26"}, + "duration" : float(10), # 10 days + "qos_profile_id" : qos_profile_id, + } + post_response = do_rest_post_request( + '/camara/qod/v0/sessions', body=session_data, + 
expected_status_codes={201} + ) + + assert 'session_id' in post_response + session_data['session_id'] = post_response['session_id'] + + del post_response['duration'] + del session_data['duration'] + del post_response['startedAt'] + del post_response['expiresAt'] + + diff_data = deepdiff.DeepDiff(session_data, post_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + + storage['session'] = post_response + +def test_get_session_before_update( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + session = storage['session'] + assert 'session_id' in session + session_id = session['session_id'] + + get_response = do_rest_get_request( + '/camara/qod/v0/sessions/{:s}'.format(str(session_id)), + expected_status_codes={200} + ) + + del get_response['duration'] + del get_response['startedAt'] + del get_response['expiresAt'] + + diff_data = deepdiff.DeepDiff(session, get_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + +def test_update_session( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + session = storage['session'] + assert 'session_id' in session + session_id = session['session_id'] + + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + + session_update = { + "session_id" : session_id, + "device" : {"ipv4Address": "84.75.11.12/25"}, + "applicationServer": {"ipv4Address": "192.168.0.1/26"}, + "duration" : float(20), # 20 days + "qos_profile_id" : qos_profile_id, + } + put_response = do_rest_put_request( + '/camara/qod/v0/sessions/{:s}'.format(str(session_id)), body=session_update, + expected_status_codes={202} + ) + + del put_response['duration'] + del 
session_update['duration'] + del put_response['startedAt'] + del put_response['expiresAt'] + + diff_data = deepdiff.DeepDiff(session_update, put_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + + storage['session'] = put_response + +def test_get_session_after_update( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + session = storage['session'] + assert 'session_id' in session + session_id = session['session_id'] + + get_response = do_rest_get_request( + '/camara/qod/v0/sessions/{:s}'.format(str(session_id)), + expected_status_codes={200} + ) + + del get_response['duration'] + del get_response['startedAt'] + del get_response['expiresAt'] + + diff_data = deepdiff.DeepDiff(session, get_response) + LOGGER.error('Differences:\n{:s}'.format(str(diff_data.pretty()))) + assert len(diff_data) == 0 + +def test_delete_session( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + session = storage['session'] + assert 'session_id' in session + session_id = session['session_id'] + do_rest_delete_request( + '/camara/qod/v0/sessions/{:s}'.format(str(session_id)), + expected_status_codes={204} + ) + storage.pop('session') + +def test_delete_profile( + nbi_application : NbiApplication, # pylint: disable=redefined-outer-name + storage : Dict # pylint: disable=redefined-outer-name, unused-argument +) -> None: + qos_profile = storage['qos_profile'] + assert 'qos_profile_id' in qos_profile + qos_profile_id = qos_profile['qos_profile_id'] + do_rest_delete_request( + '/camara/qod/v0/profiles/{:s}'.format(str(qos_profile_id)), + expected_status_codes={204} + ) + storage.pop('qos_profile') + +# ----- Cleanup Environment -------------------------------------------------------------------------------------------- + 
+def test_cleanup_environment(context_client : ContextClient) -> None: # pylint: disable=redefined-outer-name + # Verify the scenario has no services/slices + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.topology_ids) == 1 + assert len(response.service_ids ) == 3 + assert len(response.slice_ids ) == 1 + + # Load descriptors and validate the base scenario + descriptor_loader = DescriptorLoader(descriptors_file=DESCRIPTOR_FILE, context_client=context_client) + descriptor_loader.validate() + descriptor_loader.unload() + validate_empty_scenario(context_client) diff --git a/src/nbi/tests/test_core.py b/src/nbi/tests/test_core.py index 39db882c0e72280fee062382f837003b36c365f8..8d49c5bc676ff8232740166cbad15355e4bcf749 100644 --- a/src/nbi/tests/test_core.py +++ b/src/nbi/tests/test_core.py @@ -25,7 +25,6 @@ from .Constants import NBI_SERVICE_BASE_URL from .HeartbeatClientNamespace import HeartbeatClientNamespace from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! - #mock_service, nbi_application, do_rest_get_request ) diff --git a/src/nbi/tests/test_etsi_bwm.py b/src/nbi/tests/test_etsi_bwm.py index 29666ffadf7d99bb18c8cb24031cdda9e4e18154..54acc326544d2134a371b1de7710f8b28731fd06 100644 --- a/src/nbi/tests/test_etsi_bwm.py +++ b/src/nbi/tests/test_etsi_bwm.py @@ -29,7 +29,6 @@ from context.client.ContextClient import ContextClient from nbi.service.NbiApplication import NbiApplication from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! 
- #mock_service, nbi_application, context_client, do_rest_delete_request, do_rest_get_request, do_rest_patch_request, do_rest_post_request, do_rest_put_request ) diff --git a/src/nbi/tests/test_ietf_l2vpn.py b/src/nbi/tests/test_ietf_l2vpn.py index f620040e176a850edf166c23f20a44c6368360e5..048ef56faad528b6ec5812a706e1bf03bba84886 100644 --- a/src/nbi/tests/test_ietf_l2vpn.py +++ b/src/nbi/tests/test_ietf_l2vpn.py @@ -28,7 +28,6 @@ from tests.tools.mock_osm.MockOSM import MockOSM from .OSM_Constants import SERVICE_CONNECTION_POINTS_1, SERVICE_CONNECTION_POINTS_2, SERVICE_TYPE from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! - #mock_service, nbi_application, osm_wim, context_client ) diff --git a/src/nbi/tests/test_ietf_l3vpn.py b/src/nbi/tests/test_ietf_l3vpn.py index c3176c25a9f2713430130927a960feb344a337a9..7cdb5fcf12728ab1a09d768b13b0bbc09f03e3cf 100644 --- a/src/nbi/tests/test_ietf_l3vpn.py +++ b/src/nbi/tests/test_ietf_l3vpn.py @@ -30,7 +30,6 @@ from context.client.ContextClient import ContextClient from nbi.service.NbiApplication import NbiApplication from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! - #mock_service, nbi_application, context_client, do_rest_delete_request, do_rest_get_request, do_rest_post_request ) diff --git a/src/nbi/tests/test_ietf_network.py b/src/nbi/tests/test_ietf_network.py index ceb61aac360ecea1b1fd9f6c6d9d6f2513557b43..13e2392a4199234b39ce6e1556149e0770fff70d 100644 --- a/src/nbi/tests/test_ietf_network.py +++ b/src/nbi/tests/test_ietf_network.py @@ -34,7 +34,6 @@ os.environ['IETF_NETWORK_RENDERER'] = 'PYANGBIND' from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! 
- #mock_service, nbi_application, context_client, do_rest_get_request ) diff --git a/src/nbi/tests/test_tfs_api.py b/src/nbi/tests/test_tfs_api.py index ed5630a9b2bd7f712830eb1f38d6111231aadb00..e3278cd0d7894b093ea4effdbe813b8840e9be76 100644 --- a/src/nbi/tests/test_tfs_api.py +++ b/src/nbi/tests/test_tfs_api.py @@ -39,7 +39,6 @@ from context.client.ContextClient import ContextClient from nbi.service.NbiApplication import NbiApplication from .PrepareTestScenario import ( # pylint: disable=unused-import # be careful, order of symbols is important here! - #mock_service, nbi_application, context_client, do_rest_get_request ) diff --git a/src/qos_profile/client/QoSProfileClient.py b/src/qos_profile/client/QoSProfileClient.py index d70966fa6fc089e582a36df2bbad7fd357992b86..f745b202de8dd9320550ded73bafbf836b282acb 100644 --- a/src/qos_profile/client/QoSProfileClient.py +++ b/src/qos_profile/client/QoSProfileClient.py @@ -12,13 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Iterator import grpc, logging from common.Constants import ServiceNameEnum from common.Settings import get_service_host, get_service_port_grpc from common.proto.context_pb2 import Empty, QoSProfileId -from common.proto.qos_profile_pb2 import QoSProfile, QoDConstraintsRequest -from common.proto.context_pb2 import Constraint +from common.proto.qos_profile_pb2 import ConstraintList, QoSProfile, QoDConstraintsRequest, QoSProfileList from common.proto.qos_profile_pb2_grpc import QoSProfileServiceStub from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string @@ -77,15 +75,15 @@ class QoSProfileClient: return response @RETRY_DECORATOR - def GetQoSProfiles(self, request: Empty) -> Iterator[QoSProfile]: + def GetQoSProfiles(self, request: Empty) -> QoSProfileList: LOGGER.debug('GetQoSProfiles request: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.GetQoSProfiles(request) LOGGER.debug('GetQoSProfiles result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def GetConstraintListFromQoSProfile(self, request: QoDConstraintsRequest) -> Iterator[Constraint]: - LOGGER.debug('GetConstraintListFromQoSProfile request: {:s}'.format(grpc_message_to_json_string(request))) - response = self.stub.GetConstraintListFromQoSProfile(request) - LOGGER.debug('GetConstraintListFromQoSProfile result: {:s}'.format(grpc_message_to_json_string(response))) + def GetConstraintsFromQoSProfile(self, request: QoDConstraintsRequest) -> ConstraintList: + LOGGER.debug('GetConstraintsFromQoSProfile request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.GetConstraintsFromQoSProfile(request) + #LOGGER.debug('GetConstraintsFromQoSProfile result: {:s}'.format(grpc_message_to_json_string(response))) return response diff --git a/src/qos_profile/service/QoSProfileServiceServicerImpl.py 
b/src/qos_profile/service/QoSProfileServiceServicerImpl.py index 0ec2730018af0eb49fb53f5a53faedbd2efc3b63..426ab7214f9bc88c1dc4882f2ee7877092358a8e 100644 --- a/src/qos_profile/service/QoSProfileServiceServicerImpl.py +++ b/src/qos_profile/service/QoSProfileServiceServicerImpl.py @@ -13,13 +13,12 @@ # limitations under the License. import grpc, logging, sqlalchemy -from typing import Iterator - -import grpc._channel from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method -from common.proto.context_pb2 import Constraint, ConstraintActionEnum, Constraint_QoSProfile, Constraint_Schedule, Empty, QoSProfileId -from common.proto.qos_profile_pb2 import QoSProfile, QoDConstraintsRequest +from common.proto.context_pb2 import Constraint, Empty, QoSProfileId +from common.proto.qos_profile_pb2 import ConstraintList, QoSProfile, QoDConstraintsRequest, QoSProfileList from common.proto.qos_profile_pb2_grpc import QoSProfileServiceServicer +from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.object_factory.Constraint import json_constraint_qos_profile, json_constraint_schedule from .database.QoSProfile import set_qos_profile, delete_qos_profile, get_qos_profile, get_qos_profiles @@ -71,26 +70,23 @@ class QoSProfileServiceServicerImpl(QoSProfileServiceServicer): return qos_profile @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetQoSProfiles(self, request: Empty, context: grpc.ServicerContext) -> Iterator[QoSProfile]: - yield from get_qos_profiles(self.db_engine, request) - + def GetQoSProfiles(self, request: Empty, context: grpc.ServicerContext) -> QoSProfileList: + return QoSProfileList(qos_profiles=get_qos_profiles(self.db_engine, request)) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetConstraintListFromQoSProfile(self, request: QoDConstraintsRequest, context: grpc.ServicerContext) -> Iterator[Constraint]: + def GetConstraintsFromQoSProfile( + self, request: QoDConstraintsRequest, context: 
grpc.ServicerContext + ) -> ConstraintList: + LOGGER.debug('[GetConstraintsFromQoSProfile] request={:s}'.format(grpc_message_to_json_string(request))) qos_profile = get_qos_profile(self.db_engine, request.qos_profile_id.qos_profile_id.uuid) if qos_profile is None: context.set_details(f'QoSProfile {request.qos_profile_id.qos_profile_id.uuid} not found') context.set_code(grpc.StatusCode.NOT_FOUND) - yield Constraint() + return ConstraintList() - qos_profile_constraint = Constraint_QoSProfile() - qos_profile_constraint.qos_profile_name = qos_profile.name - qos_profile_constraint.qos_profile_id.CopyFrom(qos_profile.qos_profile_id) - constraint_qos = Constraint() - constraint_qos.action = ConstraintActionEnum.CONSTRAINTACTION_SET - constraint_qos.qos_profile.CopyFrom(qos_profile_constraint) - yield constraint_qos - constraint_schedule = Constraint() - constraint_schedule.action = ConstraintActionEnum.CONSTRAINTACTION_SET - constraint_schedule.schedule.CopyFrom(Constraint_Schedule(start_timestamp=request.start_timestamp, duration_days=request.duration/86400)) - yield constraint_schedule + reply = ConstraintList(constraints=[ + Constraint(**json_constraint_qos_profile(qos_profile.qos_profile_id, qos_profile.name)), + Constraint(**json_constraint_schedule(request.start_timestamp, request.duration)), + ]) + LOGGER.debug('[GetConstraintsFromQoSProfile] reply={:s}'.format(grpc_message_to_json_string(reply))) + return reply diff --git a/src/qos_profile/tests/test_constraints.py b/src/qos_profile/tests/test_constraints.py index 523147a31b9e74d43dd1cfa4341b603c70eb44bb..7036dc9b15f2b620a38077fdb3970f1445d4ec54 100644 --- a/src/qos_profile/tests/test_constraints.py +++ b/src/qos_profile/tests/test_constraints.py @@ -78,15 +78,19 @@ def test_get_constraints(qos_profile_client: QoSProfileClient): qos_profile = create_qos_profile_from_json(qos_profile_data) qos_profile_created = qos_profile_client.CreateQoSProfile(qos_profile) LOGGER.info('qos_profile_data = 
{:s}'.format(grpc_message_to_json_string(qos_profile_created))) - constraints = list(qos_profile_client.GetConstraintListFromQoSProfile(QoDConstraintsRequest( - qos_profile_id=qos_profile.qos_profile_id, start_timestamp=1726063284.25332, duration=86400) - )) - constraint_1 = constraints[0] - constraint_2 = constraints[1] - assert len(constraints) == 2 + constraints = qos_profile_client.GetConstraintsFromQoSProfile( + QoDConstraintsRequest( + qos_profile_id=qos_profile.qos_profile_id, start_timestamp=1726063284.25332, duration=86400 + ) + ) + assert len(constraints.constraints) == 2 + + constraint_1 = constraints.constraints[0] assert constraint_1.WhichOneof('constraint') == 'qos_profile' assert constraint_1.qos_profile.qos_profile_id == qos_profile.qos_profile_id assert constraint_1.qos_profile.qos_profile_name == 'QCI_2_voice' + + constraint_2 = constraints.constraints[1] assert constraint_2.WhichOneof('constraint') == 'schedule' assert constraint_2.schedule.start_timestamp == 1726063284.25332 assert constraint_2.schedule.duration_days == 1 diff --git a/src/qos_profile/tests/test_crud.py b/src/qos_profile/tests/test_crud.py index 04125401cd7c20e196b63a81294e0dba71deb892..c5851509b0fd21e4f35021d62ab1a9cbc532cf94 100644 --- a/src/qos_profile/tests/test_crud.py +++ b/src/qos_profile/tests/test_crud.py @@ -92,9 +92,9 @@ def test_get_qos_profile(qos_profile_client: QoSProfileClient): def test_get_qos_profiles(qos_profile_client: QoSProfileClient): qos_profile = create_qos_profile_from_json(qos_profile_data) - qos_profiles_got = list(qos_profile_client.GetQoSProfiles(Empty())) - the_qos_profile = [q for q in qos_profiles_got if q.qos_profile_id == qos_profile.qos_profile_id] + qos_profiles_got = qos_profile_client.GetQoSProfiles(Empty()) LOGGER.info('qos_profile_data = {:s}'.format(grpc_message_to_json_string(qos_profiles_got))) + the_qos_profile = [q for q in qos_profiles_got.qos_profiles if q.qos_profile_id == qos_profile.qos_profile_id] assert len(the_qos_profile) == 
1 assert qos_profile == the_qos_profile[0] diff --git a/src/tests/.gitlab-ci.yml b/src/tests/.gitlab-ci.yml index 787b25ee1c8155b4c06be68c57d0314bf1f02c1d..9c441d746451b4b35718a74c1c4ceb76dad7598e 100644 --- a/src/tests/.gitlab-ci.yml +++ b/src/tests/.gitlab-ci.yml @@ -21,8 +21,8 @@ include: #- local: '/src/tests/ofc23/.gitlab-ci.yml' - local: '/src/tests/ofc24/.gitlab-ci.yml' - local: '/src/tests/eucnc24/.gitlab-ci.yml' - - local: '/src/tests/ofc25-camara-agg-net-controller/.gitlab-ci.yml' - - local: '/src/tests/ofc25-camara-e2e-controller/.gitlab-ci.yml' + #- local: '/src/tests/ofc25-camara-agg-net-controller/.gitlab-ci.yml' + #- local: '/src/tests/ofc25-camara-e2e-controller/.gitlab-ci.yml' #- local: '/src/tests/ofc25/.gitlab-ci.yml' - local: '/src/tests/tools/mock_tfs_nbi_dependencies/.gitlab-ci.yml' diff --git a/src/tests/tools/mock_tfs_nbi_dependencies/Config.py b/src/tests/tools/mock_tfs_nbi_dependencies/Config.py index 621bc31872747ca09ca5814d2a40e256809aa876..146fd3a99866511e5c1818b5c8c0859eb5ae57f3 100644 --- a/src/tests/tools/mock_tfs_nbi_dependencies/Config.py +++ b/src/tests/tools/mock_tfs_nbi_dependencies/Config.py @@ -29,6 +29,7 @@ LOG_LEVEL = str(get_log_level()) MOCKED_SERVICES = [ ServiceNameEnum.CONTEXT, ServiceNameEnum.DEVICE, + ServiceNameEnum.QOSPROFILE, ServiceNameEnum.SERVICE, ServiceNameEnum.SLICE, ] diff --git a/src/tests/tools/mock_tfs_nbi_dependencies/MockService_Dependencies.py b/src/tests/tools/mock_tfs_nbi_dependencies/MockService_Dependencies.py index 74ef6bdadf84444049b19d280f7a69594023cbe9..f8a57154a552475aed424a920665d89fb5d22418 100644 --- a/src/tests/tools/mock_tfs_nbi_dependencies/MockService_Dependencies.py +++ b/src/tests/tools/mock_tfs_nbi_dependencies/MockService_Dependencies.py @@ -15,18 +15,21 @@ from typing import Optional, Union from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server from common.proto.device_pb2_grpc import add_DeviceServiceServicer_to_server +from 
common.proto.qos_profile_pb2_grpc import add_QoSProfileServiceServicer_to_server from common.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server from common.proto.slice_pb2_grpc import add_SliceServiceServicer_to_server from common.tests.MockServicerImpl_Context import MockServicerImpl_Context from common.tests.MockServicerImpl_Device import MockServicerImpl_Device +from common.tests.MockServicerImpl_QoSProfile import MockServicerImpl_QoSProfile from common.tests.MockServicerImpl_Service import MockServicerImpl_Service from common.tests.MockServicerImpl_Slice import MockServicerImpl_Slice from common.tools.service.GenericGrpcService import GenericGrpcService class MockService_Dependencies(GenericGrpcService): - # Mock Service implementing Mock Context, Device, Service and Slice to - # simplify unitary tests of the NBI component. + # Mock Service implementing multiple mock components to simplify + # unitary tests of the NBI component. + # Mocks implemented: Context, Device, QoS Profile, Service and Slice def __init__( self, bind_port : Union[str, int], bind_address : Optional[str] = None, @@ -53,3 +56,6 @@ class MockService_Dependencies(GenericGrpcService): self.slice_servicer = MockServicerImpl_Slice() add_SliceServiceServicer_to_server(self.slice_servicer, self.server) + + self.qos_profile_servicer = MockServicerImpl_QoSProfile() + add_QoSProfileServiceServicer_to_server(self.qos_profile_servicer, self.server)