diff --git a/proto/service.proto b/proto/service.proto index 33feb28717adf9671b8ebcf6ed5a7ba757ff6c6f..a44d05210f17af124646036097ad05e9761ef264 100644 --- a/proto/service.proto +++ b/proto/service.proto @@ -8,7 +8,7 @@ service ServiceService { rpc GetServiceList (context.Empty) returns (ServiceList) {} rpc CreateService (Service) returns (ServiceId) {} rpc UpdateService (Service) returns (ServiceId) {} - rpc DeleteService (Service) returns (ServiceId) {} + rpc DeleteService (Service) returns (context.Empty) {} rpc GetServiceById (ServiceId) returns (Service) {} rpc GetConnectionList (context.Empty) returns (ConnectionList) {} diff --git a/report_coverage_all.sh b/report_coverage_all.sh index 752f1383da444eca42b91e1301f4abf0402b7e70..e8e11c913accf4b32e2d78ae38d1b9c8d5ee23a4 100755 --- a/report_coverage_all.sh +++ b/report_coverage_all.sh @@ -6,6 +6,6 @@ RCFILE=~/teraflow/controller/coverage/.coveragerc echo echo "Coverage report:" echo "----------------" -coverage report --rcfile=$RCFILE --skip-covered --sort cover --show-missing +coverage report --rcfile=$RCFILE --sort cover --show-missing --skip-covered #coverage html --rcfile=$RCFILE #coverage xml --rcfile=$RCFILE diff --git a/report_coverage_context.sh b/report_coverage_context.sh index f2f71fa744b5d8209589b283c7a375b4f25be0c8..3a404a62698cdd95f94c9ed7d4c8b4b073778d08 100755 --- a/report_coverage_context.sh +++ b/report_coverage_context.sh @@ -1,3 +1,3 @@ #!/bin/bash -./report_coverage_all.sh | grep --color -E -i "^.*context.*$|$" +./report_coverage_all.sh | grep --color -E -i "^context/.*$|$" diff --git a/report_coverage_device.sh b/report_coverage_device.sh index b4215cd30141bb524a7d99717841de127d7cda15..be2612d89ce56d518d992327f93a24853e591a4d 100755 --- a/report_coverage_device.sh +++ b/report_coverage_device.sh @@ -1,3 +1,3 @@ #!/bin/bash -./report_coverage_all.sh | grep --color -E -i "^.*device.*$|$" +./report_coverage_all.sh | grep --color -E -i "^device/.*$|$" diff --git a/report_coverage_service.sh b/report_coverage_service.sh new file mode 100755 index 0000000000000000000000000000000000000000..160f52f126a78be5338456667a97ec9184be421a --- /dev/null +++ b/report_coverage_service.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +./report_coverage_all.sh | grep --color -E -i "^service/.*$|$" diff --git a/run_unitary_tests.sh b/run_unitary_tests.sh index 84b8010341b9fa70275cd2c1039c8c0cca3d2fdc..766d9029232b8fb5db94cfb2a5507fb8270d4494 100755 --- a/run_unitary_tests.sh +++ b/run_unitary_tests.sh @@ -8,7 +8,8 @@ coverage run --rcfile=$RCFILE -m pytest --log-level=DEBUG --verbose \ common/database/tests/test_unitary.py \ common/database/tests/test_engine_inmemory.py \ context/tests/test_unitary.py \ - device/tests/test_unitary.py + device/tests/test_unitary.py \ + service/tests/test_unitary.py ## Run integration tests and analyze coverage of code at same time #coverage run --rcfile=$RCFILE --append -m pytest --log-level=WARN --verbose \ diff --git a/src/common/database/api/Database.py b/src/common/database/api/Database.py index 64a49d7f22ad2231447895f4bad7ca0c26450ff1..319f9bbd380241d11cf6a34cdd2f14a003aed560 100644 --- a/src/common/database/api/Database.py +++ b/src/common/database/api/Database.py @@ -3,20 +3,25 @@ from typing import List from common.database.api.Exceptions import WrongDatabaseEngine, MutexException from common.database.api.context.Context import Context from common.database.api.context.Keys import KEY_CONTEXTS +from common.database.api.entity._Entity import _Entity from common.database.api.entity.EntityCollection import 
EntityCollection from common.database.engines._DatabaseEngine import _DatabaseEngine LOGGER = logging.getLogger(__name__) -class Database: +class Database(_Entity): def __init__(self, database_engine : _DatabaseEngine): - if not isinstance(database_engine, _DatabaseEngine): + if not isinstance(database_engine, _DatabaseEngine): raise WrongDatabaseEngine('database_engine must inherit from _DatabaseEngine') self._database_engine = database_engine + super().__init__(self, 'root', 'database', {}, {}) self._acquired = False self._owner_key = None self._contexts = EntityCollection(self, KEY_CONTEXTS) + @property + def parent(self) -> 'Database': return self + @property def database_engine(self) -> _DatabaseEngine: return self._database_engine @@ -45,4 +50,4 @@ class Database: @property def contexts(self) -> EntityCollection: return self._contexts - def context(self, context_uuid : str) -> Context: return Context(context_uuid, self._database_engine) + def context(self, context_uuid : str) -> Context: return Context(context_uuid, self) diff --git a/src/common/database/api/context/Context.py b/src/common/database/api/context/Context.py index bf6e56a1f350fb5c3223546479d1a8672b13e1f3..f4b530dd2e4519568f9f27c97b0f78d8efbaa53d 100644 --- a/src/common/database/api/context/Context.py +++ b/src/common/database/api/context/Context.py @@ -1,22 +1,24 @@ -from typing import Dict, List +from typing import TYPE_CHECKING, Dict, List from common.database.api.context.service.Service import Service from common.database.api.context.topology.Topology import Topology from common.database.api.context.Keys import KEY_CONTEXT, KEY_SERVICES, KEY_TOPOLOGIES -from common.database.api.entity._RootEntity import _RootEntity +from common.database.api.entity._Entity import _Entity from common.database.api.entity.EntityCollection import EntityCollection -from common.database.engines._DatabaseEngine import _DatabaseEngine + +if TYPE_CHECKING: + from common.database.api.Database import Database VALIDATORS = {} # no attributes accepted TRANSCODERS = {} # no transcoding applied to attributes -class Context(_RootEntity): - def __init__(self, context_uuid : str, database_engine : _DatabaseEngine): - super().__init__(database_engine, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS) +class Context(_Entity): + def __init__(self, context_uuid : str, parent : 'Database'): + super().__init__(parent, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS) self._topologies = EntityCollection(self, KEY_TOPOLOGIES) self._services = EntityCollection(self, KEY_SERVICES) @property - def parent(self) -> 'Context': return self + def parent(self) -> 'Database': return self._parent @property def context(self) -> 'Context': return self @@ -34,11 +36,14 @@ class Context(_RootEntity): def service(self, service_uuid : str) -> Service: return Service(service_uuid, self) - def create(self) -> 'Context': return self + def create(self) -> 'Context': + self.parent.contexts.add(self.context_uuid) + return self def delete(self): for service_uuid in self.services.get(): self.service(service_uuid).delete() for topology_uuid in self.topologies.get(): self.topology(topology_uuid).delete() + self.parent.contexts.delete(self.context_uuid) self.attributes.delete() def dump_id(self) -> Dict: diff --git a/src/common/database/api/entity/_Entity.py b/src/common/database/api/entity/_Entity.py index 9d0b3dfb05f58e9af1a595e7527f3e48b5bb7a5d..784ffbf61c7958329c28f1c7ac371f5126289971 100644 --- a/src/common/database/api/entity/_Entity.py +++ 
b/src/common/database/api/entity/_Entity.py @@ -1,6 +1,6 @@ from typing import Any, Callable, Dict -from ...engines._DatabaseEngine import _DatabaseEngine -from .EntityAttributes import EntityAttributes +from common.database.engines._DatabaseEngine import _DatabaseEngine +from common.database.api.entity.EntityAttributes import EntityAttributes class _Entity: def __init__(self, parent, entity_uuid : str, attributes_key : str, diff --git a/src/common/database/api/entity/_RootEntity.py b/src/common/database/api/entity/_RootEntity.py deleted file mode 100644 index 6047bec8b268d4e412161d4515b2c3c7c56d800a..0000000000000000000000000000000000000000 --- a/src/common/database/api/entity/_RootEntity.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any, Callable, Dict -from ._Entity import _Entity -from ...engines._DatabaseEngine import _DatabaseEngine - -class _RootEntity(_Entity): - def __init__(self, database_engine : _DatabaseEngine, entity_uuid: str, attributes_key: str, - attributes_validators: Dict[str, Callable[[Any], bool]], - attribute_transcoders: Dict[str, Dict[Any, Callable[[Any], Any]]]): - self._database_engine = database_engine - super().__init__(self, entity_uuid, attributes_key, attributes_validators, attribute_transcoders) - - @property - def parent(self) -> '_RootEntity': return self - - @property - def database_engine(self) -> _DatabaseEngine: return self._database_engine diff --git a/src/common/database/tests/script.py b/src/common/database/tests/script.py index 1387252a4dbdbe8759c972650ce8b640f51570bb..78efa9d6aaaf7c5288faf112b70d200b917b82f1 100644 --- a/src/common/database/tests/script.py +++ b/src/common/database/tests/script.py @@ -7,114 +7,127 @@ from common.database.api.context.topology.device.OperationalStatus import Operat LOGGER = logging.getLogger(__name__) -def populate_example(database : Database, context_uuid=DEFAULT_CONTEXT_ID, topology_uuid=DEFAULT_TOPOLOGY_ID): - database.clear_all() +def populate_example( + database : Database, context_uuid : str = DEFAULT_CONTEXT_ID, topology_uuid : str = DEFAULT_TOPOLOGY_ID, + add_devices : bool = True, add_links : bool = True, add_services : bool = True): + + if add_links: + if not add_devices: raise Exception('add_links requires add_devices') + + if add_services: + if not add_devices: raise Exception('add_services requires add_devices') + if not add_links: raise Exception('add_services requires add_links') with database: + database.clear_all() + context = database.context(context_uuid).create() topology = context.topology(topology_uuid).create() - dev_1 = topology.device('DEV1').create( - device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) - dev1_ep2 = dev_1.endpoint('EP2').create(port_type='WDM') - dev1_ep3 = dev_1.endpoint('EP3').create(port_type='WDM') - dev1_ep4 = dev_1.endpoint('EP4').create(port_type='WDM') - dev1_ep101 = dev_1.endpoint('EP5').create(port_type='OCH') - dev1_ep102 = dev_1.endpoint('EP6').create(port_type='OCH') - dev1_ep103 = dev_1.endpoint('EP7').create(port_type='OCH') - - dev_2 = topology.device('DEV2').create( - device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) - dev2_ep1 = dev_2.endpoint('EP1').create(port_type='WDM') - dev2_ep3 = dev_2.endpoint('EP3').create(port_type='WDM') - dev2_ep4 = dev_2.endpoint('EP4').create(port_type='WDM') - dev2_ep101 = dev_2.endpoint('EP5').create(port_type='OCH') - dev2_ep102 = dev_2.endpoint('EP6').create(port_type='OCH') - dev2_ep103 = 
dev_2.endpoint('EP7').create(port_type='OCH') - - dev_3 = topology.device('DEV3').create( - device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) - dev3_ep1 = dev_3.endpoint('EP1').create(port_type='WDM') - dev3_ep2 = dev_3.endpoint('EP2').create(port_type='WDM') - dev3_ep4 = dev_3.endpoint('EP4').create(port_type='WDM') - dev3_ep101 = dev_3.endpoint('EP5').create(port_type='OCH') - dev3_ep102 = dev_3.endpoint('EP6').create(port_type='OCH') - dev3_ep103 = dev_3.endpoint('EP7').create(port_type='OCH') - - dev_4 = topology.device('DEV4').create( - device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) - dev4_ep1 = dev_4.endpoint('EP1').create(port_type='WDM') - dev4_ep2 = dev_4.endpoint('EP2').create(port_type='WDM') - dev4_ep3 = dev_4.endpoint('EP3').create(port_type='WDM') - dev4_ep101 = dev_4.endpoint('EP5').create(port_type='OCH') - dev4_ep102 = dev_4.endpoint('EP6').create(port_type='OCH') - dev4_ep103 = dev_4.endpoint('EP7').create(port_type='OCH') - - link_dev1_to_dev2 = topology.link('DEV1/EP2 ==> DEV2/EP1').create() - link_dev1_to_dev2.endpoint('DEV1/EP2').create(dev1_ep2) - link_dev1_to_dev2.endpoint('DEV2/EP1').create(dev2_ep1) - - link_dev1_to_dev3 = topology.link('DEV1/EP3 ==> DEV3/EP1').create() - link_dev1_to_dev3.endpoint('DEV1/EP3').create(dev1_ep3) - link_dev1_to_dev3.endpoint('DEV3/EP1').create(dev3_ep1) - - link_dev1_to_dev4 = topology.link('DEV1/EP4 ==> DEV4/EP1').create() - link_dev1_to_dev4.endpoint('DEV1/EP4').create(dev1_ep4) - link_dev1_to_dev4.endpoint('DEV4/EP1').create(dev4_ep1) - - link_dev2_to_dev1 = topology.link('DEV2/EP1 ==> DEV1/EP2').create() - link_dev2_to_dev1.endpoint('DEV2/EP1').create(dev2_ep1) - link_dev2_to_dev1.endpoint('DEV1/EP2').create(dev1_ep2) - - link_dev2_to_dev3 = topology.link('DEV2/EP3 ==> DEV3/EP2').create() - link_dev2_to_dev3.endpoint('DEV2/EP3').create(dev2_ep3) - link_dev2_to_dev3.endpoint('DEV3/EP2').create(dev3_ep2) - - link_dev2_to_dev4 = topology.link('DEV2/EP4 ==> DEV4/EP2').create() - link_dev2_to_dev4.endpoint('DEV2/EP4').create(dev2_ep4) - link_dev2_to_dev4.endpoint('DEV4/EP2').create(dev4_ep2) - - link_dev3_to_dev1 = topology.link('DEV3/EP1 ==> DEV1/EP3').create() - link_dev3_to_dev1.endpoint('DEV3/EP1').create(dev3_ep1) - link_dev3_to_dev1.endpoint('DEV1/EP3').create(dev1_ep3) - - link_dev3_to_dev2 = topology.link('DEV3/EP2 ==> DEV2/EP3').create() - link_dev3_to_dev2.endpoint('DEV3/EP2').create(dev3_ep2) - link_dev3_to_dev2.endpoint('DEV2/EP3').create(dev2_ep3) - - link_dev3_to_dev4 = topology.link('DEV3/EP4 ==> DEV4/EP3').create() - link_dev3_to_dev4.endpoint('DEV3/EP4').create(dev3_ep4) - link_dev3_to_dev4.endpoint('DEV4/EP3').create(dev4_ep3) - - link_dev4_to_dev1 = topology.link('DEV4/EP1 ==> DEV1/EP4').create() - link_dev4_to_dev1.endpoint('DEV4/EP1').create(dev4_ep1) - link_dev4_to_dev1.endpoint('DEV1/EP4').create(dev1_ep4) - - link_dev4_to_dev2 = topology.link('DEV4/EP2 ==> DEV2/EP4').create() - link_dev4_to_dev2.endpoint('DEV4/EP2').create(dev4_ep2) - link_dev4_to_dev2.endpoint('DEV2/EP4').create(dev2_ep4) - - link_dev4_to_dev3 = topology.link('DEV4/EP3 ==> DEV3/EP4').create() - link_dev4_to_dev3.endpoint('DEV4/EP3').create(dev4_ep3) - link_dev4_to_dev3.endpoint('DEV3/EP4').create(dev3_ep4) - - service = context.service('S01').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) - service.endpoint('S01/EP01').create(dev1_ep101) - service.endpoint('S01/EP02').create(dev2_ep101) - 
service.endpoint('S01/EP03').create(dev3_ep101) - service.endpoint('S01/EP04').create(dev4_ep101) - - service = context.service('S02').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) - service.endpoint('S02/EP01').create(dev1_ep102) - service.endpoint('S02/EP02').create(dev2_ep102) - service.endpoint('S02/EP03').create(dev3_ep102) - service.endpoint('S02/EP04').create(dev4_ep102) - - service = context.service('S03').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) - service.endpoint('S03/EP01').create(dev1_ep103) - service.endpoint('S03/EP02').create(dev2_ep103) - service.endpoint('S03/EP03').create(dev3_ep103) - service.endpoint('S03/EP04').create(dev4_ep103) + if add_devices: + dev_1 = topology.device('DEV1').create( + device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) + dev1_ep2 = dev_1.endpoint('EP2').create(port_type='WDM') + dev1_ep3 = dev_1.endpoint('EP3').create(port_type='WDM') + dev1_ep4 = dev_1.endpoint('EP4').create(port_type='WDM') + dev1_ep5 = dev_1.endpoint('EP5').create(port_type='OCH') + dev1_ep6 = dev_1.endpoint('EP6').create(port_type='OCH') + dev1_ep7 = dev_1.endpoint('EP7').create(port_type='OCH') + + dev_2 = topology.device('DEV2').create( + device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) + dev2_ep1 = dev_2.endpoint('EP1').create(port_type='WDM') + dev2_ep3 = dev_2.endpoint('EP3').create(port_type='WDM') + dev2_ep4 = dev_2.endpoint('EP4').create(port_type='WDM') + dev2_ep5 = dev_2.endpoint('EP5').create(port_type='OCH') + dev2_ep6 = dev_2.endpoint('EP6').create(port_type='OCH') + dev2_ep7 = dev_2.endpoint('EP7').create(port_type='OCH') + + dev_3 = topology.device('DEV3').create( + device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) + dev3_ep1 = dev_3.endpoint('EP1').create(port_type='WDM') + dev3_ep2 = dev_3.endpoint('EP2').create(port_type='WDM') + dev3_ep4 = dev_3.endpoint('EP4').create(port_type='WDM') + dev3_ep5 = dev_3.endpoint('EP5').create(port_type='OCH') + dev3_ep6 = dev_3.endpoint('EP6').create(port_type='OCH') + dev3_ep7 = dev_3.endpoint('EP7').create(port_type='OCH') + + dev_4 = topology.device('DEV4').create( + device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED) + dev4_ep1 = dev_4.endpoint('EP1').create(port_type='WDM') + dev4_ep2 = dev_4.endpoint('EP2').create(port_type='WDM') + dev4_ep3 = dev_4.endpoint('EP3').create(port_type='WDM') + dev4_ep5 = dev_4.endpoint('EP5').create(port_type='OCH') + dev4_ep6 = dev_4.endpoint('EP6').create(port_type='OCH') + dev4_ep7 = dev_4.endpoint('EP7').create(port_type='OCH') + + if add_links: + link_dev1_to_dev2 = topology.link('DEV1/EP2 ==> DEV2/EP1').create() + link_dev1_to_dev2.endpoint('DEV1/EP2').create(dev1_ep2) + link_dev1_to_dev2.endpoint('DEV2/EP1').create(dev2_ep1) + + link_dev1_to_dev3 = topology.link('DEV1/EP3 ==> DEV3/EP1').create() + link_dev1_to_dev3.endpoint('DEV1/EP3').create(dev1_ep3) + link_dev1_to_dev3.endpoint('DEV3/EP1').create(dev3_ep1) + + link_dev1_to_dev4 = topology.link('DEV1/EP4 ==> DEV4/EP1').create() + link_dev1_to_dev4.endpoint('DEV1/EP4').create(dev1_ep4) + link_dev1_to_dev4.endpoint('DEV4/EP1').create(dev4_ep1) + + link_dev2_to_dev1 = topology.link('DEV2/EP1 ==> DEV1/EP2').create() + link_dev2_to_dev1.endpoint('DEV2/EP1').create(dev2_ep1) + link_dev2_to_dev1.endpoint('DEV1/EP2').create(dev1_ep2) + + link_dev2_to_dev3 = topology.link('DEV2/EP3 ==> DEV3/EP2').create() + 
link_dev2_to_dev3.endpoint('DEV2/EP3').create(dev2_ep3) + link_dev2_to_dev3.endpoint('DEV3/EP2').create(dev3_ep2) + + link_dev2_to_dev4 = topology.link('DEV2/EP4 ==> DEV4/EP2').create() + link_dev2_to_dev4.endpoint('DEV2/EP4').create(dev2_ep4) + link_dev2_to_dev4.endpoint('DEV4/EP2').create(dev4_ep2) + + link_dev3_to_dev1 = topology.link('DEV3/EP1 ==> DEV1/EP3').create() + link_dev3_to_dev1.endpoint('DEV3/EP1').create(dev3_ep1) + link_dev3_to_dev1.endpoint('DEV1/EP3').create(dev1_ep3) + + link_dev3_to_dev2 = topology.link('DEV3/EP2 ==> DEV2/EP3').create() + link_dev3_to_dev2.endpoint('DEV3/EP2').create(dev3_ep2) + link_dev3_to_dev2.endpoint('DEV2/EP3').create(dev2_ep3) + + link_dev3_to_dev4 = topology.link('DEV3/EP4 ==> DEV4/EP3').create() + link_dev3_to_dev4.endpoint('DEV3/EP4').create(dev3_ep4) + link_dev3_to_dev4.endpoint('DEV4/EP3').create(dev4_ep3) + + link_dev4_to_dev1 = topology.link('DEV4/EP1 ==> DEV1/EP4').create() + link_dev4_to_dev1.endpoint('DEV4/EP1').create(dev4_ep1) + link_dev4_to_dev1.endpoint('DEV1/EP4').create(dev1_ep4) + + link_dev4_to_dev2 = topology.link('DEV4/EP2 ==> DEV2/EP4').create() + link_dev4_to_dev2.endpoint('DEV4/EP2').create(dev4_ep2) + link_dev4_to_dev2.endpoint('DEV2/EP4').create(dev2_ep4) + + link_dev4_to_dev3 = topology.link('DEV4/EP3 ==> DEV3/EP4').create() + link_dev4_to_dev3.endpoint('DEV4/EP3').create(dev4_ep3) + link_dev4_to_dev3.endpoint('DEV3/EP4').create(dev3_ep4) + + if add_services: + service = context.service('S01').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) + service.endpoint('S01/EP01').create(dev1_ep5) + service.endpoint('S01/EP02').create(dev2_ep5) + service.endpoint('S01/EP03').create(dev3_ep5) + service.endpoint('S01/EP04').create(dev4_ep5) + + service = context.service('S02').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) + service.endpoint('S02/EP01').create(dev1_ep6) + service.endpoint('S02/EP02').create(dev2_ep6) + service.endpoint('S02/EP03').create(dev3_ep6) + service.endpoint('S02/EP04').create(dev4_ep6) + + service = context.service('S03').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED) + service.endpoint('S03/EP01').create(dev1_ep7) + service.endpoint('S03/EP02').create(dev2_ep7) + service.endpoint('S03/EP03').create(dev3_ep7) + service.endpoint('S03/EP04').create(dev4_ep7) def sequence(database : Database): populate_example(database) diff --git a/src/common/database/tests/test_engine_inmemory.py b/src/common/database/tests/test_engine_inmemory.py index 403f428200a6580a0fc32bfe2b90ce20b855d2a9..e3afd995573b04926bbec080e2dd5797eac6ce00 100644 --- a/src/common/database/tests/test_engine_inmemory.py +++ b/src/common/database/tests/test_engine_inmemory.py @@ -1,6 +1,6 @@ import logging -from ..Factory import get_database, DatabaseEngineEnum -from .script import sequence +from common.database.Factory import get_database, DatabaseEngineEnum +from common.database.tests.script import sequence logging.basicConfig(level=logging.INFO) diff --git a/src/common/database/tests/test_engine_redis.py b/src/common/database/tests/test_engine_redis.py index 39df1cf393d22fccfd1f14e0b47327e9cf79f73e..e68600db6a8e196890526c97d79b132c03dd4b32 100644 --- a/src/common/database/tests/test_engine_redis.py +++ b/src/common/database/tests/test_engine_redis.py @@ -1,6 +1,6 @@ import logging -from ..Factory import get_database, DatabaseEngineEnum -from .script import sequence +from common.database.Factory import get_database, DatabaseEngineEnum +from common.database.tests.script import sequence 
logging.basicConfig(level=logging.INFO) diff --git a/src/common/database/tests/test_unitary.py b/src/common/database/tests/test_unitary.py index c00e2f7c9165ee84bccf4a34f06c0915bf4726c9..8589c7cfacb04505c75108f510b5f7bcca4005a2 100644 --- a/src/common/database/tests/test_unitary.py +++ b/src/common/database/tests/test_unitary.py @@ -1,11 +1,10 @@ import logging, pytest -from ..api.Database import Database -from ..api.entity._Entity import _Entity -from ..api.entity._RootEntity import _RootEntity -from ..api.entity.EntityAttributes import EntityAttributes -from ..api.Exceptions import WrongDatabaseEngine -from ..engines._DatabaseEngine import _DatabaseEngine -from ..engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine +from common.database.api.Database import Database +from common.database.api.entity._Entity import _Entity +from common.database.api.entity.EntityAttributes import EntityAttributes +from common.database.api.Exceptions import WrongDatabaseEngine +from common.database.engines._DatabaseEngine import _DatabaseEngine +from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine logging.basicConfig(level=logging.INFO) @@ -21,10 +20,6 @@ def test_database_gets_correct_database_engine(): def test_entity_gets_invalid_parameters(): - class RootMockEntity(_RootEntity): - def __init__(self, database_engine : _DatabaseEngine): - super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {}) - # should fail with invalid parent with pytest.raises(AttributeError) as e: _Entity(None, 'valid-uuid', 'valid-attributes-key', {}, {}) @@ -32,45 +27,41 @@ def test_entity_gets_invalid_parameters(): # should fail with invalid entity uuid with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {}) + _Entity(Database(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {}) assert str(e.value) == 'entity_uuid must be a non-empty instance of str' # should fail with invalid entity uuid with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {}) + _Entity(Database(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {}) assert str(e.value) == 'entity_uuid must be a non-empty instance of str' # should fail with invalid attribute key with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {}) + _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {}) assert str(e.value) == 'attributes_key must be a non-empty instance of str' # should fail with invalid attribute key with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {}) + _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {}) assert str(e.value) == 'attributes_key must be a non-empty instance of str' # should fail with invalid attribute validators with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {}) + _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {}) assert str(e.value) == 'attribute_validators must be an instance of dict' # should fail with invalid attribute transcoders with pytest.raises(AttributeError) as e: - _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, []) + _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 
'valid-attributes-key', {}, []) assert str(e.value) == 'attribute_transcoders must be an instance of dict' # should work - assert _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None + assert _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None def test_entity_attributes_gets_invalid_parameters(): - class RootMockEntity(_RootEntity): - def __init__(self, database_engine : _DatabaseEngine): - super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {}) - # should work - root_entity = RootMockEntity(InMemoryDatabaseEngine()) + root_entity = Database(InMemoryDatabaseEngine()) validators = {'attr': lambda v: True} entity_attrs = EntityAttributes(root_entity, 'valid-attributes-key', validators, {}) assert entity_attrs is not None diff --git a/src/common/tests/Assertions.py b/src/common/tests/Assertions.py index 7e08621f107805dd89978ad380675ef7b547d582..8417a42345c8b9acf9a7e76e4ea9c2e7add03a89 100644 --- a/src/common/tests/Assertions.py +++ b/src/common/tests/Assertions.py @@ -10,6 +10,12 @@ def validate_uuid(message, allow_empty=False): if allow_empty: return assert len(message['uuid']) > 1 +def validate_context_id(message): + assert type(message) is dict + assert len(message.keys()) == 1 + assert 'contextUuid' in message + validate_uuid(message['contextUuid']) + def validate_device_id(message): assert type(message) is dict assert len(message.keys()) == 1 @@ -22,12 +28,77 @@ def validate_link_id(message): assert 'link_id' in message validate_uuid(message['link_id']) +def validate_topology_id(message): + assert type(message) is dict + assert len(message.keys()) == 2 + assert 'contextId' in message + validate_context_id(message['contextId']) + assert 'topoId' in message + validate_uuid(message['topoId']) + +def validate_device_config(message): + assert type(message) is dict + assert len(message.keys()) == 1 + assert 'device_config' in message + assert type(message['device_config']) is str + +def validate_device_operational_status(message): + assert type(message) is str + assert message in ['KEEP_STATE', 'ENABLED', 'DISABLED'] + +def validate_endpoint_id(message): + assert type(message) is dict + assert len(message.keys()) == 3 + assert 'topoId' in message + validate_topology_id(message['topoId']) + assert 'dev_id' in message + validate_device_id(message['dev_id']) + assert 'port_id' in message + validate_uuid(message['port_id']) + +def validate_endpoint(message): + assert type(message) is dict + assert len(message.keys()) == 2 + assert 'port_id' in message + validate_endpoint_id(message['port_id']) + assert 'port_type' in message + assert type(message['port_type']) is str + +def validate_device(message): + assert type(message) is dict + assert len(message.keys()) == 5 + assert 'device_id' in message + validate_device_id(message['device_id']) + assert 'device_type' in message + assert type(message['device_type']) is str + assert 'device_config' in message + validate_device_config(message['device_config']) + assert 'devOperationalStatus' in message + validate_device_operational_status(message['devOperationalStatus']) + assert 'endpointList' in message + assert type(message['endpointList']) is list + for endpoint in message['endpointList']: validate_endpoint(endpoint) + +def validate_link(message): + assert type(message) is dict + assert len(message.keys()) == 2 + assert 'link_id' in message + validate_link_id(message['link_id']) + assert 'endpointList' in message + assert 
type(message['endpointList']) is list + for endpoint_id in message['endpointList']: validate_endpoint_id(endpoint_id) + def validate_topology(message): assert type(message) is dict assert len(message.keys()) > 0 assert 'topoId' in message + validate_topology_id(message['topoId']) assert 'device' in message + assert type(message['device']) is list + for device in message['device']: validate_device(device) assert 'link' in message + assert type(message['link']) is list + for link in message['link']: validate_link(link) def validate_topology_is_empty(message): validate_topology(message) @@ -41,3 +112,31 @@ def validate_topology_has_devices(message): def validate_topology_has_links(message): validate_topology(message) assert len(message['link']) > 0 + +def validate_service_id(message): + assert type(message) is dict + assert len(message.keys()) == 2 + assert 'contextId' in message + validate_context_id(message['contextId']) + assert 'cs_id' in message + validate_uuid(message['cs_id']) + +def validate_service(message): + assert type(message) is dict + assert len(message.keys()) > 1 + assert 'cs_id' in message + +def validate_service_list(message): + assert type(message) is dict + assert len(message.keys()) == 1 + assert 'cs' in message + assert type(message['cs']) is list + for cs in message['cs']: validate_service(cs) + +def validate_service_list_is_empty(message): + validate_service_list(message) + assert len(message['cs']) == 0 + +def validate_service_list_is_not_empty(message): + validate_service_list(message) + assert len(message['cs']) > 0 diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py index e68dc6a24ecf8d3162da084a3ab78c604d381767..058155aa7ac44188139036896d8ae0a14f68a59b 100644 --- a/src/context/service/ContextServiceServicerImpl.py +++ b/src/context/service/ContextServiceServicerImpl.py @@ -62,11 +62,13 @@ class ContextServiceServicerImpl(ContextServiceServicer): GETTOPOLOGY_COUNTER_COMPLETED.inc() return reply except ServiceException as e: # pragma: no cover (ServiceException not thrown) - grpc_context.abort(e.code, e.details) # pragma: no cover (ServiceException not thrown) + LOGGER.exception('GetTopology exception') + GETTOPOLOGY_COUNTER_FAILED.inc() + grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('GetTopology exception') # pragma: no cover - GETTOPOLOGY_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('GetTopology exception') + GETTOPOLOGY_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @ADDLINK_HISTOGRAM_DURATION.time() def AddLink(self, request : Link, grpc_context : grpc.ServicerContext) -> LinkId: @@ -143,16 +145,16 @@ class ContextServiceServicerImpl(ContextServiceServicer): 'Context({})/Topology({})/Device({}) in Endpoint(#{}) of Link({})', 'does not exist in the database.', ]) - msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, i, link_id) + msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, i, link_id) raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) added_device_and_endpoints = added_devices_and_endpoints.setdefault(ep_device_id, set()) # should never happen since same device cannot appear 2 times in the link if ep_port_id in added_device_and_endpoints: # pragma: no cover - msg = 'Duplicated Device({})/Port({}) in Endpoint(#{}) of Link({}).' 
# pragma: no cover - msg = msg.format(ep_device_id, ep_port_id, i, link_id) # pragma: no cover - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) # pragma: no cover + msg = 'Duplicated Device({})/Port({}) in Endpoint(#{}) of Link({}).' + msg = msg.format(ep_device_id, ep_port_id, i, link_id) + raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) if not db_topology.device(ep_device_id).endpoints.contains(ep_port_id): msg = ' '.join([ @@ -179,11 +181,13 @@ class ContextServiceServicerImpl(ContextServiceServicer): ADDLINK_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('AddLink exception') + ADDLINK_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('AddLink exception') # pragma: no cover - ADDLINK_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('AddLink exception') + ADDLINK_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @DELETELINK_HISTOGRAM_DURATION.time() def DeleteLink(self, request : LinkId, grpc_context : grpc.ServicerContext) -> Empty: @@ -217,8 +221,10 @@ class ContextServiceServicerImpl(ContextServiceServicer): DELETELINK_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('DeleteLink exception') + DELETELINK_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('DeleteLink exception') # pragma: no cover - DELETELINK_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('DeleteLink exception') + DELETELINK_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py index c1e5d9658787b8150218f67d211744d32a514633..f53df3004c6891d51cc26a58c1d0d9a7d1e0f790 100644 --- a/src/context/tests/test_unitary.py +++ b/src/context/tests/test_unitary.py @@ -66,7 +66,7 @@ def test_get_topology_empty(context_client : ContextClient, context_database : D def test_get_topology_completed(context_client : ContextClient, context_database : Database): # should work - populate_example(context_database) + populate_example(context_database, add_services=False) validate_topology(MessageToDict( context_client.GetTopology(Empty()), including_default_value_fields=True, preserving_proto_field_name=True, @@ -155,7 +155,7 @@ def test_add_link_endpoint_wrong_device(context_client : ContextClient): copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device' context_client.AddLink(Link(**copy_link)) assert e.value.code() == grpc.StatusCode.NOT_FOUND - msg = 'Device(wrong-device) in Endpoint(#0) of Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.' + msg = 'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0) of Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.' assert e.value.details() == msg def test_add_link_endpoint_wrong_port(context_client : ContextClient): @@ -165,7 +165,7 @@ def test_add_link_endpoint_wrong_port(context_client : ContextClient): copy_link['endpointList'][0]['port_id']['uuid'] = 'wrong-port' context_client.AddLink(Link(**copy_link)) assert e.value.code() == grpc.StatusCode.NOT_FOUND - msg = 'Device(DEV1)/Port(wrong-port) in Endpoint(#0) of Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.' 
+ msg = 'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0) of Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.' assert e.value.details() == msg def test_add_link_endpoint_duplicated_device(context_client : ContextClient): diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py index 2bda5d0480294fb6a1dc8ad545db7d7c3e2ddac3..1b1145fcff1e341e7286b71beb5dd2f951e914d0 100644 --- a/src/device/service/DeviceServiceServicerImpl.py +++ b/src/device/service/DeviceServiceServicerImpl.py @@ -165,11 +165,13 @@ class DeviceServiceServicerImpl(DeviceServiceServicer): ADDDEVICE_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('AddDevice exception') + ADDDEVICE_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('AddDevice exception') # pragma: no cover - ADDDEVICE_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('AddDevice exception') + ADDDEVICE_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @CONFIGUREDEVICE_HISTOGRAM_DURATION.time() def ConfigureDevice(self, request : Device, grpc_context : grpc.ServicerContext) -> DeviceId: @@ -260,11 +262,13 @@ class DeviceServiceServicerImpl(DeviceServiceServicer): CONFIGUREDEVICE_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('ConfigureDevice exception') + CONFIGUREDEVICE_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('ConfigureDevice exception') # pragma: no cover - CONFIGUREDEVICE_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('ConfigureDevice exception') + CONFIGUREDEVICE_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @DELETEDEVICE_HISTOGRAM_DURATION.time() def DeleteDevice(self, request : DeviceId, grpc_context : grpc.ServicerContext) -> Empty: @@ -298,8 +302,10 @@ class DeviceServiceServicerImpl(DeviceServiceServicer): DELETEDEVICE_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('DeleteDevice exception') + DELETEDEVICE_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('DeleteDevice exception') # pragma: no cover - DELETEDEVICE_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('DeleteDevice exception') + DELETEDEVICE_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) diff --git a/src/service/client/ServiceClient.py b/src/service/client/ServiceClient.py index 95b18863879d3f3c6d8dfd38a7073eca0a492440..b20112f2ea41abc96362666dba501b1086713b78 100644 --- a/src/service/client/ServiceClient.py +++ b/src/service/client/ServiceClient.py @@ -48,7 +48,7 @@ class ServiceClient: return response @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') - def DeleteService(self, request : Service) -> ServiceId: + def DeleteService(self, request : Service) -> Empty: LOGGER.debug('DeleteService request: {}'.format(request)) response = self.stub.DeleteService(request) LOGGER.debug('DeleteService result: {}'.format(response)) diff --git a/src/service/proto/service_pb2.py 
b/src/service/proto/service_pb2.py index 2b2e9753bb57d6303c5862a1e98c7cd22c6d306a..b48703dd585218b2f2a57d3d00c482f449b005a5 100644 --- a/src/service/proto/service_pb2.py +++ b/src/service/proto/service_pb2.py @@ -21,7 +21,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( syntax='proto3', serialized_options=None, create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto\"+\n\x0bServiceList\x12\x1c\n\x02\x63s\x18\x01 \x03(\x0b\x32\x10.service.Service\"\x87\x02\n\x07Service\x12!\n\x05\x63s_id\x18\x01 \x01(\x0b\x32\x12.service.ServiceId\x12)\n\x0bserviceType\x18\x02 \x01(\x0e\x32\x14.service.ServiceType\x12)\n\x0c\x65ndpointList\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\'\n\nconstraint\x18\x04 \x03(\x0b\x32\x13.context.Constraint\x12+\n\x0cserviceState\x18\x05 \x01(\x0b\x32\x15.service.ServiceState\x12-\n\rserviceConfig\x18\x06 \x01(\x0b\x32\x16.service.ServiceConfig\"&\n\rServiceConfig\x12\x15\n\rserviceConfig\x18\x01 \x01(\t\"P\n\tServiceId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1c\n\x05\x63s_id\x18\x02 \x01(\x0b\x32\r.context.Uuid\":\n\rServiceIdList\x12)\n\rserviceIdList\x18\x01 \x03(\x0b\x32\x12.service.ServiceId\"?\n\x0cServiceState\x12/\n\x0cserviceState\x18\x01 \x01(\x0e\x32\x19.service.ServiceStateEnum\"=\n\x0e\x43onnectionList\x12+\n\x0e\x63onnectionList\x18\x01 \x03(\x0b\x32\x13.service.Connection\"\x84\x01\n\nConnection\x12%\n\x06\x63on_id\x18\x01 \x01(\x0b\x32\x15.service.ConnectionId\x12,\n\x10relatedServiceId\x18\x02 \x01(\x0b\x32\x12.service.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"-\n\x0c\x43onnectionId\x12\x1d\n\x06\x63on_id\x18\x01 \x01(\x0b\x32\r.context.Uuid*M\n\x0bServiceType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04L3NM\x10\x01\x12\x08\n\x04L2NM\x10\x02\x12\x1d\n\x19TAPI_CONNECTIVITY_SERVICE\x10\x03*@\n\x10ServiceStateEnum\x12\x0b\n\x07PLANNED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x13\n\x0fPENDING_REMOVAL\x10\x02\x32\xef\x02\n\x0eServiceService\x12\x38\n\x0eGetServiceList\x12\x0e.context.Empty\x1a\x14.service.ServiceList\"\x00\x12\x37\n\rCreateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x37\n\rDeleteService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x38\n\x0eGetServiceById\x12\x12.service.ServiceId\x1a\x10.service.Service\"\x00\x12>\n\x11GetConnectionList\x12\x0e.context.Empty\x1a\x17.service.ConnectionList\"\x00\x62\x06proto3' + serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto\"+\n\x0bServiceList\x12\x1c\n\x02\x63s\x18\x01 \x03(\x0b\x32\x10.service.Service\"\x87\x02\n\x07Service\x12!\n\x05\x63s_id\x18\x01 \x01(\x0b\x32\x12.service.ServiceId\x12)\n\x0bserviceType\x18\x02 \x01(\x0e\x32\x14.service.ServiceType\x12)\n\x0c\x65ndpointList\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\'\n\nconstraint\x18\x04 \x03(\x0b\x32\x13.context.Constraint\x12+\n\x0cserviceState\x18\x05 \x01(\x0b\x32\x15.service.ServiceState\x12-\n\rserviceConfig\x18\x06 \x01(\x0b\x32\x16.service.ServiceConfig\"&\n\rServiceConfig\x12\x15\n\rserviceConfig\x18\x01 \x01(\t\"P\n\tServiceId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1c\n\x05\x63s_id\x18\x02 \x01(\x0b\x32\r.context.Uuid\":\n\rServiceIdList\x12)\n\rserviceIdList\x18\x01 \x03(\x0b\x32\x12.service.ServiceId\"?\n\x0cServiceState\x12/\n\x0cserviceState\x18\x01 
\x01(\x0e\x32\x19.service.ServiceStateEnum\"=\n\x0e\x43onnectionList\x12+\n\x0e\x63onnectionList\x18\x01 \x03(\x0b\x32\x13.service.Connection\"\x84\x01\n\nConnection\x12%\n\x06\x63on_id\x18\x01 \x01(\x0b\x32\x15.service.ConnectionId\x12,\n\x10relatedServiceId\x18\x02 \x01(\x0b\x32\x12.service.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"-\n\x0c\x43onnectionId\x12\x1d\n\x06\x63on_id\x18\x01 \x01(\x0b\x32\r.context.Uuid*M\n\x0bServiceType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04L3NM\x10\x01\x12\x08\n\x04L2NM\x10\x02\x12\x1d\n\x19TAPI_CONNECTIVITY_SERVICE\x10\x03*@\n\x10ServiceStateEnum\x12\x0b\n\x07PLANNED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x13\n\x0fPENDING_REMOVAL\x10\x02\x32\xeb\x02\n\x0eServiceService\x12\x38\n\x0eGetServiceList\x12\x0e.context.Empty\x1a\x14.service.ServiceList\"\x00\x12\x37\n\rCreateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x33\n\rDeleteService\x12\x10.service.Service\x1a\x0e.context.Empty\"\x00\x12\x38\n\x0eGetServiceById\x12\x12.service.ServiceId\x1a\x10.service.Service\"\x00\x12>\n\x11GetConnectionList\x12\x0e.context.Empty\x1a\x17.service.ConnectionList\"\x00\x62\x06proto3' , dependencies=[context__pb2.DESCRIPTOR,]) @@ -547,7 +547,7 @@ _SERVICESERVICE = _descriptor.ServiceDescriptor( serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=990, - serialized_end=1357, + serialized_end=1353, methods=[ _descriptor.MethodDescriptor( name='GetServiceList', @@ -585,7 +585,7 @@ _SERVICESERVICE = _descriptor.ServiceDescriptor( index=3, containing_service=None, input_type=_SERVICE, - output_type=_SERVICEID, + output_type=context__pb2._EMPTY, serialized_options=None, create_key=_descriptor._internal_create_key, ), diff --git a/src/service/proto/service_pb2_grpc.py b/src/service/proto/service_pb2_grpc.py index 05be96c21a167997a6963af6223cf1c7e2b2acbe..50e9d7c2496080486d66b4efac4577a13389b79b 100644 --- a/src/service/proto/service_pb2_grpc.py +++ b/src/service/proto/service_pb2_grpc.py @@ -33,7 +33,7 @@ class ServiceServiceStub(object): self.DeleteService = channel.unary_unary( '/service.ServiceService/DeleteService', request_serializer=service__pb2.Service.SerializeToString, - response_deserializer=service__pb2.ServiceId.FromString, + response_deserializer=context__pb2.Empty.FromString, ) self.GetServiceById = channel.unary_unary( '/service.ServiceService/GetServiceById', @@ -107,7 +107,7 @@ def add_ServiceServiceServicer_to_server(servicer, server): 'DeleteService': grpc.unary_unary_rpc_method_handler( servicer.DeleteService, request_deserializer=service__pb2.Service.FromString, - response_serializer=service__pb2.ServiceId.SerializeToString, + response_serializer=context__pb2.Empty.SerializeToString, ), 'GetServiceById': grpc.unary_unary_rpc_method_handler( servicer.GetServiceById, @@ -193,7 +193,7 @@ class ServiceService(object): metadata=None): return grpc.experimental.unary_unary(request, target, '/service.ServiceService/DeleteService', service__pb2.Service.SerializeToString, - service__pb2.ServiceId.FromString, + context__pb2.Empty.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/src/service/service/ServiceService.py b/src/service/service/ServiceService.py index be7d73c5434014d0d2c2d2b3e197cdef2932c86e..44573b0b92c7d4f2a6740e99eeb22cf1ff4d8a65 100644 --- a/src/service/service/ServiceService.py +++ 
b/src/service/service/ServiceService.py @@ -20,7 +20,7 @@ class ServiceService: self.endpoint = None self.max_workers = max_workers self.grace_period = grace_period - self.device_servicer = None + self.service_servicer = None self.health_servicer = None self.pool = None self.server = None @@ -34,7 +34,7 @@ class ServiceService: self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,)) self.service_servicer = ServiceServiceServicerImpl(self.database) - add_ServiceServiceServicer_to_server(self.device_servicer, self.server) + add_ServiceServiceServicer_to_server(self.service_servicer, self.server) self.health_servicer = HealthServicer( experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1)) diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py index 11aad76831bd4930d575480474bef3664ad0f4b0..102b7721f3c06d84dab5c2ae7dad0760816de9a8 100644 --- a/src/service/service/ServiceServiceServicerImpl.py +++ b/src/service/service/ServiceServiceServicerImpl.py @@ -1,16 +1,12 @@ -from typing import Dict, List, Set, Tuple +from typing import Dict import grpc, logging from prometheus_client import Counter, Histogram -from common.Checkers import chk_options, chk_string from common.database.api.Database import Database -from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID -from common.database.api.context.service.ServiceState import ServiceState, servicestate_enum_values, \ - to_servicestate_enum -from common.database.api.context.service.ServiceType import ServiceType, servicetype_enum_values, to_servicetype_enum from common.exceptions.ServiceException import ServiceException from service.proto.context_pb2 import Empty from service.proto.service_pb2 import ConnectionList, Service, ServiceId, ServiceList from service.proto.service_pb2_grpc import ServiceServiceServicer +from service.service.Tools import check_service_id_request, check_service_request LOGGER = logging.getLogger(__name__) @@ -83,22 +79,25 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): # ----- Validate request data and pre-conditions ----------------------------------------------------------- # ----- Retrieve data from the database -------------------------------------------------------------------- - db_context_uuids = self.database.contexts + db_context_uuids = self.database.contexts.get() json_services = [] for db_context_uuid in db_context_uuids: - json_services.extend(self.database.context(db_context_uuid).dump_services()) + db_context = self.database.context(db_context_uuid) + json_services.extend(db_context.dump_services()) # ----- Compose reply -------------------------------------------------------------------------------------- reply = ServiceList(cs=json_services) LOGGER.debug('GetServiceList reply: {}'.format(str(reply))) GETSERVICELIST_COUNTER_COMPLETED.inc() return reply - except ServiceException as e: + except ServiceException as e: # pragma: no cover (ServiceException not thrown) + LOGGER.exception('GetServiceList exception') + GETSERVICELIST_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('GetServiceList exception') # pragma: no cover - GETSERVICELIST_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('GetServiceList exception') + GETSERVICELIST_COUNTER_FAILED.inc() + 
grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @CREATESERVICE_HISTOGRAM_DURATION.time() def CreateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId: @@ -107,146 +106,20 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): LOGGER.debug('CreateService request: {}'.format(str(request))) # ----- Validate request data and pre-conditions ----------------------------------------------------------- - try: - context_id = chk_string ('service.cs_id.contextId.contextUuid.uuid', - request.cs_id.contextId.contextUuid.uuid, - allow_empty=False) - service_id = chk_string ('service.cs_id.cs_id.uuid', - request.cs_id.cs_id.uuid, - allow_empty=False) - service_type = chk_options('service.serviceType', - request.serviceType, - servicetype_enum_values()) - service_config = chk_string ('service.serviceConfig.serviceConfig', - request.serviceConfig.serviceConfig, - allow_empty=True) - service_state = chk_options('device.serviceState.serviceState', - request.serviceState.serviceState, - servicestate_enum_values()) - except Exception as e: - LOGGER.exception('Invalid arguments:') - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e)) - - service_type = to_servicetype_enum(service_type) - # should not happen because gRPC limits accepted values in enums - if service_type is None: # pragma: no cover - msg = 'Unsupported ServiceType({}).' # pragma: no cover - msg = msg.format(request.serviceType) # pragma: no cover - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) # pragma: no cover - - if service_type == ServiceType.UNKNOWN: - msg = ' '.join([ - 'Service has to be created with a known ServiceType.', - 'UNKNOWN is only for internal use.', - ]) - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) - - service_state = to_servicestate_enum(service_state) - # should not happen because gRPC limits accepted values in enums - if service_state is None: # pragma: no cover - msg = 'Unsupported ServiceState({}).' # pragma: no cover - msg = msg.format(request.serviceState.serviceState) # pragma: no cover - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) # pragma: no cover - - if service_state != ServiceState.PLANNED: - msg = ' '.join([ - 'Service has to be created with PLANNED state.', - 'State will be updated by the service appropriately.', - ]) - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) - - db_context = self.database.context(context_id).create() - if db_context.services.contains(service_id): - msg = 'Context({})/Service({}) already exists in the database.' 
- msg = msg.format(context_id, service_id) - raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg) - - added_context_topology_devices_endpoints : Dict[str, Dict[str, Dict[str, Set[str]]]] = {} - context_topology_device_endpoint_tuples : List[Tuple[str, str, str, str]] = [] - for i,endpoint in enumerate(request.endpointList): - try: - ep_context_id = chk_string('endpoint[#{}].topoId.contextId.contextUuid.uuid'.format(i), - endpoint.topoId.contextId.contextUuid.uuid, - allow_empty=True) - ep_topology_id = chk_string('endpoint[#{}].topoId.topoId.uuid'.format(i), - endpoint.topoId.topoId.uuid, - allow_empty=True) - ep_device_id = chk_string('endpoint[#{}].dev_id.device_id.uuid'.format(i), - endpoint.dev_id.device_id.uuid, - allow_empty=False) - ep_port_id = chk_string('endpoint[#{}].port_id.uuid'.format(i), - endpoint.port_id.uuid, - allow_empty=False) - except Exception as e: - LOGGER.exception('Invalid arguments:') - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e)) - - if len(ep_context_id) == 0: ep_context_id = DEFAULT_CONTEXT_ID - if len(ep_topology_id) == 0: ep_topology_id = DEFAULT_TOPOLOGY_ID - - added_devices = added_context_topology_devices_endpoints.get(ep_context_id, {}).get(ep_topology_id, {}) - if ep_device_id in added_devices: - msg = 'Duplicated Device({}) in Endpoint(#{}) of Service({}).' - msg = msg.format(ep_device_id, i, service_id) - raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg) - - if not self.database.contexts.contains(ep_context_id): - msg = ' '.join([ - 'Context({}) in Endpoint(#{}) of Service({})', - 'does not exist in the database.', - ]) - msg = msg.format(ep_context_id, i, service_id) - raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) - - db_ep_context = self.database.context(ep_context_id) - if not db_ep_context.topologies.contains(ep_topology_id): - msg = ' '.join([ - 'Context({})/Topology({}) in Endpoint(#{}) of Service({})', - 'does not exist in the database.', - ]) - msg = msg.format(ep_context_id, ep_topology_id, i, service_id) - raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) - - db_ep_topology = db_ep_context.topology(ep_topology_id) - if not db_ep_topology.devices.contains(ep_device_id): - msg = ' '.join([ - 'Context({})/Topology({})/Device({}) in Endpoint(#{}) of Service({})', - 'does not exist in the database.', - ]) - msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, i, service_id) - raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) - - added_device_and_endpoints = added_context_topology_devices_endpoints\ - .setdefault(ep_context_id, {})\ - .setdefault(ep_topology_id, {})\ - .setdefault(ep_device_id, {}) - - # should never happen since same device cannot appear 2 times in the service - if ep_port_id in added_device_and_endpoints: # pragma: no cover - msg = 'Duplicated Context({})/Topology({})/Device({})/Port({}) in Endpoint(#{}) of Service({}).' 
- msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, i, service_id) - raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) - - db_ep_device = db_ep_topology.device(ep_device_id) - if not db_ep_device.endpoints.contains(ep_port_id): - msg = ' '.join([ - 'Context({})/Topology({})/Device({})/Port({}) in Endpoint(#{}) of Service({})', - 'does not exist in the database.', - ]) - msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, i, service_id) - raise ServiceException(grpc.StatusCode.NOT_FOUND, msg) - - added_device_and_endpoints.add(ep_port_id) - context_topology_device_endpoint_tuples.append( - (ep_context_id, ep_topology_id, ep_device_id, ep_port_id)) + context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \ + check_service_request('CreateService', request, self.database, LOGGER) # ----- Implement changes in the database ------------------------------------------------------------------ - db_service = db_context.service(service_id).create(service_type, service_config, service_state) - for context_id,topology_id,device_id,endpoint_id in context_topology_device_endpoint_tuples: - service_endpoint_id = '{}:{}/{}'.format(topology_id, device_id, endpoint_id) - db_endpoint = db_context.topology(topology_id).device(device_id).endpoint(endpoint_id) + db_context = self.database.context(context_id) + db_service = db_context.service(service_id) + db_service.create(service_type, service_config, service_state) + + for db_endpoint in db_endpoints: + service_endpoint_id = '{}:{}/{}'.format( + db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid) db_service.endpoint(service_endpoint_id).create(db_endpoint) - for cons_type,cons_value in constraint_pairs: db_service.constraint(cons_type).create(cons_value) + + for cons_type,cons_value in constraint_tuples: db_service.constraint(cons_type).create(cons_value) # ----- Compose reply -------------------------------------------------------------------------------------- reply = ServiceId(**db_service.dump_id()) @@ -254,11 +127,13 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): CREATESERVICE_COUNTER_COMPLETED.inc() return reply except ServiceException as e: + LOGGER.exception('CreateService exception') + CREATESERVICE_COUNTER_FAILED.inc() grpc_context.abort(e.code, e.details) except Exception as e: # pragma: no cover - LOGGER.exception('CreateService exception') # pragma: no cover - CREATESERVICE_COUNTER_FAILED.inc() # pragma: no cover - grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover + LOGGER.exception('CreateService exception') + CREATESERVICE_COUNTER_FAILED.inc() + grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) @UPDATESERVICE_HISTOGRAM_DURATION.time() def UpdateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId: @@ -266,35 +141,88 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): try: LOGGER.debug('UpdateService request: {}'.format(str(request))) + # ----- Validate request data and pre-conditions ----------------------------------------------------------- + context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \ + check_service_request('UpdateService', request, self.database, LOGGER) + + # ----- Implement changes in the database ------------------------------------------------------------------ + db_context = self.database.context(context_id) + db_service = db_context.service(service_id) + + # Update service 
+            db_service.update(update_attributes={
+                'service_type'  : service_type,
+                'service_config': service_config,
+                'service_state' : service_state,
+            })
+
+            # Update service constraints; first add missing, then remove existing, but not added to Service
+            db_service_constraint_types = set(db_service.constraints.get())
+            for constraint_type,constraint_value in constraint_tuples:
+                if constraint_type in db_service_constraint_types:
+                    db_service.constraint(constraint_type).update(update_attributes={
+                        'constraint_value': constraint_value
+                    })
+                else:
+                    db_service.constraint(constraint_type).create(constraint_value)
+                db_service_constraint_types.discard(constraint_type)
+
+            for constraint_type in db_service_constraint_types:
+                db_service.constraint(constraint_type).delete()
+
+            # Update service endpoints; first add missing, then remove existing, but not added to Service
+            db_service_endpoint_uuids = set(db_service.endpoints.get())
+            for db_endpoint in db_endpoints:
+                service_endpoint_id = '{}:{}/{}'.format(
+                    db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
+                if service_endpoint_id not in db_service_endpoint_uuids:
+                    db_service.endpoint(service_endpoint_id).create(db_endpoint)
+                db_service_endpoint_uuids.discard(service_endpoint_id)
+
+            for db_service_endpoint_uuid in db_service_endpoint_uuids:
+                db_service.endpoint(db_service_endpoint_uuid).delete()
 
-            reply = None
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = ServiceId(**db_service.dump_id())
             LOGGER.debug('UpdateService reply: {}'.format(str(reply)))
             UPDATESERVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('UpdateService exception')
+            UPDATESERVICE_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e: # pragma: no cover
-            LOGGER.exception('UpdateService exception') # pragma: no cover
-            UPDATESERVICE_COUNTER_FAILED.inc() # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover
+            LOGGER.exception('UpdateService exception')
+            UPDATESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @DELETESERVICE_HISTOGRAM_DURATION.time()
-    def DeleteService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId:
+    def DeleteService(self, request : Service, grpc_context : grpc.ServicerContext) -> Empty:
         DELETESERVICE_COUNTER_STARTED.inc()
         try:
             LOGGER.debug('DeleteService request: {}'.format(str(request)))
 
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id = check_service_id_request('DeleteService', request, self.database, LOGGER)
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+            db_service.delete()
 
-            reply = None
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Empty()
             LOGGER.debug('DeleteService reply: {}'.format(str(reply)))
             DELETESERVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('DeleteService exception')
+            DELETESERVICE_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e: # pragma: no cover
-            LOGGER.exception('DeleteService exception') # pragma: no cover
-            DELETESERVICE_COUNTER_FAILED.inc() # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover
+            LOGGER.exception('DeleteService exception')
+            DELETESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @GETSERVICEBYID_HISTOGRAM_DURATION.time()
     def GetServiceById(self, request : ServiceId, grpc_context : grpc.ServicerContext) -> Service:
@@ -302,17 +230,26 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         try:
             LOGGER.debug('GetServiceById request: {}'.format(str(request)))
 
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id = check_service_id_request('GetServiceById', request, self.database, LOGGER)
 
-            reply = None
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Service(**db_service.dump())
             LOGGER.debug('GetServiceById reply: {}'.format(str(reply)))
             GETSERVICEBYID_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('GetServiceById exception')
+            GETSERVICEBYID_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e: # pragma: no cover
-            LOGGER.exception('GetServiceById exception') # pragma: no cover
-            GETSERVICEBYID_COUNTER_FAILED.inc() # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover
+            LOGGER.exception('GetServiceById exception')
+            GETSERVICEBYID_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @GETCONNECTIONLIST_HISTOGRAM_DURATION.time()
     def GetConnectionList(self, request : Empty, grpc_context : grpc.ServicerContext) -> ConnectionList:
@@ -320,14 +257,21 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         try:
            LOGGER.debug('GetConnectionList request: {}'.format(str(request)))
 
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
 
-            reply = None
-            LOGGER.debug('GetConnectionList reply: {}'.format(str(reply)))
-            GETCONNECTIONLIST_COUNTER_COMPLETED.inc()
-            return reply
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, 'RPC GetConnectionList() not implemented')
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            #reply = ConnectionList()
+            #LOGGER.debug('GetConnectionList reply: {}'.format(str(reply)))
+            #GETCONNECTIONLIST_COUNTER_COMPLETED.inc()
+            #return reply
         except ServiceException as e:
+            LOGGER.exception('GetConnectionList exception')
+            GETCONNECTIONLIST_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e: # pragma: no cover
-            LOGGER.exception('GetConnectionList exception') # pragma: no cover
-            GETCONNECTIONLIST_COUNTER_FAILED.inc() # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e)) # pragma: no cover
+            LOGGER.exception('GetConnectionList exception')
+            GETCONNECTIONLIST_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/service/service/Tools.py b/src/service/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0a135dfaddbf04905a7681f435150a01f541cc8
--- /dev/null
+++ b/src/service/service/Tools.py
@@ -0,0 +1,255 @@
+import grpc, logging
+from enum import Enum
+from typing import Dict, List, Set, Tuple
+from common.Checkers import chk_options, chk_string
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_TOPOLOGY_ID
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.database.api.context.service.ServiceState import ServiceState, servicestate_enum_values, to_servicestate_enum
+from common.database.api.context.service.ServiceType import ServiceType, servicetype_enum_values, to_servicetype_enum
+from common.exceptions.ServiceException import ServiceException
+from service.proto.context_pb2 import Constraint, EndPointId
+from service.proto.service_pb2 import Service, ServiceId
+
+# For each method name, define acceptable service types. Empty set means accept all.
+ACCEPTED_SERVICE_TYPES : Dict[str, Set[ServiceType]] = {
+    'CreateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
+    'UpdateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
+}
+
+# For each method name, define acceptable service states. Empty set means accept all.
+ACCEPTED_SERVICE_STATES : Dict[str, Set[ServiceState]] = {
+    'CreateService': set([ServiceState.PLANNED]),
+    'UpdateService': set([ServiceState.PLANNED, ServiceState.ACTIVE, ServiceState.PENDING_REMOVAL]),
+}
+
+# For each method name, define values for (allow_empty_context_id, allow_empty_topology_id)
+CHECK_ENDPOINT_SETTINGS : Dict[str, Tuple[bool, bool]] = {
+    'CreateService': (False, False),
+    'UpdateService': (True, True),
+}
+
+def check_service_exists(method_name : str, database : Database, context_id : str, service_id : str):
+    db_context = database.context(context_id).create()
+    service_exists = db_context.services.contains(service_id)
+    if method_name in ['CreateService'] and service_exists:
+        msg = 'Context({})/Service({}) already exists in the database.'
+        msg = msg.format(context_id, service_id)
+        raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
+    elif method_name in ['UpdateService', 'DeleteService'] and not service_exists:
+        msg = 'Context({})/Service({}) does not exist in the database.'
+        msg = msg.format(context_id, service_id)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+def _check_enum(enum_name, method_name, value, to_enum_method, accepted_values_dict) -> Enum:
+    _value = to_enum_method(value)
+    if _value is None:
+        msg = 'Unsupported {}({}).'
+        msg = msg.format(enum_name, value)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    accepted_values = accepted_values_dict.get(method_name)
+    if accepted_values is None:
+        msg = '{} acceptable values not specified for Method({}).'
+        msg = msg.format(enum_name, method_name)
+        raise ServiceException(grpc.StatusCode.INTERNAL, msg)
+
+    if len(accepted_values) == 0: return _value
+    if _value in accepted_values: return _value
+
+    msg = 'Method({}) does not accept {}({}). Permitted values are {}({}).'
+    msg = msg.format(method_name, enum_name, _value.name, enum_name, list(map(lambda v: v.name, accepted_values)))
+    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+def check_service_type(method_name : str, value : str) -> ServiceType:
+    return _check_enum('ServiceType', method_name, value, to_servicetype_enum, ACCEPTED_SERVICE_TYPES)
+
+def check_service_state(method_name : str, value : str) -> ServiceState:
+    return _check_enum('ServiceState', method_name, value, to_servicestate_enum, ACCEPTED_SERVICE_STATES)
+
+def check_service_constraint(
+    logger : logging.Logger, constraint_number : int, parent_name : str, constraint : Constraint,
+    add_constraints : Dict[str, str]) -> Tuple[str, str]:
+
+    try:
+        constraint_type  = chk_string('constraint[#{}].constraint_type'.format(constraint_number),
+                                      constraint.constraint_type,
+                                      allow_empty=False)
+        constraint_value = chk_string('constraint[#{}].constraint_value'.format(constraint_number),
+                                      constraint.constraint_value,
+                                      allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    if constraint_type in add_constraints:
+        msg = 'Duplicated ConstraintType({}) in {}.'
+        msg = msg.format(constraint_type, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_constraints[constraint_type] = constraint_value
+    return constraint_type, constraint_value
+
+def check_service_endpoint(
+    logger : logging.Logger, endpoint_number : int, parent_name : str, endpoint : EndPointId, service_context_id : str,
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]], allow_empty_context_id=False,
+    allow_empty_topology_id=False) -> Tuple[str, str, str]:
+
+    try:
+        ep_context_id  = chk_string('endpoint[#{}].topoId.contextId.contextUuid.uuid'.format(endpoint_number),
+                                    endpoint.topoId.contextId.contextUuid.uuid,
+                                    allow_empty=allow_empty_context_id)
+        ep_topology_id = chk_string('endpoint[#{}].topoId.topoId.uuid'.format(endpoint_number),
+                                    endpoint.topoId.topoId.uuid,
+                                    allow_empty=allow_empty_topology_id)
+        ep_device_id   = chk_string('endpoint[#{}].dev_id.device_id.uuid'.format(endpoint_number),
+                                    endpoint.dev_id.device_id.uuid,
+                                    allow_empty=False)
+        ep_port_id     = chk_string('endpoint[#{}].port_id.uuid'.format(endpoint_number),
+                                    endpoint.port_id.uuid,
+                                    allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    if len(ep_context_id) == 0:
+        # Assumption: if no context is specified for a service endpoint, use service context
+        ep_context_id = service_context_id
+    elif ep_context_id != service_context_id:
+        # Assumption: service and service endpoints should belong to same context
+        msg = 'Context({}) in {} mismatches service Context.'
+        msg = msg.format(ep_context_id, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    if len(ep_topology_id) == 0:
+        # Assumption: if no topology is specified for a service endpoint, use default topology
+        ep_topology_id = DEFAULT_TOPOLOGY_ID
+
+    add_devices = add_topology_devices_endpoints.setdefault(ep_topology_id, {})
+    if ep_device_id in add_devices:
+        msg = 'Duplicated Context({})/Topology({})/Device({}) in {}.'
+        msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_device_and_endpoints = add_devices.setdefault(ep_device_id, set())
+
+    # Implicit validation since same device cannot appear 2 times in the service
+    #if ep_port_id in add_device_and_endpoints:
+    #    msg = 'Duplicated Context({})/Topology({})/Device({})/Port({}) in {}.'
+    #    msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, parent_name)
+    #    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+    add_device_and_endpoints.add(ep_port_id)
+    return ep_topology_id, ep_device_id, ep_port_id
+
+def check_device_endpoint_exists(
+    database : Database, parent_name : str,
+    context_id : str, topology_id : str, device_id : str, port_id : str) -> Endpoint:
+
+    if not database.contexts.contains(context_id):
+        msg = 'Context({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_context = database.context(context_id)
+
+    if not db_context.topologies.contains(topology_id):
+        msg = 'Context({})/Topology({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_topology = db_context.topology(topology_id)
+
+    if not db_topology.devices.contains(device_id):
+        msg = 'Context({})/Topology({})/Device({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, device_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_device = db_topology.device(device_id)
+
+    if not db_device.endpoints.contains(port_id):
+        msg = 'Context({})/Topology({})/Device({})/Port({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, device_id, port_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+    return db_device.endpoint(port_id)
+
+def check_service_request(
+    method_name : str, request : Service, database : Database, logger : logging.Logger
+    ) -> Tuple[str, str, ServiceType, str, ServiceState, List[Endpoint], List[Tuple[str, str]]]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        context_id     = chk_string ('service.cs_id.contextId.contextUuid.uuid',
+                                     request.cs_id.contextId.contextUuid.uuid,
+                                     allow_empty=False)
+        service_id     = chk_string ('service.cs_id.cs_id.uuid',
+                                     request.cs_id.cs_id.uuid,
+                                     allow_empty=False)
+        service_type   = chk_options('service.serviceType',
+                                     request.serviceType,
+                                     servicetype_enum_values())
+        service_config = chk_string ('service.serviceConfig.serviceConfig',
+                                     request.serviceConfig.serviceConfig,
+                                     allow_empty=True)
+        service_state  = chk_options('service.serviceState.serviceState',
+                                     request.serviceState.serviceState,
+                                     servicestate_enum_values())
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    service_type = check_service_type(method_name, service_type)
+    service_state = check_service_state(method_name, service_state)
+
+    # ----- Check if service exists in database ------------------------------------------------------------------------
+    check_service_exists(method_name, database, context_id, service_id)
+
+    # ----- Parse constraints ------------------------------------------------------------------------------------------
+    add_constraints : Dict[str, str] = {}
+    constraint_tuples : List[Tuple[str, str]] = []
+    for constraint_number,constraint in enumerate(request.constraint):
+        parent_name = 'Constraint(#{}) of Context({})/Service({})'.format(constraint_number, context_id, service_id)
+        constraint_type, constraint_value = check_service_constraint(
+            logger, constraint_number, parent_name, constraint, add_constraints)
+        constraint_tuples.append((constraint_type, constraint_value))
+
+    # ----- Parse endpoints and check if they exist in the database as device endpoints ---------------------------------
+    settings = CHECK_ENDPOINT_SETTINGS.get(method_name)
+    if settings is None:
+        msg = '"check_service_endpoint" settings not specified for Method({}).'
+        msg = msg.format(method_name)
+        raise ServiceException(grpc.StatusCode.INTERNAL, msg)
+    allow_empty_context_id, allow_empty_topology_id = settings
+
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
+    db_endpoints : List[Endpoint] = []
+    for endpoint_number,endpoint in enumerate(request.endpointList):
+        parent_name = 'Endpoint(#{}) of Context({})/Service({})'.format(endpoint_number, context_id, service_id)
+
+        ep_topology_id, ep_device_id, ep_port_id = check_service_endpoint(
+            logger, endpoint_number, parent_name, endpoint, context_id, add_topology_devices_endpoints,
+            allow_empty_context_id=allow_empty_context_id, allow_empty_topology_id=allow_empty_topology_id)
+
+        db_endpoint = check_device_endpoint_exists(
+            database, parent_name, context_id, ep_topology_id, ep_device_id, ep_port_id)
+        db_endpoints.append(db_endpoint)
+
+    return context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples
+
+def check_service_id_request(
+    method_name : str, request : ServiceId, database : Database, logger : logging.Logger) -> Tuple[str, str]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        context_id = chk_string ('service_id.contextId.contextUuid.uuid',
+                                 request.contextId.contextUuid.uuid,
+                                 allow_empty=False)
+        service_id = chk_string ('service_id.cs_id.uuid',
+                                 request.cs_id.uuid,
+                                 allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    # ----- Check if service exists in database ------------------------------------------------------------------------
+    check_service_exists(method_name, database, context_id, service_id)
+
+    return context_id, service_id
diff --git a/src/service/tests/test_unitary.py b/src/service/tests/test_unitary.py
index 69eb3171e21abe445fe46dd9b5a40c4b98b3c2b3..e001627a229b45cf3f6b59b670789f0f801c3ee1 100644
--- a/src/service/tests/test_unitary.py
+++ b/src/service/tests/test_unitary.py
@@ -1,21 +1,64 @@
 import logging, pytest
+from google.protobuf.json_format import MessageToDict
 from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.api.Database import Database
+from common.database.tests.script import populate_example
+from common.tests.Assertions import validate_empty, validate_service, validate_service_id, validate_service_list
 from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 from service.client.ServiceClient import ServiceClient
+from service.proto.context_pb2 import Empty
+from service.proto.service_pb2 import Service, ServiceId, ServiceStateEnum, ServiceType
 from service.service.ServiceService import ServiceService
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
+SERVICE_ID = {
+    'contextId': {'contextUuid': {'uuid': 'admin'}},
+    'cs_id': {'uuid': 'DEV1'},
+}
+SERVICE = {
+    'cs_id': SERVICE_ID,
+    'serviceType': ServiceType.L3NM,
+    'serviceConfig': {'serviceConfig': '<config/>'},
+    'serviceState': {'serviceState': ServiceStateEnum.PLANNED},
+    'endpointList' : [
+        {
+            'topoId': {
+                'contextId': {'contextUuid': {'uuid': 'admin'}},
+                'topoId': {'uuid': 'admin'}
+            },
+            'dev_id': {'device_id': {'uuid': 'DEV1'}},
+            'port_id': {'uuid' : 'EP5'}
+        },
+        {
+            'topoId': {
+                'contextId': {'contextUuid': {'uuid': 'admin'}},
+                'topoId': {'uuid': 'admin'}
+            },
+            'dev_id': {'device_id': {'uuid': 'DEV2'}},
+            'port_id': {'uuid' : 'EP5'}
+        },
+        {
+            'topoId': {
+                'contextId': {'contextUuid': {'uuid': 'admin'}},
+                'topoId': {'uuid': 'admin'}
+            },
+            'dev_id': {'device_id': {'uuid': 'DEV3'}},
+            'port_id': {'uuid' : 'EP5'}
+        },
+    ]
+}
+
 @pytest.fixture(scope='session')
-def service_database():
+def database():
     _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
     return _database
 
 @pytest.fixture(scope='session')
-def service_service(service_database):
+def service_service(database):
     _service = ServiceService(
-        service_database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+        database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
@@ -25,3 +68,41 @@ def service_client(service_service):
     _client = ServiceClient(address='127.0.0.1', port=GRPC_SERVICE_PORT)
     yield _client
     _client.close()
+
+def test_get_services_empty(service_client : ServiceClient, database : Database):
+    # should work
+    populate_example(database, add_services=False)
+    validate_service_list(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_create_service(service_client : ServiceClient):
+    # should work
+    validate_service_id(MessageToDict(
+        service_client.CreateService(Service(**SERVICE)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_get_service(service_client : ServiceClient):
+    # should work
+    validate_service(MessageToDict(
+        service_client.GetServiceById(ServiceId(**SERVICE_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_delete_service(service_client : ServiceClient):
+    # should work
+    validate_empty(MessageToDict(
+        service_client.DeleteService(ServiceId(**SERVICE_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_get_services_full(service_client : ServiceClient, database : Database):
+    # should work
+    database.clear_all()
+    populate_example(database)
+    validate_service_list(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))