diff --git a/scripts/run_tests_locally-context.sh b/scripts/run_tests_locally-context.sh
index 8b0c82b3ef1157596c63ce260f2bd2e85a6ca9a1..0124469ecc0089c1aeb26f695f4976b7bc50a66d 100755
--- a/scripts/run_tests_locally-context.sh
+++ b/scripts/run_tests_locally-context.sh
@@ -13,28 +13,66 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-########################################################################################################################
-# Define your deployment settings here
-########################################################################################################################
-
-# If not already set, set the name of the Kubernetes namespace to deploy to.
-export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
-
-########################################################################################################################
-# Automated steps start here
-########################################################################################################################
-
 PROJECTDIR=`pwd`
 
 cd $PROJECTDIR/src
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+COVERAGEFILE=$PROJECTDIR/coverage/.coverage
 
-#export CRDB_URI="cockroachdb://tfs:tfs123@127.0.0.1:26257/tfs_test?sslmode=require"
-export CRDB_URI="cockroachdb://tfs:tfs123@10.1.7.195:26257/tfs_test?sslmode=require"
-export PYTHONPATH=/home/tfs/tfs-ctrl/src
+# Destroy old coverage file and configure the correct folder on the .coveragerc file
+rm -f $COVERAGEFILE
+cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/tfs-ctrl+$PROJECTDIR+g > $RCFILE
+
+echo
+echo "Pre-test clean-up:"
+echo "------------------"
+docker rm -f crdb nats
+docker volume rm -f crdb
+docker network rm tfs-br
 
-# Run unitary tests and analyze coverage of code at same time
+echo
+echo "Pull Docker images:"
+echo "-------------------"
+docker pull cockroachdb/cockroach:latest-v22.2
+docker pull nats:2.9
+
+echo
+echo "Create test environment:"
+echo "------------------------"
+docker network create -d bridge --subnet=172.254.254.0/24 --gateway=172.254.254.1 --ip-range=172.254.254.0/24 tfs-br
+docker volume create crdb
+docker run --name crdb -d --network=tfs-br --ip 172.254.254.10 -p 26257:26257 -p 8080:8080 \
+    --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123\
+    --volume "crdb:/cockroach/cockroach-data" \
+    cockroachdb/cockroach:latest-v22.2 start-single-node
+docker run --name nats -d --network=tfs-br --ip 172.254.254.11 -p 4222:4222 -p 8222:8222 \
+    nats:2.9 --http_port 8222 --user tfs --pass tfs123
+echo "Waiting for initialization..."
+sleep 10
+docker ps -a
+
+echo
+echo "Run unitary tests and analyze code coverage:"
+echo "--------------------------------------------"
+export CRDB_URI="cockroachdb://tfs:tfs123@172.254.254.10:26257/tfs_test?sslmode=require"
+export MB_BACKEND="nats"
+export NATS_URI="nats://tfs:tfs123@172.254.254.11:4222"
+export PYTHONPATH=/home/tfs/tfs-ctrl/src
 # helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose --maxfail=1 \
-    context/tests/test_unitary.py \
-    context/tests/test_hasher.py
+    context/tests/test_context.py \
+    context/tests/test_topology.py
+    #context/tests/test_*.py
+
+echo
+echo "Coverage report:"
+echo "----------------"
+#coverage report --rcfile=$RCFILE --sort cover --show-missing --skip-covered | grep --color -E -i "^context/.*$|$"
+coverage report --rcfile=$RCFILE --sort cover --show-missing --skip-covered --include="context/*"
+
+echo
+echo "Post-test clean-up:"
+echo "-------------------"
+docker rm -f crdb nats
+docker volume rm -f crdb
+docker network rm tfs-br
diff --git a/src/context/.gitlab-ci.yml b/src/context/.gitlab-ci.yml
index 468566701aacbed2c05c36fa27ca8b6a8ecc66d0..2a707004f5ab7701c0d3e1a97bff681fb4043eb5 100644
--- a/src/context/.gitlab-ci.yml
+++ b/src/context/.gitlab-ci.yml
@@ -51,10 +51,12 @@ unit test context:
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
     - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi
     - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi
+    - if docker container ls | grep nats; then docker rm -f nats; else echo "NATS container is not in the system"; fi
    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi
   script:
    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker pull "cockroachdb/cockroach:latest-v22.2"
+    - docker pull "nats:2.9"
    - docker volume create crdb
    - >
      docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080
@@ -63,16 +65,24 @@ unit test context:
      --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs
      --env COCKROACH_PASSWORD=tfs123 --volume "crdb:/cockroach/cockroach-data"
      cockroachdb/cockroach:latest-v22.2 start-single-node
+    - >
+      docker run --name nats -d --network=teraflowbridge -p 4222:4222 -p 8222:8222
+      nats:2.9 --http_port 8222 --user tfs --pass tfs123
+    - echo "Waiting for initialization..."
    - sleep 10
+    - docker ps -a
    - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
+    - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
    - >
      docker run --name $IMAGE_NAME -d -p 1010:1010
      --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require"
+      --env "MB_BACKEND=nats"
+      --env "NATS_URI=nats://tfs:tfs123@${NATS_ADDRESS}:4222"
      --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results"
      --network=teraflowbridge
      $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
    - docker ps -a
-    - sleep 10
+    - sleep 3
    - docker logs $IMAGE_NAME
    - >
      docker exec -i $IMAGE_NAME bash -c
@@ -80,8 +90,7 @@ unit test context:
    - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
   coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
-    - docker rm -f $IMAGE_NAME
-    - docker rm -f crdb
+    - docker rm -f $IMAGE_NAME crdb nats
    - docker volume rm -f crdb
    - docker network rm teraflowbridge
   rules:
diff --git a/src/context/service/Constants.py b/src/context/service/Constants.py
deleted file mode 100644
index 1eb274cf07d872bdcd6fefb482bf1a6afa7fc487..0000000000000000000000000000000000000000
--- a/src/context/service/Constants.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-TOPIC_CONNECTION = 'connection'
-TOPIC_CONTEXT = 'context'
-TOPIC_DEVICE = 'device'
-TOPIC_LINK = 'link'
-#TOPIC_POLICY = 'policy'
-TOPIC_SERVICE = 'service'
-TOPIC_SLICE = 'slice'
-TOPIC_TOPOLOGY = 'topology'
-
-TOPICS = {
-    TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK,
-    #TOPIC_POLICY,
-    TOPIC_SERVICE, TOPIC_SLICE, TOPIC_TOPOLOGY
-}
-
-CONSUME_TIMEOUT = 0.5 # seconds
diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py
index 3f1bd9c202550f5aad3ce64408677fe55d344b50..1528d64d9d9e394080cfbff17eba03a9b73430f9 100644
--- a/src/context/service/ContextServiceServicerImpl.py
+++ b/src/context/service/ContextServiceServicerImpl.py
@@ -19,7 +19,7 @@ from common.proto.context_pb2 import (
     Connection, ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList,
     Context, ContextEvent, ContextId, ContextIdList, ContextList,
     Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList,
-    Empty,
+    Empty, EventTypeEnum,
     Link, LinkEvent, LinkId, LinkIdList, LinkList,
     Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList,
     Slice, SliceEvent, SliceId, SliceIdList, SliceList,
@@ -38,9 +38,9 @@ from .database.PolicyRule import (
 from .database.Service import service_delete, service_get, service_list_ids, service_list_objs, service_set
 from .database.Slice import slice_delete, slice_get, slice_list_ids, slice_list_objs, slice_set, slice_unset
 from .database.Topology import topology_delete, topology_get, topology_list_ids, topology_list_objs, topology_set
-from .Constants import (
+from .Events import (
     CONSUME_TIMEOUT, TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, #TOPIC_POLICY,
-    TOPIC_SERVICE, TOPIC_SLICE, TOPIC_TOPOLOGY)
+    TOPIC_SERVICE, TOPIC_SLICE, TOPIC_TOPOLOGY, notify_event)
 
 LOGGER = logging.getLogger(__name__)
 
@@ -60,28 +60,29 @@ class ContextServiceServicerImpl(ContextServiceServicer, ContextPolicyServiceSer
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def ListContextIds(self, request : Empty, context : grpc.ServicerContext) -> ContextIdList:
-        return context_list_ids(self.db_engine)
+        return ContextIdList(context_ids=context_list_ids(self.db_engine))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def ListContexts(self, request : Empty, context : grpc.ServicerContext) -> ContextList:
-        return context_list_objs(self.db_engine)
+        return ContextList(contexts=context_list_objs(self.db_engine))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def GetContext(self, request : ContextId, context : grpc.ServicerContext) -> Context:
-        return context_get(self.db_engine, request)
+        return Context(**context_get(self.db_engine, request))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetContext(self, request : Context, context : grpc.ServicerContext) -> ContextId:
-        context_id,updated = context_set(self.db_engine, request) # pylint: disable=unused-variable
-        #event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
-        #notify_event(self.messagebroker, TOPIC_CONTEXT, event_type, {'context_id': context_id})
-        return context_id
+        context_id,updated = context_set(self.db_engine, request)
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        notify_event(self.messagebroker, TOPIC_CONTEXT, event_type, {'context_id': context_id})
+        return ContextId(**context_id)
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def RemoveContext(self, request : ContextId, context : grpc.ServicerContext) -> Empty:
-        deleted = context_delete(self.db_engine, request) # pylint: disable=unused-variable
-        #if deleted:
-        #    notify_event(self.messagebroker, TOPIC_CONTEXT, EventTypeEnum.EVENTTYPE_REMOVE, {'context_id': request})
+        context_id,deleted = context_delete(self.db_engine, request)
+        if deleted:
+            event_type = EventTypeEnum.EVENTTYPE_REMOVE
+            notify_event(self.messagebroker, TOPIC_CONTEXT, event_type, {'context_id': context_id})
         return Empty()
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
@@ -94,28 +95,29 @@ class ContextServiceServicerImpl(ContextServiceServicer, ContextPolicyServiceSer
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def ListTopologyIds(self, request : ContextId, context : grpc.ServicerContext) -> TopologyIdList:
-        return topology_list_ids(self.db_engine, request)
+        return TopologyIdList(topology_ids=topology_list_ids(self.db_engine, request))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def ListTopologies(self, request : ContextId, context : grpc.ServicerContext) -> TopologyList:
-        return topology_list_objs(self.db_engine, request)
+        return TopologyList(topologies=topology_list_objs(self.db_engine, request))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def GetTopology(self, request : TopologyId, context : grpc.ServicerContext) -> Topology:
-        return topology_get(self.db_engine, request)
+        return Topology(**topology_get(self.db_engine, request))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetTopology(self, request : Topology, context : grpc.ServicerContext) -> TopologyId:
-        topology_id,updated = topology_set(self.db_engine, request) # pylint: disable=unused-variable
-        #event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
-        #notify_event(self.messagebroker, TOPIC_TOPOLOGY, event_type, {'topology_id': topology_id})
-        return topology_id
+        topology_id,updated = topology_set(self.db_engine, request)
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        notify_event(self.messagebroker, TOPIC_TOPOLOGY, event_type, {'topology_id': topology_id})
+        return TopologyId(**topology_id)
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def RemoveTopology(self, request : TopologyId, context : grpc.ServicerContext) -> Empty:
-        deleted = topology_delete(self.db_engine, request) # pylint: disable=unused-variable
-        #if deleted:
-        #    notify_event(self.messagebroker, TOPIC_TOPOLOGY, EventTypeEnum.EVENTTYPE_REMOVE, {'topology_id': request})
+        topology_id,deleted = topology_delete(self.db_engine, request)
+        if deleted:
+            event_type = EventTypeEnum.EVENTTYPE_REMOVE
+            notify_event(self.messagebroker, TOPIC_TOPOLOGY, event_type, {'topology_id': topology_id})
         return Empty()
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
diff --git a/src/context/service/Events.py b/src/context/service/Events.py
index 46b1d36c4a40a48cf2928f1e1c80822443e3cf28..e7cf1997c349652fae405bb191d4fe03b4b0a238 100644
--- a/src/context/service/Events.py
+++ b/src/context/service/Events.py
@@ -18,9 +18,25 @@ from common.message_broker.Message import Message
 from common.message_broker.MessageBroker import MessageBroker
 from common.proto.context_pb2 import EventTypeEnum
 
-def notify_event(
-    messagebroker : MessageBroker, topic_name : str, event_type : EventTypeEnum, fields : Dict[str, str]) -> None:
+TOPIC_CONNECTION = 'connection'
+TOPIC_CONTEXT = 'context'
+TOPIC_DEVICE = 'device'
+TOPIC_LINK = 'link'
+#TOPIC_POLICY = 'policy'
+TOPIC_SERVICE = 'service'
+TOPIC_SLICE = 'slice'
+TOPIC_TOPOLOGY = 'topology'
+
+TOPICS = {
+    TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, #TOPIC_POLICY,
+    TOPIC_SERVICE, TOPIC_SLICE, TOPIC_TOPOLOGY
+}
+CONSUME_TIMEOUT = 0.5 # seconds
+
+def notify_event(
+    messagebroker : MessageBroker, topic_name : str, event_type : EventTypeEnum, fields : Dict[str, str]
+) -> None:
     event = {'event': {'timestamp': {'timestamp': time.time()}, 'event_type': event_type}}
     for field_name, field_value in fields.items():
         event[field_name] = field_value
diff --git a/src/context/service/database/Context.py b/src/context/service/database/Context.py
index 6c7003e95beca96d0b2f61449644489866c638db..e4fd13b22be9ff816fad044d4eb06779ccf7983d 100644
--- a/src/context/service/database/Context.py
+++ b/src/context/service/database/Context.py
@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
+import datetime, logging
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import Session, sessionmaker
 from sqlalchemy_cockroachdb import run_transaction
 from typing import Dict, List, Optional, Tuple
-from common.proto.context_pb2 import Context, ContextId, ContextIdList, ContextList
+from common.proto.context_pb2 import Context, ContextId
 from common.method_wrappers.ServiceExceptions import NotFoundException
 from common.tools.object_factory.Context import json_context_id
 from .models.ContextModel import ContextModel
@@ -26,21 +26,19 @@ from .uuids.Context import context_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
-def context_list_ids(db_engine : Engine) -> ContextIdList:
+def context_list_ids(db_engine : Engine) -> List[Dict]:
     def callback(session : Session) -> List[Dict]:
         obj_list : List[ContextModel] = session.query(ContextModel).all()
-        #.options(selectinload(ContextModel.topology)).filter_by(context_uuid=context_uuid).one_or_none()
         return [obj.dump_id() for obj in obj_list]
-    return ContextIdList(context_ids=run_transaction(sessionmaker(bind=db_engine), callback))
+    return run_transaction(sessionmaker(bind=db_engine), callback)
 
-def context_list_objs(db_engine : Engine) -> ContextList:
+def context_list_objs(db_engine : Engine) -> List[Dict]:
     def callback(session : Session) -> List[Dict]:
         obj_list : List[ContextModel] = session.query(ContextModel).all()
-        #.options(selectinload(ContextModel.topology)).filter_by(context_uuid=context_uuid).one_or_none()
         return [obj.dump() for obj in obj_list]
-    return ContextList(contexts=run_transaction(sessionmaker(bind=db_engine), callback))
+    return run_transaction(sessionmaker(bind=db_engine), callback)
 
-def context_get(db_engine : Engine, request : ContextId) -> Context:
+def context_get(db_engine : Engine, request : ContextId) -> Dict:
     context_uuid = context_get_uuid(request, allow_random=False)
     def callback(session : Session) -> Optional[Dict]:
         obj : Optional[ContextModel] = session.query(ContextModel).filter_by(context_uuid=context_uuid).one_or_none()
@@ -51,9 +49,9 @@ def context_get(db_engine : Engine, request : ContextId) -> Context:
         raise NotFoundException('Context', raw_context_uuid, extra_details=[
             'context_uuid generated was: {:s}'.format(context_uuid)
         ])
-    return Context(**obj)
+    return obj
 
-def context_set(db_engine : Engine, request : Context) -> Tuple[ContextId, bool]:
+def context_set(db_engine : Engine, request : Context) -> Tuple[Dict, bool]:
     context_name = request.name
     if len(context_name) == 0: context_name = request.context_id.context_uuid.uuid
     context_uuid = context_get_uuid(request.context_id, context_name=context_name, allow_random=True)
@@ -72,26 +70,34 @@ def context_set(db_engine : Engine, request : Context) -> Tuple[ContextId, bool]
     if len(request.slice_ids) > 0: # pragma: no cover
         LOGGER.warning('Items in field "slice_ids" ignored. This field is used for retrieval purposes only.')
 
+    now = datetime.datetime.utcnow()
     context_data = [{
         'context_uuid': context_uuid,
         'context_name': context_name,
+        'created_at' : now,
+        'updated_at' : now,
     }]
 
-    def callback(session : Session) -> None:
+    def callback(session : Session) -> bool:
         stmt = insert(ContextModel).values(context_data)
         stmt = stmt.on_conflict_do_update(
             index_elements=[ContextModel.context_uuid],
-            set_=dict(context_name = stmt.excluded.context_name)
+            set_=dict(
+                context_name = stmt.excluded.context_name,
+                updated_at = stmt.excluded.updated_at,
+            )
         )
-        session.execute(stmt)
+        stmt = stmt.returning(ContextModel.created_at, ContextModel.updated_at)
+        created_at,updated_at = session.execute(stmt).fetchone()
+        return updated_at > created_at
 
-    run_transaction(sessionmaker(bind=db_engine), callback)
-    updated = False # TODO: improve and check if created/updated
-    return ContextId(**json_context_id(context_uuid)),updated
+    updated = run_transaction(sessionmaker(bind=db_engine), callback)
+    return json_context_id(context_uuid),updated
 
-def context_delete(db_engine : Engine, request : ContextId) -> bool:
+def context_delete(db_engine : Engine, request : ContextId) -> Tuple[Dict, bool]:
     context_uuid = context_get_uuid(request, allow_random=False)
     def callback(session : Session) -> bool:
         num_deleted = session.query(ContextModel).filter_by(context_uuid=context_uuid).delete()
         return num_deleted > 0
-    return run_transaction(sessionmaker(bind=db_engine), callback)
+    deleted = run_transaction(sessionmaker(bind=db_engine), callback)
+    return json_context_id(context_uuid),deleted
diff --git a/src/context/service/database/Topology.py b/src/context/service/database/Topology.py
index 40ecb6c39dd077db8407548d8751c67ced9d6891..75fc229d8c41261c4e15ba89393c47d94f32d4ba 100644
--- a/src/context/service/database/Topology.py
+++ b/src/context/service/database/Topology.py
@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
+import datetime, logging
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import Session, sessionmaker
 from sqlalchemy_cockroachdb import run_transaction
 from typing import Dict, List, Optional, Tuple
-from common.proto.context_pb2 import ContextId, Topology, TopologyId, TopologyIdList, TopologyList
+from common.proto.context_pb2 import ContextId, Topology, TopologyId
 from common.method_wrappers.ServiceExceptions import NotFoundException
 from common.tools.object_factory.Context import json_context_id
 from common.tools.object_factory.Topology import json_topology_id
@@ -28,23 +28,21 @@ from .uuids.Topology import topology_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
-def topology_list_ids(db_engine : Engine, request : ContextId) -> TopologyIdList:
+def topology_list_ids(db_engine : Engine, request : ContextId) -> List[Dict]:
     context_uuid = context_get_uuid(request, allow_random=False)
     def callback(session : Session) -> List[Dict]:
         obj_list : List[TopologyModel] = session.query(TopologyModel).filter_by(context_uuid=context_uuid).all()
-        #.options(selectinload(ContextModel.topology)).filter_by(context_uuid=context_uuid).one_or_none()
         return [obj.dump_id() for obj in obj_list]
-    return TopologyIdList(topology_ids=run_transaction(sessionmaker(bind=db_engine), callback))
+    return run_transaction(sessionmaker(bind=db_engine), callback)
 
-def topology_list_objs(db_engine : Engine, request : ContextId) -> TopologyList:
+def topology_list_objs(db_engine : Engine, request : ContextId) -> List[Dict]:
     context_uuid = context_get_uuid(request, allow_random=False)
     def callback(session : Session) -> List[Dict]:
         obj_list : List[TopologyModel] = session.query(TopologyModel).filter_by(context_uuid=context_uuid).all()
-        #.options(selectinload(ContextModel.topology)).filter_by(context_uuid=context_uuid).one_or_none()
         return [obj.dump() for obj in obj_list]
-    return TopologyList(topologies=run_transaction(sessionmaker(bind=db_engine), callback))
+    return run_transaction(sessionmaker(bind=db_engine), callback)
 
-def topology_get(db_engine : Engine, request : TopologyId) -> Topology:
+def topology_get(db_engine : Engine, request : TopologyId) -> Dict:
     _,topology_uuid = topology_get_uuid(request, allow_random=False)
     def callback(session : Session) -> Optional[Dict]:
         obj : Optional[TopologyModel] = session.query(TopologyModel)\
@@ -58,9 +56,9 @@ def topology_get(db_engine : Engine, request : TopologyId) -> Topology:
             'context_uuid generated was: {:s}'.format(context_uuid),
             'topology_uuid generated was: {:s}'.format(topology_uuid),
         ])
-    return Topology(**obj)
+    return obj
 
-def topology_set(db_engine : Engine, request : Topology) -> Tuple[TopologyId, bool]:
+def topology_set(db_engine : Engine, request : Topology) -> Tuple[Dict, bool]:
     topology_name = request.name
     if len(topology_name) == 0: topology_name = request.topology_id.topology_uuid.uuid
     context_uuid,topology_uuid = topology_get_uuid(request.topology_id, topology_name=topology_name, allow_random=True)
@@ -75,27 +73,35 @@ def topology_set(db_engine : Engine, request : Topology) -> Tuple[TopologyId, bo
     if len(request.link_ids) > 0: # pragma: no cover
         LOGGER.warning('Items in field "link_ids" ignored. This field is used for retrieval purposes only.')
 
+    now = datetime.datetime.utcnow()
     topology_data = [{
         'context_uuid' : context_uuid,
         'topology_uuid': topology_uuid,
         'topology_name': topology_name,
+        'created_at' : now,
+        'updated_at' : now,
     }]
 
     def callback(session : Session) -> None:
         stmt = insert(TopologyModel).values(topology_data)
         stmt = stmt.on_conflict_do_update(
             index_elements=[TopologyModel.topology_uuid],
-            set_=dict(topology_name = stmt.excluded.topology_name)
+            set_=dict(
+                topology_name = stmt.excluded.topology_name,
+                updated_at = stmt.excluded.updated_at,
+            )
         )
-        session.execute(stmt)
-
-    run_transaction(sessionmaker(bind=db_engine), callback)
-    updated = False # TODO: improve and check if created/updated
-    return TopologyId(**json_topology_id(topology_uuid, json_context_id(context_uuid))),updated
+        stmt = stmt.returning(TopologyModel.created_at, TopologyModel.updated_at)
+        created_at,updated_at = session.execute(stmt).fetchone()
+        return updated_at > created_at
+
+    updated = run_transaction(sessionmaker(bind=db_engine), callback)
+    return json_topology_id(topology_uuid, context_id=json_context_id(context_uuid)),updated
 
-def topology_delete(db_engine : Engine, request : TopologyId) -> bool:
-    _,topology_uuid = topology_get_uuid(request, allow_random=False)
+def topology_delete(db_engine : Engine, request : TopologyId) -> Tuple[Dict, bool]:
+    context_uuid,topology_uuid = topology_get_uuid(request, allow_random=False)
     def callback(session : Session) -> bool:
         num_deleted = session.query(TopologyModel).filter_by(topology_uuid=topology_uuid).delete()
         return num_deleted > 0
-    return run_transaction(sessionmaker(bind=db_engine), callback)
+    deleted = run_transaction(sessionmaker(bind=db_engine), callback)
+    return json_topology_id(topology_uuid, context_id=json_context_id(context_uuid)),deleted
diff --git a/src/context/service/database/models/ContextModel.py b/src/context/service/database/models/ContextModel.py
index 8dc5f545f436fede830645785c72fb58a239d358..fee0f72a544d96d7fb9381e1ecd406cc3084b560 100644
--- a/src/context/service/database/models/ContextModel.py
+++ b/src/context/service/database/models/ContextModel.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from sqlalchemy import Column, String
+from sqlalchemy import Column, DateTime, String
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import relationship
 from typing import Dict
@@ -23,6 +23,8 @@ class ContextModel(_Base):
 
     context_uuid = Column(UUID(as_uuid=False), primary_key=True)
     context_name = Column(String, nullable=False)
+    created_at = Column(DateTime)
+    updated_at = Column(DateTime)
 
     topologies = relationship('TopologyModel', back_populates='context')
     services = relationship('ServiceModel', back_populates='context')
diff --git a/src/context/service/database/models/TopologyModel.py b/src/context/service/database/models/TopologyModel.py
index 14fdaaeec8196bd960423983e186d843bd1321d5..d4dbe173e510332f35f707221af9a27b84eb5f22 100644
--- a/src/context/service/database/models/TopologyModel.py
+++ b/src/context/service/database/models/TopologyModel.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from sqlalchemy import Column, ForeignKey, String
+from sqlalchemy import Column, DateTime, ForeignKey, String
 from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import relationship
 from typing import Dict
@@ -24,6 +24,8 @@ class TopologyModel(_Base):
     topology_uuid = Column(UUID(as_uuid=False), primary_key=True)
     context_uuid = Column(ForeignKey('context.context_uuid'), nullable=False)
     topology_name = Column(String, nullable=False)
+    created_at = Column(DateTime)
+    updated_at = Column(DateTime)
 
     context = relationship('ContextModel', back_populates='topologies')
     topology_devices = relationship('TopologyDeviceModel') # back_populates='topology'
diff --git a/src/context/tests/conftest.py b/src/context/tests/conftest.py
index 25de05842e7596429cb236db0b7589ad7a94213c..93b8c66beff65cdbbfd97ca4483f55333fa36c71 100644
--- a/src/context/tests/conftest.py
+++ b/src/context/tests/conftest.py
@@ -20,7 +20,7 @@ from common.Constants import ServiceNameEnum
 from common.Settings import (
     ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, ENVVAR_SUFIX_SERVICE_PORT_HTTP, get_env_var_name,
     get_service_port_grpc, get_service_port_http)
-from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
+from common.message_broker.Factory import get_messagebroker_backend
 from common.message_broker.MessageBroker import MessageBroker
 from common.method_wrappers.Decorator import MetricsPool
 from context.client.ContextClient import ContextClient
@@ -43,7 +43,7 @@ def context_db_mb(request) -> Tuple[sqlalchemy.engine.Engine, MessageBroker]:
     Engine.create_database(_db_engine)
     rebuild_database(_db_engine)
 
-    _msg_broker = MessageBroker(get_messagebroker_backend(backend=MessageBrokerBackendEnum.INMEMORY))
+    _msg_broker = MessageBroker(get_messagebroker_backend())
     yield _db_engine, _msg_broker
     _msg_broker.terminate()
 
@@ -72,6 +72,7 @@ def pytest_terminal_summary(
 ):
     yield
 
-    print('')
-    print('Performance Results:')
-    print(RAW_METRICS.get_pretty_table().get_string())
+    if RAW_METRICS is not None:
+        print('')
+        print('Performance Results:')
+        print(RAW_METRICS.get_pretty_table().get_string())
diff --git a/src/context/tests/test_connection.py b/src/context/tests/test_connection.py
index f28fde3565956ff33dc1a5285ef3e07bdd43d8bc..4cc5407b4bafb55013bfbc46dc9850a1b33f8864 100644
--- a/src/context/tests/test_connection.py
+++ b/src/context/tests/test_connection.py
@@ -24,7 +24,7 @@ from .Objects import (
     DEVICE_R2, DEVICE_R2_ID, DEVICE_R3, DEVICE_R3_ID, SERVICE_R1_R2, SERVICE_R1_R2_ID, SERVICE_R1_R3,
     SERVICE_R1_R3_ID, SERVICE_R2_R3, SERVICE_R2_R3_ID, TOPOLOGY, TOPOLOGY_ID)
 
-#@pytest.mark.depends(on=['context/tests/test_service.py::test_service', 'context/tests/test_slice.py::test_slice'])
+@pytest.mark.depends(on=['context/tests/test_service.py::test_service', 'context/tests/test_slice.py::test_slice'])
 def test_connection(context_client : ContextClient) -> None:
 
     # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
diff --git a/src/context/tests/test_context.py b/src/context/tests/test_context.py
index 443d36c92a7871ba93e02be0828fa1036bd2abdd..4337db239a5319fa0a877a2f0f0f6918616920ab 100644
--- a/src/context/tests/test_context.py
+++ b/src/context/tests/test_context.py
@@ -12,22 +12,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import copy, grpc, pytest
-from common.proto.context_pb2 import Context, ContextId, Empty
+import copy, grpc, pytest, time
+from common.proto.context_pb2 import Context, ContextEvent, ContextId, Empty, EventTypeEnum
 from context.client.ContextClient import ContextClient
 from context.service.database.uuids.Context import context_get_uuid
-#from context.client.EventsCollector import EventsCollector
+from context.client.EventsCollector import EventsCollector
 from .Objects import CONTEXT, CONTEXT_ID, CONTEXT_NAME
 
 def test_context(context_client : ContextClient) -> None:
 
     # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
-    #events_collector = EventsCollector(
-    #    context_client, log_events_received=True,
-    #    activate_context_collector = True, activate_topology_collector = False, activate_device_collector = False,
-    #    activate_link_collector = False, activate_service_collector = False, activate_slice_collector = False,
-    #    activate_connection_collector = False)
-    #events_collector.start()
+    events_collector = EventsCollector(
+        context_client, log_events_received=True,
+        activate_context_collector = True, activate_topology_collector = False, activate_device_collector = False,
+        activate_link_collector = False, activate_service_collector = False, activate_slice_collector = False,
+        activate_connection_collector = False)
+    events_collector.start()
+    time.sleep(3) # wait for the events collector to start
 
     # ----- Get when the object does not exist -------------------------------------------------------------------------
     context_id = ContextId(**CONTEXT_ID)
@@ -50,10 +51,10 @@ def test_context(context_client : ContextClient) -> None:
     assert response.context_uuid.uuid == context_uuid
 
     # ----- Check create event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True, timeout=10.0)
-    #assert isinstance(event, ContextEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    #assert event.context_id.context_uuid.uuid == context_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == context_uuid
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client.GetContext(ContextId(**CONTEXT_ID))
@@ -84,10 +85,10 @@ def test_context(context_client : ContextClient) -> None:
     assert response.context_uuid.uuid == context_uuid
 
     # ----- Check update event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True, timeout=10.0)
-    #assert isinstance(event, ContextEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
-    #assert event.context_id.context_uuid.uuid == context_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.context_id.context_uuid.uuid == context_uuid
 
     # ----- Get when the object is modified ----------------------------------------------------------------------------
     response = context_client.GetContext(ContextId(**CONTEXT_ID))
@@ -114,10 +115,10 @@ def test_context(context_client : ContextClient) -> None:
     context_client.RemoveContext(ContextId(**CONTEXT_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True, timeout=10.0)
-    #assert isinstance(event, ContextEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    #assert event.context_id.context_uuid.uuid == context_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == context_uuid
 
     # ----- List after deleting the object -----------------------------------------------------------------------------
     response = context_client.ListContextIds(Empty())
@@ -127,4 +128,4 @@ def test_context(context_client : ContextClient) -> None:
     assert len(response.contexts) == 0
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
-    #events_collector.stop()
+    events_collector.stop()
diff --git a/src/context/tests/test_topology.py b/src/context/tests/test_topology.py
index 23e73edc83e7f1cb16370c4325f93be277d3298a..2e7e38cb13c69efaeb9b3515cfb6e38ab1c14243 100644
--- a/src/context/tests/test_topology.py
+++ b/src/context/tests/test_topology.py
@@ -12,31 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import copy, grpc, pytest
-from common.proto.context_pb2 import Context, ContextId, Topology, TopologyId
+import copy, grpc, pytest, time
+from common.proto.context_pb2 import Context, ContextEvent, ContextId, EventTypeEnum, Topology, TopologyEvent, TopologyId
 from context.client.ContextClient import ContextClient
 from context.service.database.uuids.Topology import topology_get_uuid
-#from context.client.EventsCollector import EventsCollector
+from context.client.EventsCollector import EventsCollector
 from .Objects import CONTEXT, CONTEXT_ID, CONTEXT_NAME, TOPOLOGY, TOPOLOGY_ID, TOPOLOGY_NAME
 
 @pytest.mark.depends(on=['context/tests/test_context.py::test_context'])
 def test_topology(context_client : ContextClient) -> None:
 
     # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
-    #events_collector = EventsCollector(
-    #    context_client, log_events_received=True,
-    #    activate_context_collector = False, activate_topology_collector = True, activate_device_collector = False,
-    #    activate_link_collector = False, activate_service_collector = False, activate_slice_collector = False,
-    #    activate_connection_collector = False)
-    #events_collector.start()
+    events_collector = EventsCollector(
+        context_client, log_events_received=True,
+        activate_context_collector = False, activate_topology_collector = True, activate_device_collector = False,
+        activate_link_collector = False, activate_service_collector = False, activate_slice_collector = False,
+        activate_connection_collector = False)
+    events_collector.start()
+    time.sleep(3) # wait for the events collector to start
 
     # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
     context_client.SetContext(Context(**CONTEXT))
-    # event = events_collector.get_event(block=True)
-    # assert isinstance(event, ContextEvent)
-    # assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    # assert event.context_id.context_uuid.uuid == context_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
 
     # ----- Get when the object does not exist -------------------------------------------------------------------------
     topology_id = TopologyId(**TOPOLOGY_ID)
@@ -65,11 +65,11 @@ def test_topology(context_client : ContextClient) -> None:
     assert response.topology_uuid.uuid == topology_uuid
 
     # ----- Check create event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True)
-    #assert isinstance(event, TopologyEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    #assert event.topology_id.context_id.context_uuid.uuid == context_uuid
-    #assert event.topology_id.topology_uuid.uuid == topology_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.topology_id.context_id.context_uuid.uuid == context_uuid
+    assert event.topology_id.topology_uuid.uuid == topology_uuid
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client.GetContext(ContextId(**CONTEXT_ID))
@@ -111,11 +111,11 @@ def test_topology(context_client : ContextClient) -> None:
     assert response.topology_uuid.uuid == topology_uuid
 
     # ----- Check update event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True)
-    #assert isinstance(event, TopologyEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
-    #assert event.topology_id.context_id.context_uuid.uuid == context_uuid
-    #assert event.topology_id.topology_uuid.uuid == topology_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.topology_id.context_id.context_uuid.uuid == context_uuid
+    assert event.topology_id.topology_uuid.uuid == topology_uuid
 
     # ----- Get when the object is modified ----------------------------------------------------------------------------
     response = context_client.GetTopology(TopologyId(**TOPOLOGY_ID))
@@ -143,11 +143,11 @@ def test_topology(context_client : ContextClient) -> None:
     context_client.RemoveTopology(TopologyId(**TOPOLOGY_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    #event = events_collector.get_event(block=True)
-    #assert isinstance(event, TopologyEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    #assert event.topology_id.context_id.context_uuid.uuid == context_uuid
-    #assert event.topology_id.topology_uuid.uuid == topology_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.topology_id.context_id.context_uuid.uuid == context_uuid
+    assert event.topology_id.topology_uuid.uuid == topology_uuid
 
     # ----- List after deleting the object -----------------------------------------------------------------------------
     response = context_client.GetContext(ContextId(**CONTEXT_ID))
@@ -164,10 +164,10 @@ def test_topology(context_client : ContextClient) -> None:
 
     # ----- Clean dependencies used in the test and capture related events ---------------------------------------------
     context_client.RemoveContext(ContextId(**CONTEXT_ID))
-    #event = events_collector.get_event(block=True)
-    #assert isinstance(event, ContextEvent)
-    #assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    #assert event.context_id.context_uuid.uuid == context_uuid
+    event = events_collector.get_event(block=True, timeout=1.0)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == context_uuid
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
-    #events_collector.stop()
+    events_collector.stop()