From 8d7f1fd922da9fc6a0c4ef16d41598a89c5da05f Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 11:57:55 +0000 Subject: [PATCH 01/14] Context component: - Added SelectDevice, SelectService, SelectSlice RPC methods - Corrected relationship import modes - Added options to selectively load relations according to method --- proto/context.proto | 27 ++++- src/context/client/ContextClient.py | 27 ++++- .../service/ContextServiceServicerImpl.py | 24 +++- src/context/service/database/Connection.py | 11 +- src/context/service/database/Context.py | 14 ++- src/context/service/database/Device.py | 37 ++++++- src/context/service/database/EndPoint.py | 7 +- src/context/service/database/Link.py | 10 +- src/context/service/database/PolicyRule.py | 11 +- src/context/service/database/Service.py | 35 +++++- src/context/service/database/Slice.py | 43 +++++++- src/context/service/database/Topology.py | 11 +- .../database/models/ConnectionModel.py | 8 +- .../service/database/models/DeviceModel.py | 32 ++++-- .../service/database/models/EndPointModel.py | 4 +- .../database/models/PolicyRuleModel.py | 2 +- .../service/database/models/ServiceModel.py | 62 ++++++----- .../service/database/models/SliceModel.py | 103 +++++++++++------- .../service/database/models/TopologyModel.py | 10 +- 19 files changed, 350 insertions(+), 128 deletions(-) diff --git a/proto/context.proto b/proto/context.proto index 2dfbb7805..3b25e6361 100644 --- a/proto/context.proto +++ b/proto/context.proto @@ -40,7 +40,7 @@ service ContextService { rpc SetDevice (Device ) returns ( DeviceId ) {} rpc RemoveDevice (DeviceId ) returns ( Empty ) {} rpc GetDeviceEvents (Empty ) returns (stream DeviceEvent ) {} - + rpc SelectDevice (DeviceFilter ) returns ( DeviceList ) {} rpc ListEndPointNames (EndPointIdList) returns ( EndPointNameList) {} rpc ListLinkIds (Empty ) returns ( LinkIdList ) {} @@ -57,6 +57,7 @@ service ContextService { rpc UnsetService (Service ) returns ( ServiceId ) {} rpc RemoveService (ServiceId ) returns ( Empty ) {} rpc GetServiceEvents (Empty ) returns (stream ServiceEvent ) {} + rpc SelectService (ServiceFilter ) returns ( ServiceList ) {} rpc ListSliceIds (ContextId ) returns ( SliceIdList ) {} rpc ListSlices (ContextId ) returns ( SliceList ) {} @@ -65,6 +66,7 @@ service ContextService { rpc UnsetSlice (Slice ) returns ( SliceId ) {} rpc RemoveSlice (SliceId ) returns ( Empty ) {} rpc GetSliceEvents (Empty ) returns (stream SliceEvent ) {} + rpc SelectSlice (SliceFilter ) returns ( SliceList ) {} rpc ListConnectionIds (ServiceId ) returns ( ConnectionIdList) {} rpc ListConnections (ServiceId ) returns ( ConnectionList ) {} @@ -208,6 +210,13 @@ message DeviceList { repeated Device devices = 1; } +message DeviceFilter { + DeviceIdList device_ids = 1; + bool include_endpoints = 2; + bool include_config_rules = 3; + bool include_components = 4; +} + message DeviceEvent { Event event = 1; DeviceId device_id = 2; @@ -288,6 +297,13 @@ message ServiceList { repeated Service services = 1; } +message ServiceFilter { + ServiceIdList service_ids = 1; + bool include_endpoint_ids = 2; + bool include_constraints = 3; + bool include_config_rules = 4; +} + message ServiceEvent { Event event = 1; ServiceId service_id = 2; @@ -342,6 +358,15 @@ message SliceList { repeated Slice slices = 1; } +message SliceFilter { + SliceIdList slice_ids = 1; + bool include_endpoint_ids = 2; + bool include_constraints = 3; + bool include_service_ids = 4; + bool include_subslice_ids = 5; + bool include_config_rules = 6; +} + message SliceEvent { 
Event event = 1; SliceId slice_id = 2; diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py index 7c3832d6b..13d9dc003 100644 --- a/src/context/client/ContextClient.py +++ b/src/context/client/ContextClient.py @@ -21,11 +21,11 @@ from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import ( Connection, ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList, Context, ContextEvent, ContextId, ContextIdList, ContextList, - Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList, + Device, DeviceEvent, DeviceFilter, DeviceId, DeviceIdList, DeviceList, Empty, EndPointIdList, EndPointNameList, Link, LinkEvent, LinkId, LinkIdList, LinkList, - Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList, - Slice, SliceEvent, SliceId, SliceIdList, SliceList, + Service, ServiceEvent, ServiceFilter, ServiceId, ServiceIdList, ServiceList, + Slice, SliceEvent, SliceFilter, SliceId, SliceIdList, SliceList, Topology, TopologyDetails, TopologyEvent, TopologyId, TopologyIdList, TopologyList) from common.proto.context_pb2_grpc import ContextServiceStub from common.proto.context_policy_pb2_grpc import ContextPolicyServiceStub @@ -185,6 +185,13 @@ class ContextClient: LOGGER.debug('RemoveDevice result: {:s}'.format(grpc_message_to_json_string(response))) return response + @RETRY_DECORATOR + def SelectDevice(self, request: DeviceFilter) -> DeviceList: + LOGGER.debug('SelectDevice request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectDevice(request) + LOGGER.debug('SelectDevice result: {:s}'.format(grpc_message_to_json_string(response))) + return response + @RETRY_DECORATOR def GetDeviceEvents(self, request: Empty) -> Iterator[DeviceEvent]: LOGGER.debug('GetDeviceEvents request: {:s}'.format(grpc_message_to_json_string(request))) @@ -283,6 +290,13 @@ class ContextClient: LOGGER.debug('RemoveService result: {:s}'.format(grpc_message_to_json_string(response))) return response + @RETRY_DECORATOR + def SelectService(self, request: ServiceFilter) -> ServiceList: + LOGGER.debug('SelectService request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectService(request) + LOGGER.debug('SelectService result: {:s}'.format(grpc_message_to_json_string(response))) + return response + @RETRY_DECORATOR def GetServiceEvents(self, request: Empty) -> Iterator[ServiceEvent]: LOGGER.debug('GetServiceEvents request: {:s}'.format(grpc_message_to_json_string(request))) @@ -332,6 +346,13 @@ class ContextClient: LOGGER.debug('RemoveSlice result: {:s}'.format(grpc_message_to_json_string(response))) return response + @RETRY_DECORATOR + def SelectSlice(self, request: SliceFilter) -> SliceList: + LOGGER.debug('SelectSlice request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectSlice(request) + LOGGER.debug('SelectSlice result: {:s}'.format(grpc_message_to_json_string(response))) + return response + @RETRY_DECORATOR def GetSliceEvents(self, request: Empty) -> Iterator[SliceEvent]: LOGGER.debug('GetSliceEvents request: {:s}'.format(grpc_message_to_json_string(request))) diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py index 6fe00f917..789ee7a78 100644 --- a/src/context/service/ContextServiceServicerImpl.py +++ b/src/context/service/ContextServiceServicerImpl.py @@ -18,11 +18,11 @@ from common.message_broker.MessageBroker import MessageBroker from common.proto.context_pb2 import ( Connection, 
ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList, Context, ContextEvent, ContextId, ContextIdList, ContextList, - Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList, + Device, DeviceEvent, DeviceFilter, DeviceId, DeviceIdList, DeviceList, Empty, EndPointIdList, EndPointNameList, EventTypeEnum, Link, LinkEvent, LinkId, LinkIdList, LinkList, - Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList, - Slice, SliceEvent, SliceId, SliceIdList, SliceList, + Service, ServiceEvent, ServiceFilter, ServiceId, ServiceIdList, ServiceList, + Slice, SliceEvent, SliceFilter, SliceId, SliceIdList, SliceList, Topology, TopologyDetails, TopologyEvent, TopologyId, TopologyIdList, TopologyList) from common.proto.policy_pb2 import PolicyRuleIdList, PolicyRuleId, PolicyRuleList, PolicyRule from common.proto.context_pb2_grpc import ContextServiceServicer @@ -31,13 +31,13 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m from .database.Connection import ( connection_delete, connection_get, connection_list_ids, connection_list_objs, connection_set) from .database.Context import context_delete, context_get, context_list_ids, context_list_objs, context_set -from .database.Device import device_delete, device_get, device_list_ids, device_list_objs, device_set +from .database.Device import device_delete, device_get, device_list_ids, device_list_objs, device_select, device_set from .database.EndPoint import endpoint_list_names from .database.Link import link_delete, link_get, link_list_ids, link_list_objs, link_set from .database.PolicyRule import ( policyrule_delete, policyrule_get, policyrule_list_ids, policyrule_list_objs, policyrule_set) -from .database.Service import service_delete, service_get, service_list_ids, service_list_objs, service_set -from .database.Slice import slice_delete, slice_get, slice_list_ids, slice_list_objs, slice_set, slice_unset +from .database.Service import service_delete, service_get, service_list_ids, service_list_objs, service_select, service_set +from .database.Slice import slice_delete, slice_get, slice_list_ids, slice_list_objs, slice_select, slice_set, slice_unset from .database.Topology import ( topology_delete, topology_get, topology_get_details, topology_list_ids, topology_list_objs, topology_set) from .Events import ( @@ -161,6 +161,10 @@ class ContextServiceServicerImpl(ContextServiceServicer, ContextPolicyServiceSer notify_event(self.messagebroker, TOPIC_DEVICE, event_type, {'device_id': device_id}) return Empty() + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectDevice(self, request : DeviceFilter, context : grpc.ServicerContext) -> DeviceList: + return DeviceList(devices=device_select(self.db_engine, request)) + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def GetDeviceEvents(self, request : Empty, context : grpc.ServicerContext) -> Iterator[DeviceEvent]: for message in self.messagebroker.consume({TOPIC_DEVICE}, consume_timeout=CONSUME_TIMEOUT): @@ -235,6 +239,10 @@ class ContextServiceServicerImpl(ContextServiceServicer, ContextPolicyServiceSer notify_event(self.messagebroker, TOPIC_SERVICE, event_type, {'service_id': service_id}) return Empty() + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectService(self, request : ServiceFilter, context : grpc.ServicerContext) -> ServiceList: + return ServiceList(services=service_select(self.db_engine, request)) + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def GetServiceEvents(self, request : Empty, context : grpc.ServicerContext) ->
Iterator[ServiceEvent]: for message in self.messagebroker.consume({TOPIC_SERVICE}, consume_timeout=CONSUME_TIMEOUT): @@ -278,6 +286,10 @@ class ContextServiceServicerImpl(ContextServiceServicer, ContextPolicyServiceSer notify_event(self.messagebroker, TOPIC_SLICE, event_type, {'slice_id': slice_id}) return Empty() + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectSlice(self, request : SliceFilter, context : grpc.ServicerContext) -> SliceList: + return SliceList(slices=slice_select(self.db_engine, request)) + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def GetSliceEvents(self, request : Empty, context : grpc.ServicerContext) -> Iterator[SliceEvent]: for message in self.messagebroker.consume({TOPIC_SLICE}, consume_timeout=CONSUME_TIMEOUT): diff --git a/src/context/service/database/Connection.py b/src/context/service/database/Connection.py index a3edb8ea2..80d3b3a6d 100644 --- a/src/context/service/database/Connection.py +++ b/src/context/service/database/Connection.py @@ -16,7 +16,7 @@ import datetime, logging, re from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine from sqlalchemy.exc import IntegrityError -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Tuple from common.proto.context_pb2 import Connection, ConnectionId, ServiceId @@ -40,7 +40,11 @@ def connection_list_ids(db_engine : Engine, request : ServiceId) -> List[Dict]: def connection_list_objs(db_engine : Engine, request : ServiceId) -> List[Dict]: _,service_uuid = service_get_uuid(request, allow_random=False) def callback(session : Session) -> List[Dict]: - obj_list : List[ConnectionModel] = session.query(ConnectionModel).filter_by(service_uuid=service_uuid).all() + obj_list : List[ConnectionModel] = session.query(ConnectionModel)\ + .options(selectinload(ConnectionModel.connection_service))\ + .options(selectinload(ConnectionModel.connection_endpoints))\ + .options(selectinload(ConnectionModel.connection_subservices))\ + .filter_by(service_uuid=service_uuid).all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) @@ -48,6 +52,9 @@ def connection_get(db_engine : Engine, request : ConnectionId) -> Dict: connection_uuid = connection_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: obj : Optional[ConnectionModel] = session.query(ConnectionModel)\ + .options(selectinload(ConnectionModel.connection_service))\ + .options(selectinload(ConnectionModel.connection_endpoints))\ + .options(selectinload(ConnectionModel.connection_subservices))\ .filter_by(connection_uuid=connection_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/Context.py b/src/context/service/database/Context.py index 9e05e54b3..465409503 100644 --- a/src/context/service/database/Context.py +++ b/src/context/service/database/Context.py @@ -15,7 +15,7 @@ import datetime, logging from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Tuple from common.proto.context_pb2 import Context, ContextId @@ -34,14 +34,22 @@ def 
context_list_ids(db_engine : Engine) -> List[Dict]: def context_list_objs(db_engine : Engine) -> List[Dict]: def callback(session : Session) -> List[Dict]: - obj_list : List[ContextModel] = session.query(ContextModel).all() + obj_list : List[ContextModel] = session.query(ContextModel)\ + .options(selectinload(ContextModel.topologies))\ + .options(selectinload(ContextModel.services))\ + .options(selectinload(ContextModel.slices))\ + .all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def context_get(db_engine : Engine, request : ContextId) -> Dict: context_uuid = context_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: - obj : Optional[ContextModel] = session.query(ContextModel).filter_by(context_uuid=context_uuid).one_or_none() + obj : Optional[ContextModel] = session.query(ContextModel)\ + .options(selectinload(ContextModel.topologies))\ + .options(selectinload(ContextModel.services))\ + .options(selectinload(ContextModel.slices))\ + .filter_by(context_uuid=context_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) if obj is None: diff --git a/src/context/service/database/Device.py b/src/context/service/database/Device.py index c5a19c9c4..c51a88368 100644 --- a/src/context/service/database/Device.py +++ b/src/context/service/database/Device.py @@ -15,12 +15,12 @@ import datetime, logging from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Set, Tuple from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException -from common.proto.context_pb2 import Device, DeviceId, TopologyId -from common.tools.grpc.Tools import grpc_message_to_json_string +from common.proto.context_pb2 import Device, DeviceFilter, DeviceId, TopologyId +#from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Device import json_device_id from context.service.database.uuids.Topology import topology_get_uuid from .models.DeviceModel import DeviceModel @@ -43,14 +43,22 @@ def device_list_ids(db_engine : Engine) -> List[Dict]: def device_list_objs(db_engine : Engine) -> List[Dict]: def callback(session : Session) -> List[Dict]: - obj_list : List[DeviceModel] = session.query(DeviceModel).all() + obj_list : List[DeviceModel] = session.query(DeviceModel)\ + .options(selectinload(DeviceModel.endpoints))\ + .options(selectinload(DeviceModel.config_rules))\ + .all() + #.options(selectinload(DeviceModel.components))\ return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def device_get(db_engine : Engine, request : DeviceId) -> Dict: device_uuid = device_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: - obj : Optional[DeviceModel] = session.query(DeviceModel).filter_by(device_uuid=device_uuid).one_or_none() + obj : Optional[DeviceModel] = session.query(DeviceModel)\ + .options(selectinload(DeviceModel.endpoints))\ + .options(selectinload(DeviceModel.config_rules))\ + .filter_by(device_uuid=device_uuid).one_or_none() + #.options(selectinload(DeviceModel.components))\ return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), 
callback) if obj is None: @@ -182,3 +190,22 @@ def device_delete(db_engine : Engine, request : DeviceId) -> Tuple[Dict, bool]: return num_deleted > 0 deleted = run_transaction(sessionmaker(bind=db_engine), callback) return json_device_id(device_uuid),deleted + +def device_select(db_engine : Engine, request : DeviceFilter) -> List[Dict]: + device_uuids = [ + device_get_uuid(device_id, allow_random=False) + for device_id in request.device_ids.device_ids + ] + dump_params = dict( + include_endpoints =request.include_endpoints, + include_config_rules=request.include_config_rules, + include_components =request.include_components, + ) + def callback(session : Session) -> List[Dict]: + query = session.query(DeviceModel) + if request.include_endpoints : query = query.options(selectinload(DeviceModel.endpoints)) + if request.include_config_rules: query = query.options(selectinload(DeviceModel.config_rules)) + #if request.include_components : query = query.options(selectinload(DeviceModel.components)) + obj_list : List[DeviceModel] = query.filter(DeviceModel.device_uuid.in_(device_uuids)).all() + return [obj.dump(**dump_params) for obj in obj_list] + return run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/EndPoint.py b/src/context/service/database/EndPoint.py index e2f86893a..b0df3bb81 100644 --- a/src/context/service/database/EndPoint.py +++ b/src/context/service/database/EndPoint.py @@ -14,7 +14,7 @@ import logging from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List from common.proto.context_pb2 import EndPointIdList @@ -29,7 +29,8 @@ def endpoint_list_names(db_engine : Engine, request : EndPointIdList) -> List[Di for endpoint_id in request.endpoint_ids } def callback(session : Session) -> List[Dict]: - obj_list : List[EndPointModel] = \ - session.query(EndPointModel).filter(EndPointModel.endpoint_uuid.in_(endpoint_uuids)).all() + obj_list : List[EndPointModel] = session.query(EndPointModel)\ + .options(selectinload(EndPointModel.device))\ + .filter(EndPointModel.endpoint_uuid.in_(endpoint_uuids)).all() return [obj.dump_name() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/Link.py b/src/context/service/database/Link.py index 299827dbd..f5bfc9dea 100644 --- a/src/context/service/database/Link.py +++ b/src/context/service/database/Link.py @@ -15,7 +15,7 @@ import datetime, logging from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Set, Tuple from common.proto.context_pb2 import Link, LinkId @@ -36,14 +36,18 @@ def link_list_ids(db_engine : Engine) -> List[Dict]: def link_list_objs(db_engine : Engine) -> List[Dict]: def callback(session : Session) -> List[Dict]: - obj_list : List[LinkModel] = session.query(LinkModel).all() + obj_list : List[LinkModel] = session.query(LinkModel)\ + .options(selectinload(LinkModel.link_endpoints))\ + .all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def link_get(db_engine : Engine, request : LinkId) -> Dict: link_uuid = link_get_uuid(request, allow_random=False) def 
callback(session : Session) -> Optional[Dict]: - obj : Optional[LinkModel] = session.query(LinkModel).filter_by(link_uuid=link_uuid).one_or_none() + obj : Optional[LinkModel] = session.query(LinkModel)\ + .options(selectinload(LinkModel.link_endpoints))\ + .filter_by(link_uuid=link_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) if obj is None: diff --git a/src/context/service/database/PolicyRule.py b/src/context/service/database/PolicyRule.py index e95cec4ae..13f0a2698 100644 --- a/src/context/service/database/PolicyRule.py +++ b/src/context/service/database/PolicyRule.py @@ -15,7 +15,7 @@ import datetime, json from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Set, Tuple from common.proto.policy_pb2 import PolicyRule, PolicyRuleId, PolicyRuleIdList, PolicyRuleList @@ -31,14 +31,15 @@ from .uuids.Service import service_get_uuid def policyrule_list_ids(db_engine : Engine) -> List[Dict]: def callback(session : Session) -> List[Dict]: obj_list : List[PolicyRuleModel] = session.query(PolicyRuleModel).all() - #.options(selectinload(PolicyRuleModel.topology)).filter_by(context_uuid=context_uuid).one_or_none() return [obj.dump_id() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def policyrule_list_objs(db_engine : Engine) -> List[Dict]: def callback(session : Session) -> List[Dict]: - obj_list : List[PolicyRuleModel] = session.query(PolicyRuleModel).all() - #.options(selectinload(PolicyRuleModel.topology)).filter_by(context_uuid=context_uuid).one_or_none() + obj_list : List[PolicyRuleModel] = session.query(PolicyRuleModel)\ + .options(selectinload(PolicyRuleModel.policyrule_service))\ + .options(selectinload(PolicyRuleModel.policyrule_devices))\ + .all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) @@ -46,6 +47,8 @@ def policyrule_get(db_engine : Engine, request : PolicyRuleId) -> PolicyRule: policyrule_uuid = policyrule_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: obj : Optional[PolicyRuleModel] = session.query(PolicyRuleModel)\ + .options(selectinload(PolicyRuleModel.policyrule_service))\ + .options(selectinload(PolicyRuleModel.policyrule_devices))\ .filter_by(policyrule_uuid=policyrule_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/Service.py b/src/context/service/database/Service.py index fe12eaf8a..32484a309 100644 --- a/src/context/service/database/Service.py +++ b/src/context/service/database/Service.py @@ -15,10 +15,10 @@ import datetime, logging from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Tuple -from common.proto.context_pb2 import ContextId, Service, ServiceId +from common.proto.context_pb2 import ContextId, Service, ServiceFilter, ServiceId from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException from 
common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Service import json_service_id @@ -43,14 +43,22 @@ def service_list_ids(db_engine : Engine, request : ContextId) -> List[Dict]: def service_list_objs(db_engine : Engine, request : ContextId) -> List[Dict]: context_uuid = context_get_uuid(request, allow_random=False) def callback(session : Session) -> List[Dict]: - obj_list : List[ServiceModel] = session.query(ServiceModel).filter_by(context_uuid=context_uuid).all() + obj_list : List[ServiceModel] = session.query(ServiceModel)\ + .options(selectinload(ServiceModel.service_endpoints))\ + .options(selectinload(ServiceModel.constraints))\ + .options(selectinload(ServiceModel.config_rules))\ + .filter_by(context_uuid=context_uuid).all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def service_get(db_engine : Engine, request : ServiceId) -> Dict: _,service_uuid = service_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: - obj : Optional[ServiceModel] = session.query(ServiceModel).filter_by(service_uuid=service_uuid).one_or_none() + obj : Optional[ServiceModel] = session.query(ServiceModel)\ + .options(selectinload(ServiceModel.service_endpoints))\ + .options(selectinload(ServiceModel.constraints))\ + .options(selectinload(ServiceModel.config_rules))\ + .filter_by(service_uuid=service_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) if obj is None: @@ -145,3 +153,22 @@ def service_delete(db_engine : Engine, request : ServiceId) -> Tuple[Dict, bool] return num_deleted > 0 deleted = run_transaction(sessionmaker(bind=db_engine), callback) return json_service_id(service_uuid, json_context_id(context_uuid)),deleted + +def service_select(db_engine : Engine, request : ServiceFilter) -> List[Dict]: + service_uuids = [ + service_get_uuid(service_id, allow_random=False)[1] + for service_id in request.service_ids.service_ids + ] + dump_params = dict( + include_endpoint_ids=request.include_endpoint_ids, + include_constraints =request.include_constraints, + include_config_rules=request.include_config_rules, + ) + def callback(session : Session) -> List[Dict]: + query = session.query(ServiceModel) + if request.include_endpoint_ids: query = query.options(selectinload(ServiceModel.service_endpoints)) + if request.include_constraints : query = query.options(selectinload(ServiceModel.constraints)) + if request.include_config_rules: query = query.options(selectinload(ServiceModel.config_rules)) + obj_list : List[ServiceModel] = query.filter(ServiceModel.service_uuid.in_(service_uuids)).all() + return [obj.dump(**dump_params) for obj in obj_list] + return run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/Slice.py b/src/context/service/database/Slice.py index 724046bfa..abd140024 100644 --- a/src/context/service/database/Slice.py +++ b/src/context/service/database/Slice.py @@ -16,10 +16,10 @@ import datetime, logging from sqlalchemy import and_ from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Set, Tuple -from common.proto.context_pb2 import ContextId, Slice, SliceId +from common.proto.context_pb2 import ContextId, Slice, 
SliceFilter, SliceId from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Slice import json_slice_id @@ -44,14 +44,26 @@ def slice_list_ids(db_engine : Engine, request : ContextId) -> List[Dict]: def slice_list_objs(db_engine : Engine, request : ContextId) -> List[Dict]: context_uuid = context_get_uuid(request, allow_random=False) def callback(session : Session) -> List[Dict]: - obj_list : List[SliceModel] = session.query(SliceModel).filter_by(context_uuid=context_uuid).all() + obj_list : List[SliceModel] = session.query(SliceModel)\ + .options(selectinload(SliceModel.slice_endpoints))\ + .options(selectinload(SliceModel.slice_services))\ + .options(selectinload(SliceModel.slice_subslices))\ + .options(selectinload(SliceModel.constraints))\ + .options(selectinload(SliceModel.config_rules))\ + .filter_by(context_uuid=context_uuid).all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) def slice_get(db_engine : Engine, request : SliceId) -> Dict: _,slice_uuid = slice_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: - obj : Optional[SliceModel] = session.query(SliceModel).filter_by(slice_uuid=slice_uuid).one_or_none() + obj : Optional[SliceModel] = session.query(SliceModel)\ + .options(selectinload(SliceModel.slice_endpoints))\ + .options(selectinload(SliceModel.slice_services))\ + .options(selectinload(SliceModel.slice_subslices))\ + .options(selectinload(SliceModel.constraints))\ + .options(selectinload(SliceModel.config_rules))\ + .filter_by(slice_uuid=slice_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) if obj is None: @@ -240,3 +252,26 @@ def slice_delete(db_engine : Engine, request : SliceId) -> Tuple[Dict, bool]: return num_deleted > 0 deleted = run_transaction(sessionmaker(bind=db_engine), callback) return json_slice_id(slice_uuid, json_context_id(context_uuid)),deleted + +def slice_select(db_engine : Engine, request : SliceFilter) -> List[Dict]: + slice_uuids = [ + slice_get_uuid(slice_id, allow_random=False)[1] + for slice_id in request.slice_ids.slice_ids + ] + dump_params = dict( + include_endpoint_ids=request.include_endpoint_ids, + include_constraints =request.include_constraints, + include_service_ids =request.include_service_ids, + include_subslice_ids=request.include_subslice_ids, + include_config_rules=request.include_config_rules, + ) + def callback(session : Session) -> List[Dict]: + query = session.query(SliceModel) + if request.include_endpoint_ids: query = query.options(selectinload(SliceModel.slice_endpoints)) + if request.include_service_ids : query = query.options(selectinload(SliceModel.slice_services)) + if request.include_subslice_ids: query = query.options(selectinload(SliceModel.slice_subslices)) + if request.include_constraints : query = query.options(selectinload(SliceModel.constraints)) + if request.include_config_rules: query = query.options(selectinload(SliceModel.config_rules)) + obj_list : List[SliceModel] = query.filter(SliceModel.slice_uuid.in_(slice_uuids)).all() + return [obj.dump(**dump_params) for obj in obj_list] + return run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/Topology.py b/src/context/service/database/Topology.py index e2c6e2e99..210f497d1 100644 --- 
a/src/context/service/database/Topology.py +++ b/src/context/service/database/Topology.py @@ -15,7 +15,7 @@ import datetime, logging from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine -from sqlalchemy.orm import Session, sessionmaker +from sqlalchemy.orm import Session, selectinload, sessionmaker from sqlalchemy_cockroachdb import run_transaction from typing import Dict, List, Optional, Tuple from common.proto.context_pb2 import ContextId, Topology, TopologyId @@ -38,7 +38,10 @@ def topology_list_ids(db_engine : Engine, request : ContextId) -> List[Dict]: def topology_list_objs(db_engine : Engine, request : ContextId) -> List[Dict]: context_uuid = context_get_uuid(request, allow_random=False) def callback(session : Session) -> List[Dict]: - obj_list : List[TopologyModel] = session.query(TopologyModel).filter_by(context_uuid=context_uuid).all() + obj_list : List[TopologyModel] = session.query(TopologyModel)\ + .options(selectinload(TopologyModel.topology_devices))\ + .options(selectinload(TopologyModel.topology_links))\ + .filter_by(context_uuid=context_uuid).all() return [obj.dump() for obj in obj_list] return run_transaction(sessionmaker(bind=db_engine), callback) @@ -46,6 +49,8 @@ def topology_get(db_engine : Engine, request : TopologyId) -> Dict: _,topology_uuid = topology_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: obj : Optional[TopologyModel] = session.query(TopologyModel)\ + .options(selectinload(TopologyModel.topology_devices))\ + .options(selectinload(TopologyModel.topology_links))\ .filter_by(topology_uuid=topology_uuid).one_or_none() return None if obj is None else obj.dump() obj = run_transaction(sessionmaker(bind=db_engine), callback) @@ -62,6 +67,8 @@ def topology_get_details(db_engine : Engine, request : TopologyId) -> Dict: _,topology_uuid = topology_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: obj : Optional[TopologyModel] = session.query(TopologyModel)\ + .options(selectinload(TopologyModel.topology_devices))\ + .options(selectinload(TopologyModel.topology_links))\ .filter_by(topology_uuid=topology_uuid).one_or_none() return None if obj is None else obj.dump_details() obj = run_transaction(sessionmaker(bind=db_engine), callback) diff --git a/src/context/service/database/models/ConnectionModel.py b/src/context/service/database/models/ConnectionModel.py index 156e33c6b..f71d41778 100644 --- a/src/context/service/database/models/ConnectionModel.py +++ b/src/context/service/database/models/ConnectionModel.py @@ -59,8 +59,8 @@ class ConnectionEndPointModel(_Base): endpoint_uuid = Column(ForeignKey('endpoint.endpoint_uuid', ondelete='RESTRICT'), primary_key=True, index=True) position = Column(Integer, nullable=False) - connection = relationship('ConnectionModel', back_populates='connection_endpoints', lazy='joined') - endpoint = relationship('EndPointModel', lazy='joined') # back_populates='connection_endpoints' + connection = relationship('ConnectionModel', back_populates='connection_endpoints') #, lazy='joined' + endpoint = relationship('EndPointModel', lazy='selectin') # back_populates='connection_endpoints' __table_args__ = ( CheckConstraint(position >= 0, name='check_position_value'), @@ -72,5 +72,5 @@ class ConnectionSubServiceModel(_Base): connection_uuid = Column(ForeignKey('connection.connection_uuid', ondelete='CASCADE' ), primary_key=True) subservice_uuid = Column(ForeignKey('service.service_uuid', ondelete='RESTRICT'), primary_key=True, 
index=True) - connection = relationship('ConnectionModel', back_populates='connection_subservices', lazy='joined') - subservice = relationship('ServiceModel', lazy='joined') # back_populates='connection_subservices' + connection = relationship('ConnectionModel', back_populates='connection_subservices') #, lazy='joined' + subservice = relationship('ServiceModel', lazy='selectin') # back_populates='connection_subservices' diff --git a/src/context/service/database/models/DeviceModel.py b/src/context/service/database/models/DeviceModel.py index 2124386d1..24130841d 100644 --- a/src/context/service/database/models/DeviceModel.py +++ b/src/context/service/database/models/DeviceModel.py @@ -16,7 +16,7 @@ import operator from sqlalchemy import Column, DateTime, Enum, String from sqlalchemy.dialects.postgresql import ARRAY, UUID from sqlalchemy.orm import relationship -from typing import Dict +from typing import Dict, List from .enums.DeviceDriver import ORM_DeviceDriverEnum from .enums.DeviceOperationalStatus import ORM_DeviceOperationalStatusEnum from ._Base import _Base @@ -39,19 +39,29 @@ class DeviceModel(_Base): def dump_id(self) -> Dict: return {'device_uuid': {'uuid': self.device_uuid}} - def dump(self) -> Dict: - return { + def dump_endpoints(self) -> List[Dict]: + return [endpoint.dump() for endpoint in self.endpoints] + + def dump_config_rules(self) -> Dict: + return {'config_rules': [ + config_rule.dump() + for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) + ]} + + #def dump_components(self) -> List[Dict]: + # return [] + + def dump(self, + include_endpoints : bool = True, include_config_rules : bool = True, include_components : bool = True, + ) -> Dict: + result = { 'device_id' : self.dump_id(), 'name' : self.device_name, 'device_type' : self.device_type, 'device_operational_status': self.device_operational_status.value, 'device_drivers' : [driver.value for driver in self.device_drivers], - 'device_config' : {'config_rules': [ - config_rule.dump() - for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) - ]}, - 'device_endpoints' : [ - endpoint.dump() - for endpoint in self.endpoints - ], } + if include_endpoints: result['device_endpoints'] = self.dump_endpoints() + if include_config_rules: result['device_config'] = self.dump_config_rules() + #if include_components: result['components'] = self.dump_components() + return result diff --git a/src/context/service/database/models/EndPointModel.py b/src/context/service/database/models/EndPointModel.py index 12ba7e10e..a079f9900 100644 --- a/src/context/service/database/models/EndPointModel.py +++ b/src/context/service/database/models/EndPointModel.py @@ -31,8 +31,8 @@ class EndPointModel(_Base): created_at = Column(DateTime, nullable=False) updated_at = Column(DateTime, nullable=False) - device = relationship('DeviceModel', back_populates='endpoints') - topology = relationship('TopologyModel') + device = relationship('DeviceModel', back_populates='endpoints') # lazy='selectin' + topology = relationship('TopologyModel', lazy='selectin') #link_endpoints = relationship('LinkEndPointModel', back_populates='endpoint' ) #service_endpoints = relationship('ServiceEndPointModel', back_populates='endpoint' ) diff --git a/src/context/service/database/models/PolicyRuleModel.py b/src/context/service/database/models/PolicyRuleModel.py index 663a9a39a..32364e289 100644 --- a/src/context/service/database/models/PolicyRuleModel.py +++ b/src/context/service/database/models/PolicyRuleModel.py @@ -74,4 
+74,4 @@ class PolicyRuleDeviceModel(_Base): device_uuid = Column(ForeignKey('device.device_uuid', ondelete='RESTRICT'), primary_key=True, index=True) #policyrule = relationship('PolicyRuleModel', lazy='joined') # back_populates='policyrule_devices' - device = relationship('DeviceModel', lazy='joined') # back_populates='policyrule_devices' + device = relationship('DeviceModel', lazy='selectin') # back_populates='policyrule_devices' diff --git a/src/context/service/database/models/ServiceModel.py b/src/context/service/database/models/ServiceModel.py index f1781c4f8..ef6e1b06a 100644 --- a/src/context/service/database/models/ServiceModel.py +++ b/src/context/service/database/models/ServiceModel.py @@ -16,7 +16,7 @@ import operator from sqlalchemy import CheckConstraint, Column, DateTime, Enum, ForeignKey, Integer, String from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import relationship -from typing import Dict +from typing import Dict, List from .enums.ServiceStatus import ORM_ServiceStatusEnum from .enums.ServiceType import ORM_ServiceTypeEnum from ._Base import _Base @@ -32,10 +32,10 @@ class ServiceModel(_Base): created_at = Column(DateTime, nullable=False) updated_at = Column(DateTime, nullable=False) - context = relationship('ContextModel', back_populates='services') - service_endpoints = relationship('ServiceEndPointModel') # lazy='joined', back_populates='service' - constraints = relationship('ConstraintModel', passive_deletes=True) # lazy='joined', back_populates='service' - config_rules = relationship('ConfigRuleModel', passive_deletes=True) # lazy='joined', back_populates='service' + context = relationship('ContextModel', back_populates='services', lazy='selectin') + service_endpoints = relationship('ServiceEndPointModel') # lazy='selectin', back_populates='service' + constraints = relationship('ConstraintModel', passive_deletes=True) # lazy='selectin', back_populates='service' + config_rules = relationship('ConfigRuleModel', passive_deletes=True) # lazy='selectin', back_populates='service' def dump_id(self) -> Dict: return { @@ -43,25 +43,37 @@ class ServiceModel(_Base): 'service_uuid': {'uuid': self.service_uuid}, } - def dump(self) -> Dict: - return { - 'service_id' : self.dump_id(), - 'name' : self.service_name, - 'service_type' : self.service_type.value, - 'service_status' : {'service_status': self.service_status.value}, - 'service_endpoint_ids': [ - service_endpoint.endpoint.dump_id() - for service_endpoint in sorted(self.service_endpoints, key=operator.attrgetter('position')) - ], - 'service_constraints' : [ - constraint.dump() - for constraint in sorted(self.constraints, key=operator.attrgetter('position')) - ], - 'service_config' : {'config_rules': [ - config_rule.dump() - for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) - ]}, + def dump_endpoint_ids(self) -> List[Dict]: + return [ + service_endpoint.endpoint.dump_id() + for service_endpoint in sorted(self.service_endpoints, key=operator.attrgetter('position')) + ] + + def dump_constraints(self) -> List[Dict]: + return [ + constraint.dump() + for constraint in sorted(self.constraints, key=operator.attrgetter('position')) + ] + + def dump_config_rules(self) -> Dict: + return {'config_rules': [ + config_rule.dump() + for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) + ]} + + def dump( + self, include_endpoint_ids : bool = True, include_constraints : bool = True, include_config_rules : bool = True + ) -> Dict: + result = { + 'service_id' : 
self.dump_id(), + 'name' : self.service_name, + 'service_type' : self.service_type.value, + 'service_status': {'service_status': self.service_status.value}, } + if include_endpoint_ids: result['service_endpoint_ids'] = self.dump_endpoint_ids() + if include_constraints: result['service_constraints'] = self.dump_constraints() + if include_config_rules: result['service_config'] = self.dump_config_rules() + return result class ServiceEndPointModel(_Base): __tablename__ = 'service_endpoint' @@ -70,8 +82,8 @@ class ServiceEndPointModel(_Base): endpoint_uuid = Column(ForeignKey('endpoint.endpoint_uuid', ondelete='RESTRICT'), primary_key=True, index=True) position = Column(Integer, nullable=False) - service = relationship('ServiceModel', back_populates='service_endpoints', lazy='joined') - endpoint = relationship('EndPointModel', lazy='joined') # back_populates='service_endpoints' + service = relationship('ServiceModel', back_populates='service_endpoints') # lazy='selectin' + endpoint = relationship('EndPointModel', lazy='selectin') # back_populates='service_endpoints' __table_args__ = ( CheckConstraint(position >= 0, name='check_position_value'), diff --git a/src/context/service/database/models/SliceModel.py b/src/context/service/database/models/SliceModel.py index 7f1550eb2..423af244e 100644 --- a/src/context/service/database/models/SliceModel.py +++ b/src/context/service/database/models/SliceModel.py @@ -16,7 +16,7 @@ import operator from sqlalchemy import CheckConstraint, Column, DateTime, Enum, ForeignKey, Integer, String from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import relationship -from typing import Dict +from typing import Dict, List from .enums.SliceStatus import ORM_SliceStatusEnum from ._Base import _Base @@ -32,13 +32,13 @@ class SliceModel(_Base): created_at = Column(DateTime, nullable=False) updated_at = Column(DateTime, nullable=False) - context = relationship('ContextModel', back_populates='slices') - slice_endpoints = relationship('SliceEndPointModel') # lazy='joined', back_populates='slice' - slice_services = relationship('SliceServiceModel') # lazy='joined', back_populates='slice' + context = relationship('ContextModel', back_populates='slices', lazy='selectin') + slice_endpoints = relationship('SliceEndPointModel') # lazy='selectin', back_populates='slice' + slice_services = relationship('SliceServiceModel') # lazy='selectin', back_populates='slice' slice_subslices = relationship( 'SliceSubSliceModel', primaryjoin='slice.c.slice_uuid == slice_subslice.c.slice_uuid') - constraints = relationship('ConstraintModel', passive_deletes=True) # lazy='joined', back_populates='slice' - config_rules = relationship('ConfigRuleModel', passive_deletes=True) # lazy='joined', back_populates='slice' + constraints = relationship('ConstraintModel', passive_deletes=True) # lazy='selectin', back_populates='slice' + config_rules = relationship('ConfigRuleModel', passive_deletes=True) # lazy='selectin', back_populates='slice' def dump_id(self) -> Dict: return { @@ -46,36 +46,59 @@ class SliceModel(_Base): 'slice_uuid': {'uuid': self.slice_uuid}, } - def dump(self) -> Dict: + + def dump_endpoint_ids(self) -> List[Dict]: + return [ + slice_endpoint.endpoint.dump_id() + for slice_endpoint in sorted(self.slice_endpoints, key=operator.attrgetter('position')) + ] + + def dump_constraints(self) -> List[Dict]: + return [ + constraint.dump() + for constraint in sorted(self.constraints, key=operator.attrgetter('position')) + ] + + def dump_config_rules(self) -> Dict: + return 
{'config_rules': [ + config_rule.dump() + for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) + ]} + + def dump_service_ids(self) -> List[Dict]: + return [ + slice_service.service.dump_id() + for slice_service in self.slice_services + ] + + def dump_subslice_ids(self) -> List[Dict]: + return [ + slice_subslice.subslice.dump_id() + for slice_subslice in self.slice_subslices + ] + + def dump_owner_id(self) -> Dict: return { - 'slice_id' : self.dump_id(), - 'name' : self.slice_name, - 'slice_status' : {'slice_status': self.slice_status.value}, - 'slice_endpoint_ids': [ - slice_endpoint.endpoint.dump_id() - for slice_endpoint in sorted(self.slice_endpoints, key=operator.attrgetter('position')) - ], - 'slice_constraints' : [ - constraint.dump() - for constraint in sorted(self.constraints, key=operator.attrgetter('position')) - ], - 'slice_config' : {'config_rules': [ - config_rule.dump() - for config_rule in sorted(self.config_rules, key=operator.attrgetter('position')) - ]}, - 'slice_service_ids': [ - slice_service.service.dump_id() - for slice_service in self.slice_services - ], - 'slice_subslice_ids': [ - slice_subslice.subslice.dump_id() - for slice_subslice in self.slice_subslices - ], - 'slice_owner': { - 'owner_uuid': {'uuid': self.slice_owner_uuid}, - 'owner_string': self.slice_owner_string - } + 'owner_uuid': {'uuid': self.slice_owner_uuid}, + 'owner_string': self.slice_owner_string + } + + def dump( + self, include_endpoint_ids : bool = True, include_constraints : bool = True, include_service_ids : bool = True, + include_subslice_ids : bool = True, include_config_rules : bool = True + ) -> Dict: + result = { + 'slice_id' : self.dump_id(), + 'name' : self.slice_name, + 'slice_status': {'slice_status': self.slice_status.value}, + 'slice_owner' : self.dump_owner_id() } + if include_endpoint_ids: result['slice_endpoint_ids'] = self.dump_endpoint_ids() + if include_constraints : result['slice_constraints' ] = self.dump_constraints() + if include_service_ids : result['slice_service_ids' ] = self.dump_service_ids() + if include_subslice_ids: result['slice_subslice_ids'] = self.dump_subslice_ids() + if include_config_rules: result['slice_config' ] = self.dump_config_rules() + return result class SliceEndPointModel(_Base): __tablename__ = 'slice_endpoint' @@ -84,8 +107,8 @@ class SliceEndPointModel(_Base): endpoint_uuid = Column(ForeignKey('endpoint.endpoint_uuid', ondelete='RESTRICT'), primary_key=True, index=True) position = Column(Integer, nullable=False) - slice = relationship('SliceModel', back_populates='slice_endpoints', lazy='joined') - endpoint = relationship('EndPointModel', lazy='joined') # back_populates='slice_endpoints' + slice = relationship('SliceModel', back_populates='slice_endpoints') #, lazy='selectin' + endpoint = relationship('EndPointModel', lazy='selectin') # back_populates='slice_endpoints' __table_args__ = ( CheckConstraint(position >= 0, name='check_position_value'), @@ -97,8 +120,8 @@ class SliceServiceModel(_Base): slice_uuid = Column(ForeignKey('slice.slice_uuid', ondelete='CASCADE' ), primary_key=True) service_uuid = Column(ForeignKey('service.service_uuid', ondelete='RESTRICT'), primary_key=True, index=True) - slice = relationship('SliceModel', back_populates='slice_services', lazy='joined') - service = relationship('ServiceModel', lazy='joined') # back_populates='slice_services' + slice = relationship('SliceModel', back_populates='slice_services') # , lazy='selectin' + service = relationship('ServiceModel', lazy='selectin') # 
back_populates='slice_services' class SliceSubSliceModel(_Base): __tablename__ = 'slice_subslice' @@ -107,5 +130,5 @@ class SliceSubSliceModel(_Base): subslice_uuid = Column(ForeignKey('slice.slice_uuid', ondelete='CASCADE'), primary_key=True, index=True) slice = relationship( - 'SliceModel', foreign_keys='SliceSubSliceModel.slice_uuid', back_populates='slice_subslices', lazy='joined') - subslice = relationship('SliceModel', foreign_keys='SliceSubSliceModel.subslice_uuid', lazy='joined') + 'SliceModel', foreign_keys='SliceSubSliceModel.slice_uuid', back_populates='slice_subslices') #, lazy='selectin' + subslice = relationship('SliceModel', foreign_keys='SliceSubSliceModel.subslice_uuid', lazy='selectin') diff --git a/src/context/service/database/models/TopologyModel.py b/src/context/service/database/models/TopologyModel.py index 7dc2333f0..ebeddcb76 100644 --- a/src/context/service/database/models/TopologyModel.py +++ b/src/context/service/database/models/TopologyModel.py @@ -27,7 +27,7 @@ class TopologyModel(_Base): created_at = Column(DateTime, nullable=False) updated_at = Column(DateTime, nullable=False) - context = relationship('ContextModel', back_populates='topologies') + context = relationship('ContextModel', back_populates='topologies', lazy='selectin') topology_devices = relationship('TopologyDeviceModel') # back_populates='topology' topology_links = relationship('TopologyLinkModel' ) # back_populates='topology' @@ -59,8 +59,8 @@ class TopologyDeviceModel(_Base): topology_uuid = Column(ForeignKey('topology.topology_uuid', ondelete='RESTRICT'), primary_key=True, index=True) device_uuid = Column(ForeignKey('device.device_uuid', ondelete='CASCADE' ), primary_key=True, index=True) - #topology = relationship('TopologyModel', lazy='joined') # back_populates='topology_devices' - device = relationship('DeviceModel', lazy='joined') # back_populates='topology_devices' + #topology = relationship('TopologyModel', lazy='selectin') # back_populates='topology_devices' + device = relationship('DeviceModel', lazy='selectin') # back_populates='topology_devices' class TopologyLinkModel(_Base): __tablename__ = 'topology_link' @@ -68,5 +68,5 @@ class TopologyLinkModel(_Base): topology_uuid = Column(ForeignKey('topology.topology_uuid', ondelete='RESTRICT'), primary_key=True, index=True) link_uuid = Column(ForeignKey('link.link_uuid', ondelete='CASCADE' ), primary_key=True, index=True) - #topology = relationship('TopologyModel', lazy='joined') # back_populates='topology_links' - link = relationship('LinkModel', lazy='joined') # back_populates='topology_links' + #topology = relationship('TopologyModel', lazy='selectin') # back_populates='topology_links' + link = relationship('LinkModel', lazy='selectin') # back_populates='topology_links' -- GitLab From 179d28fbc5077e64e1f581f18db9d51b1fc402c1 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 11:58:19 +0000 Subject: [PATCH 02/14] PathComp component - Frontend: - Updated to use RPC method GetTopologyDetails --- .../service/PathCompServiceServicerImpl.py | 24 ++++++++++++------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/src/pathcomp/frontend/service/PathCompServiceServicerImpl.py b/src/pathcomp/frontend/service/PathCompServiceServicerImpl.py index 6fc33dbd4..52f1cd3d5 100644 --- a/src/pathcomp/frontend/service/PathCompServiceServicerImpl.py +++ b/src/pathcomp/frontend/service/PathCompServiceServicerImpl.py @@ -13,9 +13,9 @@ # limitations under the License. 
import grpc, logging, threading -from common.Constants import DEFAULT_CONTEXT_NAME, INTERDOMAIN_TOPOLOGY_NAME +from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME, INTERDOMAIN_TOPOLOGY_NAME from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method -from common.proto.context_pb2 import ContextId, Empty +from common.proto.context_pb2 import ContextId, Empty, TopologyId from common.proto.pathcomp_pb2 import PathCompReply, PathCompRequest from common.proto.pathcomp_pb2_grpc import PathCompServiceServicer from common.tools.context_queries.Device import get_devices_in_topology @@ -23,6 +23,7 @@ from common.tools.context_queries.Link import get_links_in_topology from common.tools.context_queries.InterDomain import is_inter_domain from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Context import json_context_id +from common.tools.object_factory.Topology import json_topology_id from context.client.ContextClient import ContextClient from pathcomp.frontend.service.algorithms.Factory import get_algorithm @@ -30,7 +31,7 @@ LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('PathComp', 'RPC') -ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) +#ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) class PathCompServiceServicerImpl(PathCompServiceServicer): def __init__(self) -> None: @@ -44,18 +45,23 @@ class PathCompServiceServicerImpl(PathCompServiceServicer): context_client = ContextClient() + context_id = json_context_id(DEFAULT_CONTEXT_NAME) if (len(request.services) == 1) and is_inter_domain(context_client, request.services[0].service_endpoint_ids): - devices = get_devices_in_topology(context_client, ADMIN_CONTEXT_ID, INTERDOMAIN_TOPOLOGY_NAME) - links = get_links_in_topology(context_client, ADMIN_CONTEXT_ID, INTERDOMAIN_TOPOLOGY_NAME) + #devices = get_devices_in_topology(context_client, ADMIN_CONTEXT_ID, INTERDOMAIN_TOPOLOGY_NAME) + #links = get_links_in_topology(context_client, ADMIN_CONTEXT_ID, INTERDOMAIN_TOPOLOGY_NAME) + topology_id = json_topology_id(INTERDOMAIN_TOPOLOGY_NAME, context_id) else: # TODO: improve filtering of devices and links # TODO: add contexts, topologies, and membership of devices/links in topologies - devices = context_client.ListDevices(Empty()) - links = context_client.ListLinks(Empty()) + #devices = context_client.ListDevices(Empty()) + #links = context_client.ListLinks(Empty()) + topology_id = json_topology_id(DEFAULT_TOPOLOGY_NAME, context_id) + + topology_details = context_client.GetTopologyDetails(TopologyId(**topology_id)) algorithm = get_algorithm(request) - algorithm.add_devices(devices) - algorithm.add_links(links) + algorithm.add_devices(topology_details.devices) + algorithm.add_links(topology_details.links) algorithm.add_service_requests(request) #LOGGER.debug('device_list = {:s}' .format(str(algorithm.device_list ))) -- GitLab From 9d9efa4ff02be32d8be2757d7d30a03f7adbe379 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 11:59:33 +0000 Subject: [PATCH 03/14] Deploy Scripts: - Added linkerd injectors for load balancing (commented, not working) - Added service monitor to my_deploy.sh --- deploy/expose_perf_dashboards.sh | 20 ++++++++++++++++++++ deploy/tfs.sh | 1 + manifests/pathcompservice.yaml | 4 ++++ my_deploy.sh | 7 ++++--- 4 files changed, 29 insertions(+), 3 deletions(-) create mode 100755 deploy/expose_perf_dashboards.sh diff --git a/deploy/expose_perf_dashboards.sh 
b/deploy/expose_perf_dashboards.sh new file mode 100755 index 000000000..6cabbae95 --- /dev/null +++ b/deploy/expose_perf_dashboards.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +EXT_GRAFANA_HTTP_PORT="31001" +EXT_PROMETHEUS_HTTP_PORT="31002" + +GRAFANA_HTTP_PORT=$(kubectl --namespace monitoring get service grafana -o 'jsonpath={.spec.ports[?(@.name=="http")].port}') +PATCH='{"data": {"'${EXT_GRAFANA_HTTP_PORT}'": "monitoring/grafana:'${GRAFANA_HTTP_PORT}'"}}' +kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}" +PORT_MAP='{"containerPort": '${EXT_GRAFANA_HTTP_PORT}', "hostPort": '${EXT_GRAFANA_HTTP_PORT}'}' +CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}' +PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}' +kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}" + +PROMETHEUS_HTTP_PORT=$(kubectl --namespace monitoring get service prometheus-k8s -o 'jsonpath={.spec.ports[?(@.name=="web")].port}') +PATCH='{"data": {"'${EXT_PROMETHEUS_HTTP_PORT}'": "monitoring/prometheus-k8s:'${PROMETHEUS_HTTP_PORT}'"}}' +kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}" +PORT_MAP='{"containerPort": '${EXT_PROMETHEUS_HTTP_PORT}', "hostPort": '${EXT_PROMETHEUS_HTTP_PORT}'}' +CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}' +PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}' +kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}" diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 4c6dc95d2..6a0e7a2e6 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -242,6 +242,7 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" + #cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g') diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index fd3599f42..28edf9aa5 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -23,6 +23,8 @@ spec: replicas: 1 template: metadata: + #annotations: + # config.linkerd.io/opaque-ports: "8081" labels: app: pathcompservice spec: @@ -53,6 +55,8 @@ spec: - name: backend image: labs.etsi.org:5050/tfs/controller/pathcomp-backend:latest imagePullPolicy: Always + #ports: + #- containerPort: 8081 #readinessProbe: # httpGet: # path: /health diff --git a/my_deploy.sh b/my_deploy.sh index 22a7ae815..4f3cb25f6 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,7 +20,8 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" +#automation monitoring compute +export TFS_COMPONENTS="context device pathcomp service slice webui load_generator" # Set the tag you want to use for your images. 
export TFS_IMAGE_TAG="dev" @@ -29,7 +30,7 @@ export TFS_IMAGE_TAG="dev" export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment -export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" +export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml manifests/servicemonitors.yaml" # Set the new Grafana admin password export TFS_GRAFANA_PASSWORD="admin123+" @@ -63,7 +64,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if it exists. -export CRDB_DROP_DATABASE_IF_EXISTS="" +export CRDB_DROP_DATABASE_IF_EXISTS="YES" # Disable flag for re-deploying CockroachDB from scratch. export CRDB_REDEPLOY="" -- GitLab From 10eebd0aa724b03f000920047214bde070758a31 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:08:05 +0000 Subject: [PATCH 04/14] Manifests: - Added autoscalers for context, service, slice, and pathcomp components --- manifests/contextservice.yaml | 24 +++++++++++++++++++++++- manifests/pathcompservice.yaml | 30 +++++++++++++++++++++++++----- manifests/serviceservice.yaml | 24 +++++++++++++++++++++++- manifests/sliceservice.yaml | 24 +++++++++++++++++++++++- 4 files changed, 94 insertions(+), 8 deletions(-) diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index b1e6eb89d..811f00d7a 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -20,7 +20,7 @@ spec: selector: matchLabels: app: contextservice - replicas: 1 + #replicas: 1 template: metadata: labels: @@ -77,3 +77,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: contextservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: contextservice + minReplicas: 2 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 28edf9aa5..5703b82ed 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -20,11 +20,9 @@ spec: selector: matchLabels: app: pathcompservice - replicas: 1 + #replicas: 1 template: metadata: - #annotations: - # config.linkerd.io/opaque-ports: "8081" labels: app: pathcompservice spec: @@ -55,8 +53,8 @@ spec: - name: backend image: labs.etsi.org:5050/tfs/controller/pathcomp-backend:latest imagePullPolicy: Always - #ports: - #- containerPort: 8081 + ports: + - containerPort: 8081 #readinessProbe: # httpGet: # path: /health @@ -100,3 +98,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: pathcompservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: pathcompservice + minReplicas: 2 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 3fa4a6e0d..655538f49 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -20,7 +20,7 @@ spec: selector: matchLabels: app: serviceservice - replicas: 1 + #replicas: 1 template: metadata: labels: @@ -70,3 +70,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler 
+metadata: + name: serviceservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: serviceservice + minReplicas: 2 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/sliceservice.yaml b/manifests/sliceservice.yaml index 49e2b5943..34453735e 100644 --- a/manifests/sliceservice.yaml +++ b/manifests/sliceservice.yaml @@ -20,7 +20,7 @@ spec: selector: matchLabels: app: sliceservice - replicas: 1 + #replicas: 1 template: metadata: labels: @@ -75,3 +75,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: sliceservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: sliceservice + minReplicas: 2 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 -- GitLab From b7d57e36d8c1f35920d66c093fd629d471e0d16f Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:08:26 +0000 Subject: [PATCH 05/14] Deploy scripts: - Added linkerd injection of sidecars in pods --- deploy/tfs.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 6a0e7a2e6..4003cf14c 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -241,8 +241,8 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" - cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" - #cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" + #cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" + cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g') -- GitLab From f56b92956635e9b1d01b34ce7ee2e83522e812d4 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:09:01 +0000 Subject: [PATCH 06/14] PathComp component - Frontend: - Corrected identification of backend IP address --- src/pathcomp/frontend/Config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pathcomp/frontend/Config.py b/src/pathcomp/frontend/Config.py index f17a9f537..714eb7278 100644 --- a/src/pathcomp/frontend/Config.py +++ b/src/pathcomp/frontend/Config.py @@ -26,8 +26,9 @@ PATHCOMP_BACKEND_BASEURL = str(os.environ.get('PATHCOMP_BACKEND_BASEURL', DEFAUL # - first check env vars PATHCOMP_BACKEND_HOST & PATHCOMP_BACKEND_PORT # - if not set, check env vars PATHCOMPSERVICE_SERVICE_HOST & PATHCOMPSERVICE_SERVICE_PORT_HTTP # - if not set, use DEFAULT_PATHCOMP_BACKEND_HOST & DEFAULT_PATHCOMP_BACKEND_PORT + backend_host = DEFAULT_PATHCOMP_BACKEND_HOST -backend_host = os.environ.get('PATHCOMPSERVICE_SERVICE_HOST', backend_host) +#backend_host = os.environ.get('PATHCOMPSERVICE_SERVICE_HOST', backend_host) PATHCOMP_BACKEND_HOST = str(os.environ.get('PATHCOMP_BACKEND_HOST', backend_host)) backend_port = DEFAULT_PATHCOMP_BACKEND_PORT -- GitLab From 6ee25c26a829d0c7f88f3373924843ce4e4109b8 
Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:10:47 +0000 Subject: [PATCH 07/14] Context component: - Corrected lazy mode of link relationships - Corrected TopologyModel dump_detailed method to avoid retrieving config rules and components - Added missing selectinload options in Topology --- src/context/service/database/Topology.py | 9 ++++++--- src/context/service/database/models/LinkModel.py | 4 ++-- src/context/service/database/models/TopologyModel.py | 12 ++++++++++-- 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/context/service/database/Topology.py b/src/context/service/database/Topology.py index 210f497d1..4440299b6 100644 --- a/src/context/service/database/Topology.py +++ b/src/context/service/database/Topology.py @@ -22,7 +22,9 @@ from common.proto.context_pb2 import ContextId, Topology, TopologyId from common.method_wrappers.ServiceExceptions import NotFoundException from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Topology import json_topology_id -from .models.TopologyModel import TopologyModel +from .models.DeviceModel import DeviceModel +from .models.LinkModel import LinkModel +from .models.TopologyModel import TopologyDeviceModel, TopologyLinkModel, TopologyModel from .uuids.Context import context_get_uuid from .uuids.Topology import topology_get_uuid @@ -67,9 +69,10 @@ def topology_get_details(db_engine : Engine, request : TopologyId) -> Dict: _,topology_uuid = topology_get_uuid(request, allow_random=False) def callback(session : Session) -> Optional[Dict]: obj : Optional[TopologyModel] = session.query(TopologyModel)\ - .options(selectinload(TopologyModel.topology_devices))\ - .options(selectinload(TopologyModel.topology_links))\ + .options(selectinload(TopologyModel.topology_devices, TopologyDeviceModel.device, DeviceModel.endpoints))\ + .options(selectinload(TopologyModel.topology_links, TopologyLinkModel.link, LinkModel.link_endpoints))\ .filter_by(topology_uuid=topology_uuid).one_or_none() + #.options(selectinload(DeviceModel.components))\ return None if obj is None else obj.dump_details() obj = run_transaction(sessionmaker(bind=db_engine), callback) if obj is None: diff --git a/src/context/service/database/models/LinkModel.py b/src/context/service/database/models/LinkModel.py index e9fd9bc87..9c16da3c9 100644 --- a/src/context/service/database/models/LinkModel.py +++ b/src/context/service/database/models/LinkModel.py @@ -50,8 +50,8 @@ class LinkEndPointModel(_Base): endpoint_uuid = Column(ForeignKey('endpoint.endpoint_uuid', ondelete='RESTRICT'), primary_key=True, index=True) position = Column(Integer, nullable=False) - link = relationship('LinkModel', back_populates='link_endpoints', lazy='joined') - endpoint = relationship('EndPointModel', lazy='joined') # back_populates='link_endpoints' + link = relationship('LinkModel', back_populates='link_endpoints') #, lazy='selectin' + endpoint = relationship('EndPointModel', lazy='selectin') # back_populates='link_endpoints' __table_args__ = ( CheckConstraint(position >= 0, name='check_position_value'), diff --git a/src/context/service/database/models/TopologyModel.py b/src/context/service/database/models/TopologyModel.py index ebeddcb76..0ed4a038b 100644 --- a/src/context/service/database/models/TopologyModel.py +++ b/src/context/service/database/models/TopologyModel.py @@ -46,11 +46,19 @@ class TopologyModel(_Base): } def dump_details(self) -> Dict: + devices = [ + td.device.dump(include_config_rules=False, include_components=False) + for 
td in self.topology_devices + ] + links = [ + tl.link.dump() + for tl in self.topology_links + ] return { 'topology_id': self.dump_id(), 'name' : self.topology_name, - 'devices' : [td.device.dump() for td in self.topology_devices], - 'links' : [tl.link.dump() for tl in self.topology_links ], + 'devices' : devices, + 'links' : links, } class TopologyDeviceModel(_Base): -- GitLab From c706fba824ff2eb55f3cded159bbda98868bc425 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:11:31 +0000 Subject: [PATCH 08/14] Service component - L2Emulated Service Handler: - Added missing delete config rules --- .../l2nm_emulated/ConfigRules.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py index 363983b86..ac44574ad 100644 --- a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py +++ b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py @@ -105,17 +105,17 @@ def teardown_config_rules( if_cirid_name = '{:s}.{:s}'.format(endpoint_name, str(circuit_id)) network_instance_name = 'ELAN-AC:{:s}'.format(str(circuit_id)) - #connection_point_id = 'VC-1' + connection_point_id = 'VC-1' json_config_rules = [ - #json_config_rule_delete( - # '/network_instance[{:s}]/connection_point[{:s}]'.format(network_instance_name, connection_point_id), - # {'name': network_instance_name, 'connection_point': connection_point_id}), + json_config_rule_delete( + '/network_instance[{:s}]/connection_point[{:s}]'.format(network_instance_name, connection_point_id), + {'name': network_instance_name, 'connection_point': connection_point_id}), - #json_config_rule_delete( - # '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, if_cirid_name), - # {'name': network_instance_name, 'id': if_cirid_name, 'interface': if_cirid_name, - # 'subinterface': sub_interface_index}), + json_config_rule_delete( + '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, if_cirid_name), + {'name': network_instance_name, 'id': if_cirid_name, 'interface': if_cirid_name, + 'subinterface': sub_interface_index}), json_config_rule_delete( '/network_instance[{:s}]'.format(network_instance_name), -- GitLab From 69a5d5db4c178d18431acc1b03093d07a3b42ddb Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 18:13:06 +0000 Subject: [PATCH 09/14] WebUI component: - Optimized topology retrieval for network plotting --- src/webui/service/main/routes.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py index 32cefddf3..75f036bef 100644 --- a/src/webui/service/main/routes.py +++ b/src/webui/service/main/routes.py @@ -131,25 +131,18 @@ def topology(): topology_uuid = session['topology_uuid'] json_topo_id = json_topology_id(topology_uuid, context_id=json_context_id(context_uuid)) - grpc_topology = context_client.GetTopology(TopologyId(**json_topo_id)) + response = context_client.GetTopologyDetails(TopologyId(**json_topo_id)) - topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} - topo_link_uuids = {link_id .link_uuid .uuid for link_id in grpc_topology.link_ids } - - response = context_client.ListDevices(Empty()) devices = [] for device in response.devices: - if device.device_id.device_uuid.uuid not in topo_device_uuids: continue devices.append({ 'id': device.device_id.device_uuid.uuid, 'name': 
device.name, 'type': device.device_type, }) - response = context_client.ListLinks(Empty()) links = [] for link in response.links: - if link.link_id.link_uuid.uuid not in topo_link_uuids: continue if len(link.link_endpoint_ids) != 2: str_link = grpc_message_to_json_string(link) LOGGER.warning('Unexpected link with len(endpoints) != 2: {:s}'.format(str_link)) -- GitLab From 41b7ac7b613a5299b75f49a04d6af9eb09d84fce Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Tue, 28 Mar 2023 15:24:58 +0000 Subject: [PATCH 10/14] Context component: - Improved device_set method reducing unneeded inserts --- src/context/service/database/Device.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/context/service/database/Device.py b/src/context/service/database/Device.py index c51a88368..3e106bc15 100644 --- a/src/context/service/database/Device.py +++ b/src/context/service/database/Device.py @@ -171,7 +171,9 @@ def device_set(db_engine : Engine, request : Device) -> Tuple[Dict, bool]: endpoint_updates = session.execute(stmt).fetchall() updated_endpoints = any([(updated_at > created_at) for created_at,updated_at in endpoint_updates]) - if len(related_topologies) > 0: + if not updated or len(related_topologies) > 1: + # Only update topology-device relations when device is created (not updated) or when endpoints are + # modified (len(related_topologies) > 1). session.execute(insert(TopologyDeviceModel).values(related_topologies).on_conflict_do_nothing( index_elements=[TopologyDeviceModel.topology_uuid, TopologyDeviceModel.device_uuid] )) -- GitLab From 0a03b17f330185b0832c8d737ee4ccb8f0d8d2ac Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Tue, 28 Mar 2023 15:25:36 +0000 Subject: [PATCH 11/14] Device component: - Removed endpoints when running SetDevice method in ConfigureDevice for performance enhancement. --- src/device/service/DeviceServiceServicerImpl.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py index 2b08b6c7e..205d769ac 100644 --- a/src/device/service/DeviceServiceServicerImpl.py +++ b/src/device/service/DeviceServiceServicerImpl.py @@ -160,6 +160,12 @@ class DeviceServiceServicerImpl(DeviceServiceServicer): for error in errors: LOGGER.error(error) raise OperationFailedException('ConfigureDevice', extra_details=errors) + # Context Performance+Scalability enhancement: + # This method, besides P4 logic, does not add/update/delete endpoints. + # Remove endpoints to reduce number of inserts done by Context. + # TODO: Add logic to inspect endpoints and keep only those ones modified with respect to Context. + del device.device_endpoints[:] + # Note: Rules are updated by configure_rules() and deconfigure_rules() methods. device_id = context_client.SetDevice(device) return device_id -- GitLab From 7848b9db7352c8806a4b6523523239b46231953e Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 29 Mar 2023 20:16:57 +0200 Subject: [PATCH 12/14] Deploy Script: - Updated NATS image --- deploy/nats.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/nats.sh b/deploy/nats.sh index aa082b54b..b730cec4a 100755 --- a/deploy/nats.sh +++ b/deploy/nats.sh @@ -53,7 +53,7 @@ function nats_deploy_single() { echo ">>> NATS is present; skipping step." 
else echo ">>> Deploy NATS" - helm3 install ${NATS_NAMESPACE} nats/nats --namespace ${NATS_NAMESPACE} --set nats.image.tag=2.9-alpine + helm3 install ${NATS_NAMESPACE} nats/nats --namespace ${NATS_NAMESPACE} --set nats.image=nats:2.9-alpine echo ">>> Waiting NATS statefulset to be created..." while ! kubectl get --namespace ${NATS_NAMESPACE} statefulset/${NATS_NAMESPACE} &> /dev/null; do -- GitLab From 6850ad4816e2462ea3d133d7514d2a12d1b6686f Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 29 Mar 2023 20:17:37 +0200 Subject: [PATCH 13/14] Manifests: - Updated requests and limits per pod - Updated HPA replica limits --- manifests/contextservice.yaml | 10 +++++----- manifests/deviceservice.yaml | 6 +++--- manifests/load_generatorservice.yaml | 6 +++--- manifests/pathcompservice.yaml | 4 ++-- manifests/performance-hpa.txt | 14 ++++++++++++++ manifests/serviceservice.yaml | 12 ++++++------ manifests/sliceservice.yaml | 12 ++++++------ 7 files changed, 39 insertions(+), 25 deletions(-) create mode 100644 manifests/performance-hpa.txt diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index 811f00d7a..8655b275b 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -52,11 +52,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:1010"] resources: requests: - cpu: 50m + cpu: 75m memory: 64Mi limits: - cpu: 500m - memory: 512Mi + cpu: 100m + memory: 128Mi --- apiVersion: v1 kind: Service @@ -87,8 +87,8 @@ spec: apiVersion: apps/v1 kind: Deployment name: contextservice - minReplicas: 2 - maxReplicas: 5 + minReplicas: 1 + maxReplicas: 20 metrics: - type: Resource resource: diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index ca2c81f0f..ad54f4b6c 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -45,11 +45,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:2020"] resources: requests: - cpu: 50m + cpu: 128m memory: 64Mi limits: - cpu: 500m - memory: 512Mi + cpu: 256m + memory: 128Mi --- apiVersion: v1 kind: Service diff --git a/manifests/load_generatorservice.yaml b/manifests/load_generatorservice.yaml index b94e11e72..3f65c2c85 100644 --- a/manifests/load_generatorservice.yaml +++ b/manifests/load_generatorservice.yaml @@ -44,11 +44,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:50052"] resources: requests: - cpu: 50m + cpu: 256m memory: 64Mi limits: - cpu: 500m - memory: 512Mi + cpu: 512m + memory: 128Mi --- apiVersion: v1 kind: Service diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 5703b82ed..3ba12750b 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -108,8 +108,8 @@ spec: apiVersion: apps/v1 kind: Deployment name: pathcompservice - minReplicas: 2 - maxReplicas: 5 + minReplicas: 1 + maxReplicas: 20 metrics: - type: Resource resource: diff --git a/manifests/performance-hpa.txt b/manifests/performance-hpa.txt new file mode 100644 index 000000000..6aadc114f --- /dev/null +++ b/manifests/performance-hpa.txt @@ -0,0 +1,14 @@ +NAME CPU MEM metric +context 0.07 65 min +context 0.085 66 avg +context 0.1 71 max +device 0.112 64 min/avg/max +serviceservice 0.034 32 min +serviceservice 0.047 33 avg +serviceservice 0.06 37 max +pathcompservice 0.087 184 min/avg/max +sliceservice 0.028 103 min +sliceservice 0.046 106 avg +sliceservice 0.062 110 max +load-generator 0.192 35 min/avg/max +webuiservice 0.003 75 min/avg/max diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 
655538f49..ce90aa188 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -45,11 +45,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:3030"] resources: requests: - cpu: 50m - memory: 64Mi + cpu: 32m + memory: 32Mi limits: - cpu: 500m - memory: 512Mi + cpu: 128m + memory: 64Mi --- apiVersion: v1 kind: Service @@ -80,8 +80,8 @@ spec: apiVersion: apps/v1 kind: Deployment name: serviceservice - minReplicas: 2 - maxReplicas: 5 + minReplicas: 1 + maxReplicas: 20 metrics: - type: Resource resource: diff --git a/manifests/sliceservice.yaml b/manifests/sliceservice.yaml index 34453735e..8f312e8e0 100644 --- a/manifests/sliceservice.yaml +++ b/manifests/sliceservice.yaml @@ -50,11 +50,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:4040"] resources: requests: - cpu: 50m - memory: 64Mi + cpu: 32m + memory: 128Mi limits: - cpu: 500m - memory: 512Mi + cpu: 128m + memory: 256Mi --- apiVersion: v1 kind: Service @@ -85,8 +85,8 @@ spec: apiVersion: apps/v1 kind: Deployment name: sliceservice - minReplicas: 2 - maxReplicas: 5 + minReplicas: 1 + maxReplicas: 20 metrics: - type: Resource resource: -- GitLab From 531690b141caeadc055d5cfa74dc629ba6c6a8f9 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 30 Mar 2023 08:23:39 +0000 Subject: [PATCH 14/14] Deploy Scripts: - Removed unneeded scripts/files - Cleaned up changes --- deploy/expose_perf_dashboards.sh | 20 -------------------- deploy/tfs.sh | 3 +-- manifests/performance-hpa.txt | 14 -------------- my_deploy.sh | 5 ++--- 4 files changed, 3 insertions(+), 39 deletions(-) delete mode 100755 deploy/expose_perf_dashboards.sh delete mode 100644 manifests/performance-hpa.txt diff --git a/deploy/expose_perf_dashboards.sh b/deploy/expose_perf_dashboards.sh deleted file mode 100755 index 6cabbae95..000000000 --- a/deploy/expose_perf_dashboards.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -EXT_GRAFANA_HTTP_PORT="31001" -EXT_PROMETHEUS_HTTP_PORT="31002" - -GRAFANA_HTTP_PORT=$(kubectl --namespace monitoring get service grafana -o 'jsonpath={.spec.ports[?(@.name=="http")].port}') -PATCH='{"data": {"'${EXT_GRAFANA_HTTP_PORT}'": "monitoring/grafana:'${GRAFANA_HTTP_PORT}'"}}' -kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}" -PORT_MAP='{"containerPort": '${EXT_GRAFANA_HTTP_PORT}', "hostPort": '${EXT_GRAFANA_HTTP_PORT}'}' -CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}' -PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}' -kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}" - -PROMETHEUS_HTTP_PORT=$(kubectl --namespace monitoring get service prometheus-k8s -o 'jsonpath={.spec.ports[?(@.name=="web")].port}') -PATCH='{"data": {"'${EXT_PROMETHEUS_HTTP_PORT}'": "monitoring/prometheus-k8s:'${PROMETHEUS_HTTP_PORT}'"}}' -kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}" -PORT_MAP='{"containerPort": '${EXT_PROMETHEUS_HTTP_PORT}', "hostPort": '${EXT_PROMETHEUS_HTTP_PORT}'}' -CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}' -PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}' -kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}" diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 4003cf14c..4c6dc95d2 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -241,8 +241,7 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." 
MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" - #cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" - cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" + cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g') diff --git a/manifests/performance-hpa.txt b/manifests/performance-hpa.txt deleted file mode 100644 index 6aadc114f..000000000 --- a/manifests/performance-hpa.txt +++ /dev/null @@ -1,14 +0,0 @@ -NAME CPU MEM metric -context 0.07 65 min -context 0.085 66 avg -context 0.1 71 max -device 0.112 64 min/avg/max -serviceservice 0.034 32 min -serviceservice 0.047 33 avg -serviceservice 0.06 37 max -pathcompservice 0.087 184 min/avg/max -sliceservice 0.028 103 min -sliceservice 0.046 106 avg -sliceservice 0.062 110 max -load-generator 0.192 35 min/avg/max -webuiservice 0.003 75 min/avg/max diff --git a/my_deploy.sh b/my_deploy.sh index 4f3cb25f6..506e52176 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,8 +20,7 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -#automation monitoring compute -export TFS_COMPONENTS="context device pathcomp service slice webui load_generator" +export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" # Set the tag you want to use for your images. export TFS_IMAGE_TAG="dev" @@ -64,7 +63,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if it exists. -export CRDB_DROP_DATABASE_IF_EXISTS="YES" +export CRDB_DROP_DATABASE_IF_EXISTS="" # Disable flag for re-deploying CockroachDB from scratch. export CRDB_REDEPLOY="" -- GitLab