From e156b3d3a9fac80e4b844d0c14d800c6e6dd45d7 Mon Sep 17 00:00:00 2001 From: Alberto Gonzalez Barneo Date: Wed, 4 Sep 2024 09:47:08 +0000 Subject: [PATCH 1/2] Added folder app with some compoaspects gisters apps app with new component app register for qkd --- src/app/AppServiceServicerImpl.py | 59 ++++++ src/app/__init__.py | 13 ++ src/app/__main__.py | 80 ++++++++ src/app/client/AppClient | 50 +++++ src/app/client/__init__.py | 13 ++ src/app/service/AppService.py | 23 +++ src/app/service/database/App.py | 185 ++++++++++++++++++ src/app/service/database/Engine.py | 55 ++++++ src/app/service/database/__init__.py | 0 src/app/service/database/models/AppModel.py | 49 +++++ src/app/service/database/models/_Base.py | 44 +++++ src/app/service/database/models/__init__.py | 0 .../database/models/enums/QKDAppStatus.py | 13 ++ .../database/models/enums/QKDAppTypes.py | 11 ++ .../database/models/enums/_GrpcToEnum.py | 38 ++++ .../service/database/models/enums/__init__.py | 0 src/app/service/database/uuids/App.py | 16 ++ src/app/service/database/uuids/_Builder.py | 44 +++++ src/app/service/rest_server/RestServer.py | 9 + src/app/service/rest_server/__init__.py | 0 .../service/rest_server/qkd_app/Resources.py | 72 +++++++ .../service/rest_server/qkd_app/__init__.py | 16 ++ 22 files changed, 790 insertions(+) create mode 100644 src/app/AppServiceServicerImpl.py create mode 100644 src/app/__init__.py create mode 100644 src/app/__main__.py create mode 100644 src/app/client/AppClient create mode 100644 src/app/client/__init__.py create mode 100644 src/app/service/AppService.py create mode 100644 src/app/service/database/App.py create mode 100644 src/app/service/database/Engine.py create mode 100644 src/app/service/database/__init__.py create mode 100644 src/app/service/database/models/AppModel.py create mode 100644 src/app/service/database/models/_Base.py create mode 100644 src/app/service/database/models/__init__.py create mode 100644 src/app/service/database/models/enums/QKDAppStatus.py create mode 100644 src/app/service/database/models/enums/QKDAppTypes.py create mode 100644 src/app/service/database/models/enums/_GrpcToEnum.py create mode 100644 src/app/service/database/models/enums/__init__.py create mode 100644 src/app/service/database/uuids/App.py create mode 100644 src/app/service/database/uuids/_Builder.py create mode 100644 src/app/service/rest_server/RestServer.py create mode 100644 src/app/service/rest_server/__init__.py create mode 100644 src/app/service/rest_server/qkd_app/Resources.py create mode 100644 src/app/service/rest_server/qkd_app/__init__.py diff --git a/src/app/AppServiceServicerImpl.py b/src/app/AppServiceServicerImpl.py new file mode 100644 index 000000000..7d2df1aff --- /dev/null +++ b/src/app/AppServiceServicerImpl.py @@ -0,0 +1,59 @@ +import grpc, logging, sqlalchemy +from typing import Iterator, Optional +from common.message_broker.MessageBroker import MessageBroker +import grpc, json, logging #, deepdiff +from common.proto.context_pb2 import ( + Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum, ContextId) +from common.proto.app_pb2 import (App, AppId, AppList, QKDAppTypesEnum) +from common.proto.app_pb2_grpc import AppServiceServicer +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method +from common.tools.context_queries.InterDomain import is_inter_domain #, is_multi_domain +from common.tools.grpc.ConfigRules import copy_config_rules +from common.tools.grpc.Constraints import copy_constraints +from common.tools.grpc.EndPointIds 
import copy_endpoint_ids +from common.tools.grpc.ServiceIds import update_service_ids +#from common.tools.grpc.Tools import grpc_message_to_json_string +from context.client.ContextClient import ContextClient +from app.client.AppClient import AppClient +from .database.App import app_set, app_list_objs, app_get, app_get_by_server +from common.method_wrappers.ServiceExceptions import NotFoundException + +LOGGER = logging.getLogger(__name__) + +METRICS_POOL = MetricsPool('App', 'RPC') + +# Optare: This file must be edited based on app's logic + +class AppServiceServicerImpl(AppServiceServicer): + def __init__(self, db_engine : sqlalchemy.engine.Engine, messagebroker : MessageBroker): + LOGGER.debug('Creating Servicer...') + self.db_engine = db_engine + self.messagebroker = messagebroker + LOGGER.debug('Servicer Created') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def RegisterApp(self, request : App, context : grpc.ServicerContext) -> Empty: + # Optare: This is the main function required for the project. + # Optare: If it's an internal it will save it directly. If it's an external one it will save it as pending by not providing the remote until the other party requests it too + # Optare: Ideally, the only thing needed to change is the code inside the try block. Currently it just searches by a pending app with the same server_id but you can put more restrictions or different search and raise the NotFoundException + + if request.app_type == QKDAppTypesEnum.QKDAPPTYPES_INTERNAL: + app_set(self.db_engine, self.messagebroker, request) + + else: + try: + app = app_get_by_server(self.db_engine, request.server_app_id) + except NotFoundException: + app = request + app_set(self.db_engine, self.messagebroker, app) + else: + app.remote_device_id.device_uuid.uuid = request.local_device_id.device_uuid.uuid + app_set(self.db_engine, self.messagebroker, app) + + + return Empty() + + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def ListApps(self, request: ContextId, context : grpc.ServicerContext) -> AppList: + return app_list_objs(self.db_engine) diff --git a/src/app/__init__.py b/src/app/__init__.py new file mode 100644 index 000000000..5648545ca --- /dev/null +++ b/src/app/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
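The external branch of RegisterApp above implements a simple two-party handshake: the first peer to register an external app is stored as pending (its remote device left unset), and when the other peer later registers with the same server_app_id, that peer's local device UUID is written into the stored entry's remote_device_id. A minimal, self-contained sketch of that pairing rule, using an in-memory dict as a stand-in for the CockroachDB-backed app_set/app_get_by_server helpers (all names and values below are illustrative only):

pending_apps = {}   # server_app_id -> app dict; stand-in for the 'app' table

def register_external(app):
    stored = pending_apps.get(app['server_app_id'])
    if stored is None:
        # First party: store as pending; the remote side is not yet known.
        pending_apps[app['server_app_id']] = app
        return app
    # Second party: complete the pending entry with the peer's local device.
    stored['remote_device_uuid'] = app['local_device_uuid']
    return stored

first  = register_external({'server_app_id': '1', 'local_device_uuid': 'qkd-node-A', 'remote_device_uuid': None})
second = register_external({'server_app_id': '1', 'local_device_uuid': 'qkd-node-B', 'remote_device_uuid': None})
assert second['local_device_uuid'] == 'qkd-node-A' and second['remote_device_uuid'] == 'qkd-node-B'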
diff --git a/src/app/__main__.py b/src/app/__main__.py new file mode 100644 index 000000000..fee7a3ce9 --- /dev/null +++ b/src/app/__main__.py @@ -0,0 +1,80 @@ +import logging, signal, sys, threading +from prometheus_client import start_http_server +from common.Constants import ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port, + wait_for_environment_variables) +from .AppService import AppService +from .rest_server.RestServer import RestServer +from .rest_server.qkd_app import register_qkd_app +from common.message_broker.Factory import get_messagebroker_backend +from common.message_broker.MessageBroker import MessageBroker +from .database.Engine import Engine +from .database.models._Base import rebuild_database + +terminate = threading.Event() +LOGGER : logging.Logger = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def main(): + global LOGGER # pylint: disable=global-statement + + log_level = get_log_level() + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + + wait_for_environment_variables([ + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + ]) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + # Get Database Engine instance and initialize database, if needed + LOGGER.info('Getting SQLAlchemy DB Engine...') + db_engine = Engine.get_engine() + if db_engine is None: + LOGGER.error('Unable to get SQLAlchemy DB Engine...') + return -1 + + try: + Engine.create_database(db_engine) + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(db_engine.url))) + + rebuild_database(db_engine) + + # Get message broker instance + messagebroker = None #MessageBroker(get_messagebroker_backend()) + + # Starting context service + grpc_service = AppService(db_engine, messagebroker) + grpc_service.start() + + rest_server = RestServer() + register_qkd_app(rest_server) + rest_server.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + rest_server.shutdown() + rest_server.join() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/app/client/AppClient b/src/app/client/AppClient new file mode 100644 index 000000000..9148f0432 --- /dev/null +++ b/src/app/client/AppClient @@ -0,0 +1,50 @@ +import grpc, logging +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc +from common.proto.context_pb2 import Empty, ContextId +from common.proto.app_pb2 import App, AppId, AppList +from common.proto.app_pb2_grpc import AppServiceStub +from common.tools.client.RetryDecorator import retry, delay_exponential +from common.tools.grpc.Tools import grpc_message_to_json_string + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 15 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, 
delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +class AppClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.APP) + if not port: port = get_service_port_grpc(ServiceNameEnum.APP) + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.debug('Creating channel to {:s}...'.format(self.endpoint)) + self.channel = None + self.stub = None + self.connect() + LOGGER.debug('Channel created') + + def connect(self): + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = AppServiceStub(self.channel) + + def close(self): + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + + + @RETRY_DECORATOR + def RegisterApp(self, request : App) -> Empty: + LOGGER.debug('RegisterApp request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.RegisterApp(request) + LOGGER.debug('RegisterApp result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + + @RETRY_DECORATOR + def ListApps(self, request: ContextId) -> AppList: + LOGGER.debug('ListApps request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.ListApps(request) + LOGGER.debug('ListApps result: {:s}'.format(grpc_message_to_json_string(response))) + return response diff --git a/src/app/client/__init__.py b/src/app/client/__init__.py new file mode 100644 index 000000000..5648545ca --- /dev/null +++ b/src/app/client/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
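AppClient follows the same pattern as the other TFS component clients: it resolves the host/port for ServiceNameEnum.APP, opens an insecure gRPC channel, and wraps each RPC with the exponential-backoff retry decorator. A hedged usage sketch, assuming the app component is deployed and its service host/port environment variables are set; the UUIDs are placeholders, and the nested-dict construction mirrors the App(**...) pattern used in Resources.py:

from common.Constants import DEFAULT_CONTEXT_NAME
from common.proto.context_pb2 import ContextId
from common.proto.app_pb2 import App, QKDAppStatusEnum, QKDAppTypesEnum
from app.client.AppClient import AppClient

app_client = AppClient()   # host/port resolved from the APP service settings

# Register an internal QKD app; an empty app_uuid lets the service generate one.
new_app = App(**{
    'app_id'          : {'context_id': {'context_uuid': {'uuid': DEFAULT_CONTEXT_NAME}},
                         'app_uuid'  : {'uuid': ''}},
    'app_status'      : QKDAppStatusEnum.QKDAPPSTATUS_ON,
    'app_type'        : QKDAppTypesEnum.QKDAPPTYPES_INTERNAL,
    'server_app_id'   : '1',
    'client_app_id'   : [],
    'backing_qkdl_id' : [{'qkdl_uuid': {'uuid': '00000003-0002-0000-0000-000000000000'}}],
    'local_device_id' : {'device_uuid': {'uuid': '00000001-0000-0000-0000-000000000000'}},
    'remote_device_id': {'device_uuid': {'uuid': '00000002-0000-0000-0000-000000000000'}},
})
app_client.RegisterApp(new_app)

# List the apps registered under the default context.
reply = app_client.ListApps(ContextId(**{'context_uuid': {'uuid': DEFAULT_CONTEXT_NAME}}))
for app in reply.apps:
    print(app.server_app_id, app.app_status, app.app_type)
app_client.close()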
diff --git a/src/app/service/AppService.py b/src/app/service/AppService.py new file mode 100644 index 000000000..a1c1f8da3 --- /dev/null +++ b/src/app/service/AppService.py @@ -0,0 +1,23 @@ +import logging, sqlalchemy +from common.Constants import ServiceNameEnum +from common.Settings import get_service_port_grpc +from common.message_broker.MessageBroker import MessageBroker +from common.proto.app_pb2_grpc import add_AppServiceServicer_to_server +from common.tools.service.GenericGrpcService import GenericGrpcService +from app.service.AppServiceServicerImpl import AppServiceServicerImpl + +# Custom gRPC settings +GRPC_MAX_WORKERS = 200 # multiple clients might keep connections alive for Get*Events() RPC methods +LOGGER = logging.getLogger(__name__) + + +class AppService(GenericGrpcService): + def __init__( + self, db_engine : sqlalchemy.engine.Engine, messagebroker : MessageBroker, cls_name: str = __name__ + ) -> None: + port = get_service_port_grpc(ServiceNameEnum.APP) + super().__init__(port, max_workers=GRPC_MAX_WORKERS, cls_name=cls_name) + self.app_servicer = AppServiceServicerImpl(db_engine, messagebroker) + + def install_servicers(self): + add_AppServiceServicer_to_server(self.app_servicer, self.server) diff --git a/src/app/service/database/App.py b/src/app/service/database/App.py new file mode 100644 index 000000000..c24daf158 --- /dev/null +++ b/src/app/service/database/App.py @@ -0,0 +1,185 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime, logging, uuid +from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session, selectinload, sessionmaker +from sqlalchemy_cockroachdb import run_transaction +from typing import Dict, List, Optional, Set, Tuple +from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException +from common.message_broker.MessageBroker import MessageBroker +from common.proto.context_pb2 import Empty +from common.proto.app_pb2 import ( + AppList, App, AppId) +from common.tools.grpc.Tools import grpc_message_to_json_string +from .models.AppModel import AppModel +from .models.enums.QKDAppStatus import grpc_to_enum__qkd_app_status +from .models.enums.QKDAppTypes import grpc_to_enum__qkd_app_types +from .uuids.App import app_get_uuid +from common.tools.object_factory.Context import json_context_id +from common.tools.object_factory.App import json_app_id +from context.service.database.uuids.Context import context_get_uuid + + + +#from .Events import notify_event_context, notify_event_device, notify_event_topology + +LOGGER = logging.getLogger(__name__) + + +def app_list_objs(db_engine : Engine) -> AppList: + def callback(session : Session) -> List[Dict]: + obj_list : List[AppModel] = session.query(AppModel)\ + .all() + return [obj.dump() for obj in obj_list] + apps = run_transaction(sessionmaker(bind=db_engine), callback) + return AppList(apps=apps) + +def app_get(db_engine : Engine, request : AppId) -> App: + app_uuid = app_get_uuid(request, allow_random=False) + def callback(session : Session) -> Optional[Dict]: + obj : Optional[AppModel] = session.query(AppModel)\ + .filter_by(app_uuid=app_uuid).one_or_none() + return None if obj is None else obj.dump() + obj = run_transaction(sessionmaker(bind=db_engine), callback) + if obj is None: + raw_app_uuid = request.app_uuid.uuid + raise NotFoundException('App', raw_app_uuid, extra_details=[ + 'app_uuid generated was: {:s}'.format(app_uuid) + ]) + return App(**obj) + +def app_set(db_engine : Engine, messagebroker : MessageBroker, request : App) -> AppId: + context_uuid = context_get_uuid(request.app_id.context_id, allow_random=False) + raw_app_uuid = request.app_id.app_uuid.uuid + app_uuid = app_get_uuid(request.app_id, allow_random=True) + + app_type = request.app_type + app_status = grpc_to_enum__qkd_app_status(request.app_status) + app_type = grpc_to_enum__qkd_app_types(request.app_type) + + now = datetime.datetime.utcnow() + + + app_data = [{ + 'context_uuid' : context_uuid, + 'app_uuid' : app_uuid, + 'app_status' : app_status, + 'app_type' : app_type, + 'server_app_id' : request.server_app_id, + 'client_app_id' : request.client_app_id, + 'backing_qkdl_uuid' : [qkdl_id.qkdl_uuid.uuid for qkdl_id in request.backing_qkdl_id], + 'local_device_uuid' : request.local_device_id.device_uuid.uuid, + 'remote_device_uuid' : request.remote_device_id.device_uuid.uuid or None, + 'created_at' : now, + 'updated_at' : now, + }] + + + def callback(session : Session) -> Tuple[bool, List[Dict]]: + stmt = insert(AppModel).values(app_data) + stmt = stmt.on_conflict_do_update( + index_elements=[AppModel.app_uuid], + set_=dict( + app_status = stmt.excluded.app_status, + app_type = stmt.excluded.app_type, + server_app_id = stmt.excluded.server_app_id, + client_app_id = stmt.excluded.client_app_id, + backing_qkdl_uuid = stmt.excluded.backing_qkdl_uuid, + local_device_uuid = stmt.excluded.local_device_uuid, + remote_device_uuid = stmt.excluded.remote_device_uuid, + updated_at = 
stmt.excluded.updated_at, + ) + ) + stmt = stmt.returning(AppModel.created_at, AppModel.updated_at) + created_at,updated_at = session.execute(stmt).fetchone() + updated = updated_at > created_at + + return updated + + updated = run_transaction(sessionmaker(bind=db_engine), callback) + context_id = json_context_id(context_uuid) + app_id = json_app_id(app_uuid, context_id=context_id) + #event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE + #notify_event_app(messagebroker, event_type, app_id) + #notify_event_context(messagebroker, EventTypeEnum.EVENTTYPE_UPDATE, context_id) + return AppId(**app_id) + + + +def app_get_by_server(db_engine : Engine, request : str) -> App: + def callback(session : Session) -> Optional[Dict]: + obj : Optional[AppModel] = session.query(AppModel)\ + .filter_by(server_app_id=request).one_or_none() + return None if obj is None else obj.dump() + obj = run_transaction(sessionmaker(bind=db_engine), callback) + if obj is None: + raise NotFoundException('No app match found for', request) + return App(**obj) + + + +""" +def device_delete(db_engine : Engine, messagebroker : MessageBroker, request : DeviceId) -> Empty: + device_uuid = device_get_uuid(request, allow_random=False) + def callback(session : Session) -> Tuple[bool, List[Dict]]: + query = session.query(TopologyDeviceModel) + query = query.filter_by(device_uuid=device_uuid) + topology_device_list : List[TopologyDeviceModel] = query.all() + topology_ids = [obj.topology.dump_id() for obj in topology_device_list] + num_deleted = session.query(DeviceModel).filter_by(device_uuid=device_uuid).delete() + return num_deleted > 0, topology_ids + deleted, updated_topology_ids = run_transaction(sessionmaker(bind=db_engine), callback) + device_id = json_device_id(device_uuid) + if deleted: + notify_event_device(messagebroker, EventTypeEnum.EVENTTYPE_REMOVE, device_id) + + context_ids : Dict[str, Dict] = dict() + topology_ids : Dict[str, Dict] = dict() + for topology_id in updated_topology_ids: + topology_uuid = topology_id['topology_uuid']['uuid'] + topology_ids[topology_uuid] = topology_id + context_id = topology_id['context_id'] + context_uuid = context_id['context_uuid']['uuid'] + context_ids[context_uuid] = context_id + + for topology_id in topology_ids.values(): + notify_event_topology(messagebroker, EventTypeEnum.EVENTTYPE_UPDATE, topology_id) + + for context_id in context_ids.values(): + notify_event_context(messagebroker, EventTypeEnum.EVENTTYPE_UPDATE, context_id) + + return Empty() + +def device_select(db_engine : Engine, request : DeviceFilter) -> DeviceList: + device_uuids = [ + device_get_uuid(device_id, allow_random=False) + for device_id in request.device_ids.device_ids + ] + dump_params = dict( + include_endpoints =request.include_endpoints, + include_config_rules=request.include_config_rules, + include_components =request.include_components, + ) + def callback(session : Session) -> List[Dict]: + query = session.query(DeviceModel) + if request.include_endpoints : query = query.options(selectinload(DeviceModel.endpoints)) + if request.include_config_rules: query = query.options(selectinload(DeviceModel.config_rules)) + #if request.include_components : query = query.options(selectinload(DeviceModel.components)) + obj_list : List[DeviceModel] = query.filter(DeviceModel.device_uuid.in_(device_uuids)).all() + return [obj.dump(**dump_params) for obj in obj_list] + devices = run_transaction(sessionmaker(bind=db_engine), callback) + return DeviceList(devices=devices) +""" diff --git 
a/src/app/service/database/Engine.py b/src/app/service/database/Engine.py new file mode 100644 index 000000000..8120cd153 --- /dev/null +++ b/src/app/service/database/Engine.py @@ -0,0 +1,55 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, sqlalchemy, sqlalchemy_utils +from common.Settings import get_setting + +LOGGER = logging.getLogger(__name__) + +APP_NAME = 'tfs' +ECHO = False # true: dump SQL commands and transactions executed +CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' + +class Engine: + @staticmethod + def get_engine() -> sqlalchemy.engine.Engine: + crdb_uri = get_setting('CRDB_URI', default=None) + if crdb_uri is None: + CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE') + CRDB_SQL_PORT = get_setting('CRDB_SQL_PORT') + CRDB_DATABASE = get_setting('CRDB_DATABASE_APP') + CRDB_USERNAME = get_setting('CRDB_USERNAME') + CRDB_PASSWORD = get_setting('CRDB_PASSWORD') + CRDB_SSLMODE = get_setting('CRDB_SSLMODE') + crdb_uri = CRDB_URI_TEMPLATE.format( + CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + + try: + engine = sqlalchemy.create_engine( + crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) + return None + + return engine + + @staticmethod + def create_database(engine : sqlalchemy.engine.Engine) -> None: + if not sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.create_database(engine.url) + + @staticmethod + def drop_database(engine : sqlalchemy.engine.Engine) -> None: + if sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.drop_database(engine.url) diff --git a/src/app/service/database/__init__.py b/src/app/service/database/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/app/service/database/models/AppModel.py b/src/app/service/database/models/AppModel.py new file mode 100644 index 000000000..7d6b3cece --- /dev/null +++ b/src/app/service/database/models/AppModel.py @@ -0,0 +1,49 @@ +import operator +from sqlalchemy import CheckConstraint, Column, DateTime, Float, Enum, ForeignKey, Integer, String +from sqlalchemy.dialects.postgresql import UUID, ARRAY +from sqlalchemy.orm import relationship +from typing import Dict +from ._Base import _Base +from .enums.QKDAppStatus import ORM_QKDAppStatusEnum +from .enums.QKDAppTypes import ORM_QKDAppTypesEnum + +class AppModel(_Base): + __tablename__ = 'app' + + app_uuid = Column(UUID(as_uuid=False), primary_key=True) + context_uuid = Column(UUID(as_uuid=False), nullable=False) # Supposed to be Foreign Key + app_status = Column(Enum(ORM_QKDAppStatusEnum), nullable=False) + app_type = Column(Enum(ORM_QKDAppTypesEnum), nullable=False) + server_app_id = Column(String, nullable=False) + client_app_id = Column(ARRAY(String), 
nullable=False) + backing_qkdl_uuid = Column(ARRAY(UUID(as_uuid=False)), nullable=False) + local_device_uuid = Column(UUID(as_uuid=False), nullable=False) + remote_device_uuid = Column(UUID(as_uuid=False), nullable=True) + + # Optare: Created_at and Updated_at are only used to know if an app was updated later on the code. Don't change it + + created_at = Column(DateTime, nullable=False) + updated_at = Column(DateTime, nullable=False) + + #__table_args__ = ( + # CheckConstraint(... >= 0, name='name_value_...'), + #) + + def dump_id(self) -> Dict: + return { + 'context_id': {'context_uuid': {'uuid': self.context_uuid}}, + 'app_uuid': {'uuid': self.app_uuid} + } + + def dump(self) -> Dict: + result = { + 'app_id' : self.dump_id(), + 'app_status' : self.app_status.value, + 'app_type' : self.app_type.value, + 'server_app_id' : self.server_app_id, + 'client_app_id' : self.client_app_id, + 'backing_qkdl_id' : [{'qkdl_uuid': {'uuid': qkdl_id}} for qkdl_id in self.backing_qkdl_uuid], + 'local_device_id' : {'device_uuid': {'uuid': self.local_device_uuid}}, + 'remote_device_id' : {'device_uuid': {'uuid': self.remote_device_uuid}}, + } + return result diff --git a/src/app/service/database/models/_Base.py b/src/app/service/database/models/_Base.py new file mode 100644 index 000000000..83cad6861 --- /dev/null +++ b/src/app/service/database/models/_Base.py @@ -0,0 +1,44 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sqlalchemy +from typing import Any, List +from sqlalchemy.orm import Session, sessionmaker, declarative_base +from sqlalchemy.sql import text +from sqlalchemy_cockroachdb import run_transaction + +_Base = declarative_base() + +''' +def create_performance_enhancers(db_engine : sqlalchemy.engine.Engine) -> None: + def index_storing( + index_name : str, table_name : str, index_fields : List[str], storing_fields : List[str] + ) -> Any: + str_index_fields = ','.join(['"{:s}"'.format(index_field) for index_field in index_fields]) + str_storing_fields = ','.join(['"{:s}"'.format(storing_field) for storing_field in storing_fields]) + INDEX_STORING = 'CREATE INDEX IF NOT EXISTS {:s} ON "{:s}" ({:s}) STORING ({:s});' + return text(INDEX_STORING.format(index_name, table_name, str_index_fields, str_storing_fields)) + + statements = [ + # In case of relations + ] + def callback(session : Session) -> bool: + for stmt in statements: session.execute(stmt) + run_transaction(sessionmaker(bind=db_engine), callback) +''' + +def rebuild_database(db_engine : sqlalchemy.engine.Engine, drop_if_exists : bool = False): + if drop_if_exists: _Base.metadata.drop_all(db_engine) + _Base.metadata.create_all(db_engine) + #create_performance_enhancers(db_engine) diff --git a/src/app/service/database/models/__init__.py b/src/app/service/database/models/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/app/service/database/models/enums/QKDAppStatus.py b/src/app/service/database/models/enums/QKDAppStatus.py new file mode 100644 index 000000000..d945981a3 --- /dev/null +++ b/src/app/service/database/models/enums/QKDAppStatus.py @@ -0,0 +1,13 @@ +import enum, functools +from common.proto.app_pb2 import QKDAppStatusEnum +from ._GrpcToEnum import grpc_to_enum + +class ORM_QKDAppStatusEnum(enum.Enum): + ON = QKDAppStatusEnum.QKDAPPSTATUS_ON + DISCONNECTED = QKDAppStatusEnum.QKDAPPSTATUS_DISCONNECTED + OUT_OF_TIME = QKDAppStatusEnum.QKDAPPSTATUS_OUT_OF_TIME + ZOMBIE = QKDAppStatusEnum.QKDAPPSTATUS_ZOMBIE + + +grpc_to_enum__qkd_app_status = functools.partial( + grpc_to_enum, QKDAppStatusEnum, ORM_QKDAppStatusEnum) diff --git a/src/app/service/database/models/enums/QKDAppTypes.py b/src/app/service/database/models/enums/QKDAppTypes.py new file mode 100644 index 000000000..df932ab31 --- /dev/null +++ b/src/app/service/database/models/enums/QKDAppTypes.py @@ -0,0 +1,11 @@ +import enum, functools +from common.proto.app_pb2 import QKDAppTypesEnum +from ._GrpcToEnum import grpc_to_enum + +class ORM_QKDAppTypesEnum(enum.Enum): + INTERNAL = QKDAppTypesEnum.QKDAPPTYPES_INTERNAL + CLIENT = QKDAppTypesEnum.QKDAPPTYPES_CLIENT + + +grpc_to_enum__qkd_app_types = functools.partial( + grpc_to_enum, QKDAppTypesEnum, ORM_QKDAppTypesEnum) diff --git a/src/app/service/database/models/enums/_GrpcToEnum.py b/src/app/service/database/models/enums/_GrpcToEnum.py new file mode 100644 index 000000000..0cde8ac5e --- /dev/null +++ b/src/app/service/database/models/enums/_GrpcToEnum.py @@ -0,0 +1,38 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import re +from enum import Enum +from typing import Optional + +# Enumeration classes are redundant with gRPC classes, but gRPC does not provide a programmatical method to retrieve +# the values it expects from strings containing the desired value symbol or its integer value, so a kind of mapping is +# required. Besides, ORM Models expect Enum classes in EnumeratedFields; we create specific and conveniently defined +# Enum classes to serve both purposes. + +def grpc_to_enum(grpc_enum_class, orm_enum_class : Enum, grpc_enum_value, grpc_enum_prefix : Optional[str] = None): + enum_name = grpc_enum_class.Name(grpc_enum_value) + + if grpc_enum_prefix is None: + grpc_enum_prefix = orm_enum_class.__name__.upper() + #grpc_enum_prefix = re.sub(r'^ORM_(.+)$', r'\1', grpc_enum_prefix) + #grpc_enum_prefix = re.sub(r'^(.+)ENUM$', r'\1', grpc_enum_prefix) + #grpc_enum_prefix = grpc_enum_prefix + '_' + grpc_enum_prefix = re.sub(r'^ORM_(.+)ENUM$', r'\1_', grpc_enum_prefix) + + if len(grpc_enum_prefix) > 0: + enum_name = enum_name.replace(grpc_enum_prefix, '') + + orm_enum_value = orm_enum_class._member_map_.get(enum_name) + return orm_enum_value diff --git a/src/app/service/database/models/enums/__init__.py b/src/app/service/database/models/enums/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/app/service/database/uuids/App.py b/src/app/service/database/uuids/App.py new file mode 100644 index 000000000..d3d395140 --- /dev/null +++ b/src/app/service/database/uuids/App.py @@ -0,0 +1,16 @@ +from common.proto.app_pb2 import AppId +from common.method_wrappers.ServiceExceptions import InvalidArgumentsException +from ._Builder import get_uuid_from_string, get_uuid_random + +def app_get_uuid( + app_id : AppId, allow_random : bool = False +) -> str: + app_uuid = app_id.app_uuid.uuid + + if len(app_uuid) > 0: + return get_uuid_from_string(app_uuid) + if allow_random: return get_uuid_random() + + raise InvalidArgumentsException([ + ('app_id.app_uuid.uuid', app_uuid), + ], extra_details=['At least one is required to produce a App UUID']) diff --git a/src/app/service/database/uuids/_Builder.py b/src/app/service/database/uuids/_Builder.py new file mode 100644 index 000000000..75fe51bc0 --- /dev/null +++ b/src/app/service/database/uuids/_Builder.py @@ -0,0 +1,44 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional, Union +from uuid import UUID, uuid4, uuid5 + +# Generate a UUIDv5-like from the SHA-1 of "TFS" and no namespace to be used as the NAMESPACE for all +# the context UUIDs generated. 
For efficiency purposes, the UUID is hardcoded; however, it is produced +# using the following code: +# from hashlib import sha1 +# from uuid import UUID +# hash = sha1(bytes('TFS', 'utf-8')).digest() +# NAMESPACE_TFS = UUID(bytes=hash[:16], version=5) +NAMESPACE_TFS = UUID('200e3a1f-2223-534f-a100-758e29c37f40') + +def get_uuid_from_string(str_uuid_or_name : Union[str, UUID], prefix_for_name : Optional[str] = None) -> str: + # if UUID given, assume it is already a valid UUID + if isinstance(str_uuid_or_name, UUID): return str_uuid_or_name + if not isinstance(str_uuid_or_name, str): + MSG = 'Parameter({:s}) cannot be used to produce a UUID' + raise Exception(MSG.format(str(repr(str_uuid_or_name)))) + try: + # try to parse as UUID + return str(UUID(str_uuid_or_name)) + except: # pylint: disable=bare-except + # produce a UUID within TFS namespace from parameter + if prefix_for_name is not None: + str_uuid_or_name = '{:s}/{:s}'.format(prefix_for_name, str_uuid_or_name) + return str(uuid5(NAMESPACE_TFS, str_uuid_or_name)) + +def get_uuid_random() -> str: + # Generate random UUID. No need to use namespace since "namespace + random = random". + return str(uuid4()) diff --git a/src/app/service/rest_server/RestServer.py b/src/app/service/rest_server/RestServer.py new file mode 100644 index 000000000..f043d7487 --- /dev/null +++ b/src/app/service/rest_server/RestServer.py @@ -0,0 +1,9 @@ +from common.Constants import ServiceNameEnum +from common.Settings import get_service_baseurl_http, get_service_port_http +from common.tools.service.GenericRestServer import GenericRestServer + +class RestServer(GenericRestServer): + def __init__(self, cls_name: str = __name__) -> None: + bind_port = get_service_port_http(ServiceNameEnum.APP) + base_url = get_service_baseurl_http(ServiceNameEnum.APP) + super().__init__(bind_port, base_url, cls_name=cls_name) diff --git a/src/app/service/rest_server/__init__.py b/src/app/service/rest_server/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/app/service/rest_server/qkd_app/Resources.py b/src/app/service/rest_server/qkd_app/Resources.py new file mode 100644 index 000000000..bbb8cd4a6 --- /dev/null +++ b/src/app/service/rest_server/qkd_app/Resources.py @@ -0,0 +1,72 @@ +import uuid, json +from flask import request +from flask_restful import Resource +from common.proto.context_pb2 import Empty +from common.proto.app_pb2 import App, QKDAppTypesEnum +from common.Constants import DEFAULT_CONTEXT_NAME +from context.client.ContextClient import ContextClient +from app.client.AppClient import AppClient + + +class _Resource(Resource): + def __init__(self) -> None: + super().__init__() + self.context_client = ContextClient() + self.app_client = AppClient() + +class Index(_Resource): + def get(self): + return {'hello': 'world'} + +class CreateQKDApp(_Resource): + # Optare: Post request for the QKD Node to call the TeraflowSDN. 
Example of requests below + def post(self): + app = request.get_json()['app'] + + devices = self.context_client.ListDevices(Empty()) + devices = devices.devices + + local_device = None + + + # This for-loop won't be necessary if we can garantee Device ID is the same as QKDN Id + for device in devices: + for config_rule in device.device_config.config_rules: + if config_rule.custom.resource_key == '__node__': + value = json.loads(config_rule.custom.resource_value) + qkdn_id = value['qkdn_id'] + if app['local_qkdn_id'] == qkdn_id: + local_device = device + break + + # Optare: Todo: Verify that a service is present for this app + ''' + requests.post('http://10.211.36.220/app/create_qkd_app', json={'app': {'server_app_id':'1', 'client_app_id':[], 'app_status':'ON', 'local_qkdn_id':'00000001-0000-0000-0000-000000000000', 'backing_qkdl_id':['00000003-0002-0000-0000-000000000000']}}) + + + requests.post('http://10.211.36.220/app/create_qkd_app', json={'app': {'server_app_id':'1', 'client_app_id':[], 'app_status':'ON', 'local_qkdn_id':'00000003-0000-0000-0000-000000000000', 'backing_qkdl_id':['00000003-0002-0000-0000-000000000000']}}) + ''' + + + if local_device is None: + return {"status": "fail"} + + external_app_src_dst = { + 'app_id': {'context_id': {'context_uuid': {'uuid': DEFAULT_CONTEXT_NAME}}, 'app_uuid': {'uuid': ''}}, + 'app_status': 'QKDAPPSTATUS_' + app['app_status'], + 'app_type': QKDAppTypesEnum.QKDAPPTYPES_CLIENT, + 'server_app_id': app['server_app_id'], + 'client_app_id': app['client_app_id'], + 'backing_qkdl_id': [{'qkdl_uuid': {'uuid': qkdl_id}} for qkdl_id in app['backing_qkdl_id']], + 'local_device_id': local_device.device_id, + 'remote_device_id': {'device_uuid': {'uuid': ''}}, + } + + + # Optare: This will call our internal RegisterApp which supports the creation of both internal and external app. + # Optare the verification for knowing if two parties are requesting the same app is done inside RegisterApp's function + self.app_client.RegisterApp(App(**external_app_src_dst)) + + # Optare: Todo: Communicate by SBI with both Nodes of the new App + + return {"status": "success"} diff --git a/src/app/service/rest_server/qkd_app/__init__.py b/src/app/service/rest_server/qkd_app/__init__.py new file mode 100644 index 000000000..a0091dd72 --- /dev/null +++ b/src/app/service/rest_server/qkd_app/__init__.py @@ -0,0 +1,16 @@ +from app.service.rest_server.RestServer import RestServer +from .Resources import ( + CreateQKDApp, Index) + +URL_PREFIX = '/app' + +# Use 'path' type since some identifiers might contain char '/' and Flask is unable to recognize them in 'string' type. 
+RESOURCES = [ + # (endpoint_name, resource_class, resource_url) + ('api.index', Index, '/'), + ('api.register_qkd_app', CreateQKDApp, '/create_qkd_app'), +] + +def register_qkd_app(app_server : RestServer): + for endpoint_name, resource_class, resource_url in RESOURCES: + app_server.add_resource(resource_class, URL_PREFIX + resource_url, endpoint=endpoint_name) -- GitLab From f0de185f24c9ae27a484510f90c459c320ad3364 Mon Sep 17 00:00:00 2001 From: "agbarneo@optaresolutions.com" Date: Wed, 4 Sep 2024 12:57:15 +0000 Subject: [PATCH 2/2] Added the rest of components to do app registration works --- deploy/tfs.sh | 21 +++++ manifests/appservice.yaml | 83 +++++++++++++++++++ manifests/nginx_ingress_http.yaml | 7 ++ my_deploy.sh | 5 +- proto/app.proto | 53 ++++++++++++ src/app/client/{AppClient => AppClient.py} | 0 src/common/Constants.py | 4 + src/service/Dockerfile | 2 + src/service/service/__main__.py | 5 +- .../service/task_scheduler/TaskExecutor.py | 16 +++- src/service/service/tools/ObjectKeys.py | 5 ++ 11 files changed, 197 insertions(+), 4 deletions(-) create mode 100644 manifests/appservice.yaml create mode 100644 proto/app.proto rename src/app/client/{AppClient => AppClient.py} (100%) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 3fdbe77fb..285acc465 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -46,6 +46,26 @@ export TFS_GRAFANA_PASSWORD=${TFS_GRAFANA_PASSWORD:-"admin123+"} export TFS_SKIP_BUILD=${TFS_SKIP_BUILD:-""} +# ----- App CockroachDB -------------------------------------------------------- + +# If not already set, set the namespace where CockroackDB will be deployed. +export APP_CRDB_NAMESPACE=${APP_CRDB_NAMESPACE:-"app-crdb"} + +# If not already set, set the external port CockroackDB Postgre SQL interface will be exposed to. +export APP_CRDB_EXT_PORT_SQL=${APP_CRDB_EXT_PORT_SQL:-"26257"} + +# If not already set, set the external port CockroackDB HTTP Mgmt GUI interface will be exposed to. +export APP_CRDB_EXT_PORT_HTTP=${APP_CRDB_EXT_PORT_HTTP:-"8081"} + +# If not already set, set the database username to be used by Context. +export APP_CRDB_USERNAME=${APP_CRDB_USERNAME:-"tfs"} + +# If not already set, set the database user's password to be used by Context. +export APP_CRDB_PASSWORD=${APP_CRDB_PASSWORD:-"tfs123"} + +# If not already set, set the database name to be used by Context. +export APP_CRDB_DATABASE=${APP_CRDB_DATABASE:-"tfs"} + # ----- CockroachDB ------------------------------------------------------------ # If not already set, set the namespace where CockroackDB will be deployed. @@ -141,6 +161,7 @@ kubectl create secret generic crdb-data --namespace ${TFS_K8S_NAMESPACE} --type= --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \ --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \ --from-literal=CRDB_DATABASE=${CRDB_DATABASE} \ + --from-literal=CRDB_DATABASE_APP=${CRDB_DATABASE_APP} \ --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \ --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \ --from-literal=CRDB_SSLMODE=require diff --git a/manifests/appservice.yaml b/manifests/appservice.yaml new file mode 100644 index 000000000..da0a07353 --- /dev/null +++ b/manifests/appservice.yaml @@ -0,0 +1,83 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: apps/v1 +kind: Deployment +metadata: + name: appservice +spec: + selector: + matchLabels: + app: appservice + #replicas: 1 + template: + metadata: + labels: + app: appservice + spec: + terminationGracePeriodSeconds: 5 + containers: + - name: server + image: labs.etsi.org:5050/tfs/controller/app:latest + imagePullPolicy: Always + ports: + - containerPort: 10060 + - containerPort: 9192 + - containerPort: 8005 + env: + - name: MB_BACKEND + value: "nats" + - name: LOG_LEVEL + value: "INFO" + envFrom: + - secretRef: + name: crdb-data + - secretRef: + name: nats-data + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:10060"] + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:10060"] + resources: + requests: + cpu: 150m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: appservice + labels: + app: appservice +spec: + type: ClusterIP + selector: + app: appservice + ports: + - name: grpc + protocol: TCP + port: 10060 + targetPort: 10060 + - name: metrics + protocol: TCP + port: 9192 + targetPort: 9192 + - name: app + port: 8005 + targetPort: 8005 diff --git a/manifests/nginx_ingress_http.yaml b/manifests/nginx_ingress_http.yaml index 0892f0c9b..249501afa 100644 --- a/manifests/nginx_ingress_http.yaml +++ b/manifests/nginx_ingress_http.yaml @@ -57,3 +57,10 @@ spec: name: nbiservice port: number: 8080 + - path: /()(app/.*) + - pathType: Prefix + backend: + service: + name: appservice + port: + number: 8005 diff --git a/my_deploy.sh b/my_deploy.sh index 8417f6eae..b52434d33 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,13 +20,13 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator" +export TFS_COMPONENTS="context device app pathcomp service slice nbi webui load_generator" # Uncomment to activate Monitoring #export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" # Uncomment to activate BGP-LS Speaker -#export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker" +export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker" # Uncomment to activate Optical Controller # To manage optical connections, "service" requires "opticalcontroller" to be deployed @@ -100,6 +100,7 @@ export CRDB_PASSWORD="tfs123" # Set the database name to be used by Context. export CRDB_DATABASE="tfs" +export CRDB_DATABASE_APP="tfs_app" # Set CockroachDB installation mode to 'single'. This option is convenient for development and testing. # See ./deploy/all.sh or ./deploy/crdb.sh for additional details diff --git a/proto/app.proto b/proto/app.proto new file mode 100644 index 000000000..d2ed33500 --- /dev/null +++ b/proto/app.proto @@ -0,0 +1,53 @@ +syntax = "proto3"; +package app; + +import "context.proto"; + +// Optare: Change this if you want to change App's structure or enums. 
+// Optare: If a message (structure) is changed it must be changed in src/app/service/database + +enum QKDAppStatusEnum { + QKDAPPSTATUS_ON = 0; + QKDAPPSTATUS_DISCONNECTED = 1; + QKDAPPSTATUS_OUT_OF_TIME = 2; + QKDAPPSTATUS_ZOMBIE = 3; +} + +enum QKDAppTypesEnum { + QKDAPPTYPES_INTERNAL = 0; + QKDAPPTYPES_CLIENT = 1; +} + +message QKDLId { + context.Uuid qkdl_uuid = 1; +} + + +message App { + AppId app_id = 1; + QKDAppStatusEnum app_status = 2; + QKDAppTypesEnum app_type = 3; + string server_app_id = 4; + repeated string client_app_id = 5; + repeated QKDLId backing_qkdl_id = 6; + context.DeviceId local_device_id = 7; + context.DeviceId remote_device_id = 8; +} + + +message AppId { + context.ContextId context_id = 1; + context.Uuid app_uuid = 2; +} + + +service AppService { + rpc RegisterApp(App) returns (context.Empty) {} + rpc ListApps (context.ContextId ) returns ( AppList ) {} + } + + + + message AppList { + repeated App apps = 1; +} diff --git a/src/app/client/AppClient b/src/app/client/AppClient.py similarity index 100% rename from src/app/client/AppClient rename to src/app/client/AppClient.py diff --git a/src/common/Constants.py b/src/common/Constants.py index de9ac45a4..511735ea4 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -61,6 +61,7 @@ class ServiceNameEnum(Enum): E2EORCHESTRATOR = 'e2eorchestrator' OPTICALCONTROLLER = 'opticalcontroller' BGPLS = 'bgpls-speaker' + APP = 'app' # Used for test and debugging only DLT_GATEWAY = 'dltgateway' @@ -89,6 +90,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.FORECASTER .value : 10040, ServiceNameEnum.E2EORCHESTRATOR .value : 10050, ServiceNameEnum.OPTICALCONTROLLER .value : 10060, + ServiceNameEnum.APP .value : 10070, ServiceNameEnum.BGPLS .value : 20030, # Used for test and debugging only @@ -101,10 +103,12 @@ DEFAULT_SERVICE_HTTP_PORTS = { ServiceNameEnum.CONTEXT .value : 8080, ServiceNameEnum.NBI .value : 8080, ServiceNameEnum.WEBUI .value : 8004, + ServiceNameEnum.APP .value : 8005, } # Default HTTP/REST-API service base URLs DEFAULT_SERVICE_HTTP_BASEURLS = { ServiceNameEnum.NBI .value : None, ServiceNameEnum.WEBUI .value : None, + ServiceNameEnum.APP .value : None, } diff --git a/src/service/Dockerfile b/src/service/Dockerfile index a847ae762..cfb9900ae 100644 --- a/src/service/Dockerfile +++ b/src/service/Dockerfile @@ -70,6 +70,8 @@ COPY src/pathcomp/frontend/__init__.py pathcomp/frontend/__init__.py COPY src/pathcomp/frontend/client/. pathcomp/frontend/client/ COPY src/e2e_orchestrator/__init__.py e2e_orchestrator/__init__.py COPY src/e2e_orchestrator/client/. e2e_orchestrator/client/ +COPY src/app/__init__.py app/__init__.py +COPY src/app/client/. app/client/ COPY src/service/. 
service/ # Start the service diff --git a/src/service/service/__main__.py b/src/service/service/__main__.py index ae8a9e960..9e7e7b46b 100644 --- a/src/service/service/__main__.py +++ b/src/service/service/__main__.py @@ -44,6 +44,9 @@ def main(): get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), get_env_var_name(ServiceNameEnum.PATHCOMP, ENVVAR_SUFIX_SERVICE_HOST ), get_env_var_name(ServiceNameEnum.PATHCOMP, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.APP, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.APP, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + ]) signal.signal(signal.SIGINT, signal_handler) @@ -72,4 +75,4 @@ def main(): return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) \ No newline at end of file diff --git a/src/service/service/task_scheduler/TaskExecutor.py b/src/service/service/task_scheduler/TaskExecutor.py index cd20faad2..487fe5b85 100644 --- a/src/service/service/task_scheduler/TaskExecutor.py +++ b/src/service/service/task_scheduler/TaskExecutor.py @@ -20,6 +20,7 @@ from common.proto.context_pb2 import ( Connection, ConnectionId, Device, DeviceDriverEnum, DeviceId, Service, ServiceId, OpticalConfig, OpticalConfigId ) +from common.proto.app_pb2 import App from common.tools.context_queries.Connection import get_connection_by_id from common.tools.context_queries.Device import get_device from common.tools.context_queries.Service import get_service_by_id @@ -27,11 +28,12 @@ from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Device import json_device_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient +from app.client.AppClient import AppClient from service.service.service_handler_api.Exceptions import ( UnsatisfiedFilterException, UnsupportedFilterFieldException, UnsupportedFilterFieldValueException ) from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory, get_service_handler_class -from service.service.tools.ObjectKeys import get_connection_key, get_device_key, get_service_key +from service.service.tools.ObjectKeys import get_connection_key, get_device_key, get_service_key, get_app_key if TYPE_CHECKING: from service.service.service_handler_api._ServiceHandler import _ServiceHandler @@ -44,11 +46,14 @@ class CacheableObjectType(Enum): CONNECTION = 'connection' DEVICE = 'device' SERVICE = 'service' + APP = 'app' class TaskExecutor: def __init__(self, service_handler_factory : ServiceHandlerFactory) -> None: self._service_handler_factory = service_handler_factory self._context_client = ContextClient() + # DEPENDENCY QKD + self._app_client = AppClient() self._device_client = DeviceClient() self._grpc_objects_cache : Dict[str, CacheableObject] = dict() @@ -220,3 +225,12 @@ class TaskExecutor: str(dict_connection_devices) ) ) + + + # ----- App-related methods --------------------------------------------------------------------------------------- + + def register_app(self, app: App) -> None: + app_key = get_app_key(app.app_id) + self._app_client.RegisterApp(app) + LOGGER.info("reg registered") + self._store_grpc_object(CacheableObjectType.APP, app_key, app) \ No newline at end of file diff --git a/src/service/service/tools/ObjectKeys.py b/src/service/service/tools/ObjectKeys.py index f45126e07..50129c704 100644 --- a/src/service/service/tools/ObjectKeys.py +++ b/src/service/service/tools/ObjectKeys.py @@ -13,6 +13,7 @@ # limitations under the License. 
from common.proto.context_pb2 import ConnectionId, DeviceId, ServiceId +from common.proto.app_pb2 import AppId def get_connection_key(connection_id : ConnectionId) -> str: return connection_id.connection_uuid.uuid @@ -24,3 +25,7 @@ def get_service_key(service_id : ServiceId) -> str: context_uuid = service_id.context_id.context_uuid.uuid service_uuid = service_id.service_uuid.uuid return '{:s}/{:s}'.format(context_uuid, service_uuid) + +def get_app_key(app_id : AppId) -> str: + return app_id.app_uuid.uuid + -- GitLab
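End to end, a QKD node registers an app through the REST endpoint added in the first patch (Resources.py) and exposed by the appservice manifest and nginx ingress rule added in the second. A request sketch, reusing the example payload from the comments in Resources.py; the IP address is the example one from those comments and stands in for the actual TFS ingress address:

import requests

payload = {'app': {
    'server_app_id'  : '1',
    'client_app_id'  : [],
    'app_status'     : 'ON',
    'local_qkdn_id'  : '00000001-0000-0000-0000-000000000000',
    'backing_qkdl_id': ['00000003-0002-0000-0000-000000000000'],
}}
response = requests.post('http://10.211.36.220/app/create_qkd_app', json=payload)
print(response.json())   # {'status': 'success'} if a device advertising that qkdn_id exists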