From ee3d0767356b3b7c198fd28d4737bc97a8eaa72d Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 11 Mar 2024 08:43:30 +0000
Subject: [PATCH 001/205] Changes are made to activate the Monitoring module and the E2E Orchestrator, and to enable the CockroachDB/QuestDB drop flags

---
 my_deploy.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/my_deploy.sh b/my_deploy.sh
index 0fcb51f90..5bd58d0dc 100755
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -23,7 +23,7 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/"
 export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator"
 
 # Uncomment to activate Monitoring
-#export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
+export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
 
 # Uncomment to activate ZTP
 #export TFS_COMPONENTS="${TFS_COMPONENTS} ztp"
@@ -44,7 +44,7 @@ export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_gene
 #export TFS_COMPONENTS="${TFS_COMPONENTS} forecaster"
 
 # Uncomment to activate E2E Orchestrator
-#export TFS_COMPONENTS="${TFS_COMPONENTS} e2e_orchestrator"
+export TFS_COMPONENTS="${TFS_COMPONENTS} e2e_orchestrator"
 
 # Set the tag you want to use for your images.
 export TFS_IMAGE_TAG="dev"
@@ -93,7 +93,7 @@ export CRDB_DATABASE="tfs"
 export CRDB_DEPLOY_MODE="single"
 
 # Disable flag for dropping database, if it exists.
-export CRDB_DROP_DATABASE_IF_EXISTS=""
+export CRDB_DROP_DATABASE_IF_EXISTS="YES"
 
 # Disable flag for re-deploying CockroachDB from scratch.
 export CRDB_REDEPLOY=""
@@ -141,7 +141,7 @@ export QDB_TABLE_MONITORING_KPIS="tfs_monitoring_kpis"
 export QDB_TABLE_SLICE_GROUPS="tfs_slice_groups"
 
 # Disable flag for dropping tables if they exist.
-export QDB_DROP_TABLES_IF_EXIST=""
+export QDB_DROP_TABLES_IF_EXIST="YES"
 
 # Disable flag for re-deploying QuestDB from scratch.
 export QDB_REDEPLOY=""
-- 
GitLab


From c67611f30b5fce1d88e718e89288810a7c5a6e73 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:07:36 +0000
Subject: [PATCH 002/205] New kpi_manager.proto file is created.

---
 proto/kpi_manager.proto | 47 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 proto/kpi_manager.proto

diff --git a/proto/kpi_manager.proto b/proto/kpi_manager.proto
new file mode 100644
index 000000000..f5769ed37
--- /dev/null
+++ b/proto/kpi_manager.proto
@@ -0,0 +1,47 @@
+// Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+package kpi_manager;
+
+import "context.proto";
+import "kpi_sample_types.proto";
+
+service KpiManagerService{
+  rpc SetKpi               (KpiDescriptor     ) returns (KpiId             ) {} // Stable not final
+  rpc DeleteKpi            (KpiId             ) returns (context.Empty     ) {} // Stable and final
+  rpc GetKpiDescriptor     (KpiId             ) returns (KpiDescriptor     ) {} // Stable and final
+  rpc GetKpiDescriptorList (context.Empty     ) returns (KpiDescriptorList ) {} // Stable and final
+}
+
+message KpiDescriptor {
+  KpiId          kpi_id          = 1;
+  string         kpi_description = 2;
+  repeated KpiId kpi_id_list     = 3;
+  kpi_sample_types.KpiSampleType kpi_sample_type = 4;
+  context.DeviceId     device_id     = 5;
+  context.EndPointId   endpoint_id   = 6;
+  context.ServiceId    service_id    = 7;
+  context.SliceId      slice_id      = 8;
+  context.ConnectionId connection_id = 9;
+  context.LinkId       link_id       = 10;
+}
+
+message KpiId {
+  context.Uuid kpi_id = 1;
+}
+
+message KpiDescriptorList {
+  repeated KpiDescriptor kpi_descriptor_list = 1;
+}
\ No newline at end of file
-- 
GitLab


From 5a85e8f320c5b0e9492436c2a9c09f6f8ba25074 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:08:52 +0000
Subject: [PATCH 003/205] imports are updated to refer to kpi_manager.proto

---
 proto/device.proto                  | 7 ++++---
 proto/optical_attack_detector.proto | 5 +++--
 proto/policy_condition.proto        | 3 ++-
 3 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/proto/device.proto b/proto/device.proto
index 30e60079d..98cca8ce9 100644
--- a/proto/device.proto
+++ b/proto/device.proto
@@ -16,7 +16,8 @@ syntax = "proto3";
 package device;
 
 import "context.proto";
-import "monitoring.proto";
+//import "monitoring.proto";
+import "kpi_manager.proto";
 
 service DeviceService {
   rpc AddDevice       (context.Device    ) returns (context.DeviceId    ) {}
@@ -27,8 +28,8 @@ service DeviceService {
 }
 
 message MonitoringSettings {
-  monitoring.KpiId         kpi_id         = 1;
-  monitoring.KpiDescriptor kpi_descriptor = 2;
+  kpi_manager.KpiId         kpi_id        = 1;
+  kpi_manager.KpiDescriptor kpi_descriptor = 2;
   float sampling_duration_s = 3;
   float sampling_interval_s = 4;
 }

diff --git a/proto/optical_attack_detector.proto b/proto/optical_attack_detector.proto
index ebe3b5e06..0d3ed58de 100644
--- a/proto/optical_attack_detector.proto
+++ b/proto/optical_attack_detector.proto
@@ -17,7 +17,8 @@ syntax = "proto3";
 package optical_attack_detector;
 
 import "context.proto";
-import "monitoring.proto";
+//import "monitoring.proto";
+import "kpi_manager.proto";
 
 service OpticalAttackDetectorService {
 
@@ -28,5 +29,5 @@ service OpticalAttackDetectorService {
 
 message DetectionRequest {
   context.ServiceId service_id = 1;
-  monitoring.KpiId  kpi_id     = 2;
+  kpi_manager.KpiId kpi_id     = 2;
 }

diff --git a/proto/policy_condition.proto b/proto/policy_condition.proto
index 2037af93c..c0af929ef 100644
--- a/proto/policy_condition.proto
+++ b/proto/policy_condition.proto
@@ -16,10 +16,11 @@ syntax = "proto3";
 package policy;
 
 import "monitoring.proto";
+import "kpi_manager.proto";
 
 // Condition
 message PolicyRuleCondition {
-  monitoring.KpiId  kpiId = 1;
+  kpi_manager.KpiId kpiId = 1;
   NumericalOperator numericalOperator = 2;
   monitoring.KpiValue kpiValue = 3;
 }
-- 
GitLab


From a78bcae188ec444a5e78f8a30df6488814d26176 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:10:14 +0000
Subject: [PATCH 004/205] few methods and messages are moved to kpi_manager.proto

---
 proto/monitoring.proto | 58 ++++++++++++++++--------------------------
 1 file changed, 22 insertions(+), 36 deletions(-)
 mode change 100644 => 100755 proto/monitoring.proto

diff --git 
a/proto/monitoring.proto b/proto/monitoring.proto old mode 100644 new mode 100755 index 45ba48b02..2706988aa --- a/proto/monitoring.proto +++ b/proto/monitoring.proto @@ -16,13 +16,14 @@ syntax = "proto3"; package monitoring; import "context.proto"; -import "kpi_sample_types.proto"; +import "kpi_manager.proto"; +//import "kpi_sample_types.proto"; service MonitoringService { - rpc SetKpi (KpiDescriptor ) returns (KpiId ) {} // Stable not final - rpc DeleteKpi (KpiId ) returns (context.Empty ) {} // Stable and final - rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} // Stable and final - rpc GetKpiDescriptorList (context.Empty ) returns (KpiDescriptorList ) {} // Stable and final +// rpc SetKpi (KpiDescriptor ) returns (KpiId ) {} // Stable not final +// rpc DeleteKpi (KpiId ) returns (context.Empty ) {} // Stable and final +// rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} // Stable and final +// rpc GetKpiDescriptorList (context.Empty ) returns (KpiDescriptorList ) {} // Stable and final rpc IncludeKpi (Kpi ) returns (context.Empty ) {} // Stable and final rpc MonitorKpi (MonitorKpiRequest ) returns (context.Empty ) {} // Stable and final rpc QueryKpiData (KpiQuery ) returns (RawKpiTable ) {} // Not implemented @@ -35,36 +36,25 @@ service MonitoringService { rpc GetAlarmDescriptor (AlarmID ) returns (AlarmDescriptor ) {} // Stable and final rpc GetAlarmResponseStream(AlarmSubscription ) returns (stream AlarmResponse) {} // Not Stable not final rpc DeleteAlarm (AlarmID ) returns (context.Empty ) {} // Stable and final - rpc GetStreamKpi (KpiId ) returns (stream Kpi ) {} // Stable not final - rpc GetInstantKpi (KpiId ) returns (Kpi ) {} // Stable not final +// rpc GetStreamKpi (KpiId ) returns (stream Kpi ) {} // Stable not final +// rpc GetInstantKpi (KpiId ) returns (Kpi ) {} // Stable not final } -message KpiDescriptor { - KpiId kpi_id = 1; - string kpi_description = 2; - repeated KpiId kpi_id_list = 3; - kpi_sample_types.KpiSampleType kpi_sample_type = 4; - context.DeviceId device_id = 5; - context.EndPointId endpoint_id = 6; - context.ServiceId service_id = 7; - context.SliceId slice_id = 8; - context.ConnectionId connection_id = 9; - context.LinkId link_id = 10; -} + message MonitorKpiRequest { - KpiId kpi_id = 1; + kpi_manager.KpiId kpi_id = 1; float monitoring_window_s = 2; float sampling_rate_s = 3; // Pending add field to reflect Available Device Protocols } message KpiQuery { - repeated KpiId kpi_ids = 1; - float monitoring_window_s = 2; - uint32 last_n_samples = 3; // used when you want something like "get the last N many samples - context.Timestamp start_timestamp = 4; // used when you want something like "get the samples since X date/time" - context.Timestamp end_timestamp = 5; // used when you want something like "get the samples until X date/time" + repeated kpi_manager.KpiId kpi_ids = 1; + float monitoring_window_s = 2; + uint32 last_n_samples = 3; // used when you want something like "get the last N many samples + context.Timestamp start_timestamp = 4; // used when you want something like "get the samples since X date/time" + context.Timestamp end_timestamp = 5; // used when you want something like "get the samples until X date/time" } @@ -74,20 +64,18 @@ message RawKpi { // cell } message RawKpiList { // column - KpiId kpi_id = 1; - repeated RawKpi raw_kpis = 2; + kpi_manager.KpiId kpi_id = 1; + repeated RawKpi raw_kpis = 2; } message RawKpiTable { // table repeated RawKpiList raw_kpi_lists = 1; } -message KpiId { - context.Uuid kpi_id = 1; -} + message 
Kpi { - KpiId kpi_id = 1; + kpi_manager.KpiId kpi_id = 1; context.Timestamp timestamp = 2; KpiValue kpi_value = 3; } @@ -117,13 +105,11 @@ message KpiList { repeated Kpi kpi = 1; } -message KpiDescriptorList { - repeated KpiDescriptor kpi_descriptor_list = 1; -} + message SubsDescriptor{ SubscriptionID subs_id = 1; - KpiId kpi_id = 2; + kpi_manager.KpiId kpi_id = 2; float sampling_duration_s = 3; float sampling_interval_s = 4; context.Timestamp start_timestamp = 5; // used when you want something like "get the samples since X date/time" @@ -148,7 +134,7 @@ message AlarmDescriptor { AlarmID alarm_id = 1; string alarm_description = 2; string name = 3; - KpiId kpi_id = 4; + kpi_manager.KpiId kpi_id = 4; KpiValueRange kpi_value_range = 5; context.Timestamp timestamp = 6; } -- GitLab From c02883ed78e500104a4f57cdbd2f9aa9f7315cc4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 19 Mar 2024 07:11:13 +0000 Subject: [PATCH 005/205] service name enum and default grpc port is added --- src/common/Constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/common/Constants.py b/src/common/Constants.py index 30aa09b4c..838b028a7 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -43,6 +43,7 @@ class ServiceNameEnum(Enum): ZTP = 'ztp' POLICY = 'policy' MONITORING = 'monitoring' + KPIMANGER = 'kpiManager' DLT = 'dlt' NBI = 'nbi' CYBERSECURITY = 'cybersecurity' @@ -73,6 +74,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.ZTP .value : 5050, ServiceNameEnum.POLICY .value : 6060, ServiceNameEnum.MONITORING .value : 7070, + ServiceNameEnum.KPIMANGER .value : 7071, ServiceNameEnum.DLT .value : 8080, ServiceNameEnum.NBI .value : 9090, ServiceNameEnum.L3_CAD .value : 10001, -- GitLab From 65fd027c946f5e266311877f0541e030867fd1ee Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 19 Mar 2024 07:11:59 +0000 Subject: [PATCH 006/205] initial client file for kpi manager --- src/kpi_manager/client/KpiManagerClient.py | 76 ++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 src/kpi_manager/client/KpiManagerClient.py diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py new file mode 100644 index 000000000..3aa6fc65d --- /dev/null +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -0,0 +1,76 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
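

A quick usage sketch for the client added by this patch (not itself part of the patch): it assumes the generated kpi_manager_pb2 stubs, uses the final class name KpiManagerClient adopted in PATCH 012, and the device/endpoint UUID values are purely illustrative.

    from kpi_manager.client.KpiManagerClient import KpiManagerClient
    from common.proto.kpi_manager_pb2 import KpiDescriptor
    from common.proto.kpi_sample_types_pb2 import KpiSampleType

    client = KpiManagerClient()   # host/port resolve via ServiceNameEnum.KPIMANAGER (default gRPC port 7071)
    descriptor = KpiDescriptor()
    descriptor.kpi_description = 'Packets received on endpoint END1'
    descriptor.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
    descriptor.device_id.device_uuid.uuid     = 'DEV1'   # illustrative UUIDs
    descriptor.endpoint_id.endpoint_uuid.uuid = 'END1'
    kpi_id = client.SetKpi(descriptor)                   # returns the KpiId assigned by the server
    descriptor_back = client.GetKpiDescriptor(kpi_id)    # round-trips the stored descriptor
    client.DeleteKpi(kpi_id)
    client.close()
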
+
+import grpc, logging
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_host, get_service_port_grpc
+
+from common.tools.client.RetryDecorator import retry, delay_exponential
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from common.proto.context_pb2 import Empty
+from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList
+from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub
+
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+
+class KpiManagerclient:
+    def __init__(self, host=None, port=None):
+        if not host: host = get_service_host(ServiceNameEnum.KPIMANGER)      # update enum
+        if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANGER) # update enum
+        self.endpoint = '{:s}:{:s}'.format(str(host), str(port))
+        LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
+        self.channel = None
+        self.stub = None
+        self.connect()
+        LOGGER.debug('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = KpiManagerServiceStub(self.channel)
+
+    def close(self):
+        if self.channel is not None: self.channel.close()
+        self.channel = None
+        self.stub = None
+
+    @RETRY_DECORATOR
+    def SetKpi(self, request : KpiDescriptor) -> KpiId:
+        LOGGER.debug('SetKpi: {:s}'.format(grpc_message_to_json_string(request)))
+        response = self.stub.SetKpi(request)
+        LOGGER.debug('SetKpi result: {:s}'.format(grpc_message_to_json_string(response)))
+        return response
+
+    @RETRY_DECORATOR
+    def DeleteKpi(self, request : KpiId) -> Empty:
+        LOGGER.debug('DeleteKpi: {:s}'.format(grpc_message_to_json_string(request)))
+        response = self.stub.DeleteKpi(request)
+        LOGGER.info('DeleteKpi result: {:s}'.format(grpc_message_to_json_string(response)))
+        return response
+
+    @RETRY_DECORATOR
+    def GetKpiDescriptor(self, request : KpiId) -> KpiDescriptor:
+        LOGGER.debug('GetKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request)))
+        response = self.stub.GetKpiDescriptor(request)
+        LOGGER.debug('GetKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response)))
+        return response
+
+    @RETRY_DECORATOR
+    def GetKpiDescriptorList(self, request : Empty) -> KpiDescriptorList:
+        LOGGER.debug('GetKpiDescriptorList: {:s}'.format(grpc_message_to_json_string(request)))
+        response = self.stub.GetKpiDescriptorList(request)
+        LOGGER.debug('GetKpiDescriptorList result: {:s}'.format(grpc_message_to_json_string(response)))
+        return response
\ No newline at end of file
-- 
GitLab


From 0b5fd79d8270475cf0e38cc3970ad77a14e8a08e Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:13:09 +0000
Subject: [PATCH 007/205] initial service file for kpi manager

---
 src/kpi_manager/server/KpiManagerServer.py | 122 +++++++++++++++++++++
 1 file changed, 122 insertions(+)
 create mode 100644 src/kpi_manager/server/KpiManagerServer.py

diff --git a/src/kpi_manager/server/KpiManagerServer.py b/src/kpi_manager/server/KpiManagerServer.py
new file mode 100644
index 000000000..0a8932aab
--- /dev/null
+++ b/src/kpi_manager/server/KpiManagerServer.py
@@ -0,0 +1,122 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os, grpc
+from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
+from common.proto.context_pb2 import Empty
+
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_port_grpc
+from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server
+from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer
+from monitoring.service.NameMapping import NameMapping
+
+from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId, KpiDescriptorList
+from monitoring.service import ManagementDBTools
+
+from common.tools.service.GenericGrpcService import GenericGrpcService
+
+LOGGER = logging.getLogger(__name__)
+
+METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME")
+METRICSDB_ILP_PORT = os.environ.get("METRICSDB_ILP_PORT")
+METRICSDB_REST_PORT = os.environ.get("METRICSDB_REST_PORT")
+METRICSDB_TABLE_MONITORING_KPIS = os.environ.get("METRICSDB_TABLE_MONITORING_KPIS")
+
+METRICS_POOL = MetricsPool('Monitoring', 'RPC')
+
+class KpiManagerServer(KpiManagerServiceServicer):
+    def __init__(self, cls_name: str = __name__):
+        LOGGER.info('Init KpiManagerService')
+        port = get_service_port_grpc(ServiceNameEnum.KPIMANGER)   # port updated
+        GenericGrpcService(port, cls_name = cls_name)              # class inheritance was removed
+
+        # Init sqlite monitoring db
+        self.management_db = ManagementDBTools.ManagementDB('monitoring.db')   # why monitoring.db here???
+        LOGGER.info('MetricsDB initialized --- KPI Manager Service')
+
+    def install_servicers(self):
+        # There is no need to create the "MonitoringServiceServicerImpl" instance because actual class
+        # implementation exists in the same class.
+        
+ add_KpiManagerServiceServicer_to_server(KpiManagerServer(), self.server) + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SetKpi( + self, request: KpiDescriptor, grpc_context: grpc.ServicerContext + ) -> KpiId: + response = KpiId() + kpi_description = request.kpi_description + kpi_sample_type = request.kpi_sample_type + kpi_device_id = request.device_id.device_uuid.uuid + kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid + kpi_service_id = request.service_id.service_uuid.uuid + kpi_slice_id = request.slice_id.slice_uuid.uuid + kpi_connection_id = request.connection_id.connection_uuid.uuid + kpi_link_id = request.link_id.link_uuid.uuid + if request.kpi_id.kpi_id.uuid != "": + response.kpi_id.uuid = request.kpi_id.kpi_id.uuid + # Here the code to modify an existing kpi + else: + data = self.management_db.insert_KPI( + kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, + kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id) + response.kpi_id.uuid = str(data) + return response + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: + kpi_id = int(request.kpi_id.uuid) + kpi = self.management_db.get_KPI(kpi_id) + if kpi: + self.management_db.delete_KPI(kpi_id) + else: + LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) + return Empty() + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: + kpi_id = request.kpi_id.uuid + kpi_db = self.management_db.get_KPI(int(kpi_id)) + kpiDescriptor = KpiDescriptor() + if kpi_db is None: + LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) + else: + kpiDescriptor.kpi_description = kpi_db[1] + kpiDescriptor.kpi_sample_type = kpi_db[2] + kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) + kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) + kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) + kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) + kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) + kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) + return kpiDescriptor + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: + kpi_descriptor_list = KpiDescriptorList() + data = self.management_db.get_KPIS() + LOGGER.debug(f"data: {data}") + for item in data: + kpi_descriptor = KpiDescriptor() + kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) + kpi_descriptor.kpi_description = item[1] + kpi_descriptor.kpi_sample_type = item[2] + kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) + kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) + kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) + kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) + kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) + kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) + kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) + return kpi_descriptor_list \ No newline at end of file -- GitLab From 5b6399dc2bb9665bbcfc4207239923f82549d938 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 19 Mar 2024 07:36:20 +0000 Subject: [PATCH 008/205] TYPO - "KPIMANGER" changed to "KPIMANAGER" --- src/common/Constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/common/Constants.py b/src/common/Constants.py
index 838b028a7..4a0f3a226 100644
--- a/src/common/Constants.py
+++ b/src/common/Constants.py
@@ -43,7 +43,7 @@ class ServiceNameEnum(Enum):
     ZTP            = 'ztp'
     POLICY         = 'policy'
     MONITORING     = 'monitoring'
-    KPIMANGER      = 'kpiManager'
+    KPIMANAGER     = 'kpiManager'
     DLT            = 'dlt'
     NBI            = 'nbi'
     CYBERSECURITY  = 'cybersecurity'
-- 
GitLab


From 67146f1e6061f3190659ef1d0f87e07b270c746c Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:37:23 +0000
Subject: [PATCH 009/205] updated KPIMANAGER ServiceNameEnum

---
 src/kpi_manager/client/KpiManagerClient.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py
index 3aa6fc65d..d31a8b60f 100644
--- a/src/kpi_manager/client/KpiManagerClient.py
+++ b/src/kpi_manager/client/KpiManagerClient.py
@@ -29,8 +29,8 @@ RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION,
 
 class KpiManagerclient:
     def __init__(self, host=None, port=None):
-        if not host: host = get_service_host(ServiceNameEnum.KPIMANGER)      # update enum
-        if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANGER) # update enum
+        if not host: host = get_service_host(ServiceNameEnum.KPIMANAGER)      # update enum
+        if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # update enum
         self.endpoint = '{:s}:{:s}'.format(str(host), str(port))
         LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
         self.channel = None
-- 
GitLab


From 2223326625edeeab4a89e0535ce490e73725db1d Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 19 Mar 2024 07:38:50 +0000
Subject: [PATCH 010/205] update of KPIMANAGER ServiceNameEnum and removal of unnecessary variables.

---
 src/kpi_manager/server/KpiManagerServer.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/src/kpi_manager/server/KpiManagerServer.py b/src/kpi_manager/server/KpiManagerServer.py
index 0a8932aab..d42ce14eb 100644
--- a/src/kpi_manager/server/KpiManagerServer.py
+++ b/src/kpi_manager/server/KpiManagerServer.py
@@ -29,17 +29,12 @@ from common.tools.service.GenericGrpcService import GenericGrpcService
 
 LOGGER = logging.getLogger(__name__)
 
-METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME")
-METRICSDB_ILP_PORT = os.environ.get("METRICSDB_ILP_PORT")
-METRICSDB_REST_PORT = os.environ.get("METRICSDB_REST_PORT")
-METRICSDB_TABLE_MONITORING_KPIS = os.environ.get("METRICSDB_TABLE_MONITORING_KPIS")
-
 METRICS_POOL = MetricsPool('Monitoring', 'RPC')
 
 class KpiManagerServer(KpiManagerServiceServicer):
     def __init__(self, cls_name: str = __name__):
         LOGGER.info('Init KpiManagerService')
-        port = get_service_port_grpc(ServiceNameEnum.KPIMANGER)   # port updated
+        port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER)  # port updated
         GenericGrpcService(port, cls_name = cls_name)              # class inheritance was removed
-- 
GitLab


From e5bfc7b10d25c24d03ecc348d948dba4d0639529 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 22 Mar 2024 08:21:45 +0000
Subject: [PATCH 011/205] blank line is removed

---
 src/kpi_manager/__init__.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 src/kpi_manager/__init__.py

diff --git a/src/kpi_manager/__init__.py b/src/kpi_manager/__init__.py
new file mode 100644
index 000000000..1549d9811
--- /dev/null
+++ b/src/kpi_manager/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From 7ef9ed5c57db3a562f10453958baf18cff6c5ef0 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:23:44 +0000 Subject: [PATCH 012/205] Class name is changed from "KpiManagerclient" to "KpiManagerClient" --- src/kpi_manager/client/KpiManagerClient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index d31a8b60f..6baca7cef 100644 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -27,7 +27,7 @@ MAX_RETRIES = 15 DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') -class KpiManagerclient: +class KpiManagerClient: def __init__(self, host=None, port=None): if not host: host = get_service_host(ServiceNameEnum.KPIMANAGER) # update enum if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # update enum -- GitLab From e6b8b7a57ad254ae12bf705238f6b8d6e72c01af Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:24:11 +0000 Subject: [PATCH 013/205] blank line is removed --- src/kpi_manager/service/KpiManagerService.py | 117 +++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 src/kpi_manager/service/KpiManagerService.py diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py new file mode 100644 index 000000000..d42ce14eb --- /dev/null +++ b/src/kpi_manager/service/KpiManagerService.py @@ -0,0 +1,117 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
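

One caveat on the file that follows (a copy of KpiManagerServer.py): its __init__ builds a GenericGrpcService instance without subclassing it or keeping a reference, so the install_servicers() hook appears never to be invoked, and the class inside is still named KpiManagerServer even though the tests in PATCH 016 import KpiManagerService from this module. A sketch of the inheritance-based wiring, following the MockContextService pattern used in the tests of PATCH 016; the separate servicer class is an assumption, not code from this series:

    from common.Constants import ServiceNameEnum
    from common.Settings import get_service_port_grpc
    from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server
    from common.tools.service.GenericGrpcService import GenericGrpcService

    class KpiManagerService(GenericGrpcService):
        def __init__(self, cls_name: str = __name__):
            port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER)
            super().__init__(port, cls_name=cls_name)
            # KpiManagerServiceServicerImpl is hypothetical: the RPC methods below would move there
            self.kpi_manager_servicer = KpiManagerServiceServicerImpl()

        def install_servicers(self):
            # GenericGrpcService is expected to call this during start() to register the servicer
            add_KpiManagerServiceServicer_to_server(self.kpi_manager_servicer, self.server)
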
+
+import logging, os, grpc
+from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
+from common.proto.context_pb2 import Empty
+
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_port_grpc
+from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server
+from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer
+from monitoring.service.NameMapping import NameMapping
+
+from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId, KpiDescriptorList
+from monitoring.service import ManagementDBTools
+
+from common.tools.service.GenericGrpcService import GenericGrpcService
+
+LOGGER = logging.getLogger(__name__)
+
+METRICS_POOL = MetricsPool('Monitoring', 'RPC')
+
+class KpiManagerServer(KpiManagerServiceServicer):
+    def __init__(self, cls_name: str = __name__):
+        LOGGER.info('Init KpiManagerService')
+        port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER)  # port updated
+        GenericGrpcService(port, cls_name = cls_name)              # class inheritance was removed
+
+        # Init sqlite monitoring db
+        self.management_db = ManagementDBTools.ManagementDB('monitoring.db')   # why monitoring.db here???
+        LOGGER.info('MetricsDB initialized --- KPI Manager Service')
+
+    def install_servicers(self):
+        # There is no need to create the "MonitoringServiceServicerImpl" instance because actual class
+        # implementation exists in the same class.
+        add_KpiManagerServiceServicer_to_server(KpiManagerServer(), self.server)
+
+    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
+    def SetKpi(
+        self, request: KpiDescriptor, grpc_context: grpc.ServicerContext
+    ) -> KpiId:
+        response = KpiId()
+        kpi_description = request.kpi_description
+        kpi_sample_type = request.kpi_sample_type
+        kpi_device_id = request.device_id.device_uuid.uuid
+        kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid
+        kpi_service_id = request.service_id.service_uuid.uuid
+        kpi_slice_id = request.slice_id.slice_uuid.uuid
+        kpi_connection_id = request.connection_id.connection_uuid.uuid
+        kpi_link_id = request.link_id.link_uuid.uuid
+        if request.kpi_id.kpi_id.uuid != "":
+            response.kpi_id.uuid = request.kpi_id.kpi_id.uuid
+            # Here the code to modify an existing kpi
+        else:
+            data = self.management_db.insert_KPI(
+                kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id,
+                kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id)
+            response.kpi_id.uuid = str(data)
+        return response
+
+    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
+    def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty:
+        kpi_id = int(request.kpi_id.uuid)
+        kpi = self.management_db.get_KPI(kpi_id)
+        if kpi:
+            self.management_db.delete_KPI(kpi_id)
+        else:
+            LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id)))
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
+    def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor:
+        kpi_id = request.kpi_id.uuid
+        kpi_db = self.management_db.get_KPI(int(kpi_id))
+        kpiDescriptor = KpiDescriptor()
+        if kpi_db is None:
+            LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id)))
+        else:
+            kpiDescriptor.kpi_description = kpi_db[1]
+            kpiDescriptor.kpi_sample_type = kpi_db[2]
+            kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3])
+            kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4])
+            kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5])
+            
kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) + kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) + kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) + return kpiDescriptor + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: + kpi_descriptor_list = KpiDescriptorList() + data = self.management_db.get_KPIS() + LOGGER.debug(f"data: {data}") + for item in data: + kpi_descriptor = KpiDescriptor() + kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) + kpi_descriptor.kpi_description = item[1] + kpi_descriptor.kpi_sample_type = item[2] + kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) + kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) + kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) + kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) + kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) + kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) + kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) + return kpi_descriptor_list \ No newline at end of file -- GitLab From 90ffb7281d4db0a987d1902640949ad4153f0462 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:25:57 +0000 Subject: [PATCH 014/205] KpiManager test messages file initial version. --- src/kpi_manager/tests/test_messages.py | 69 ++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 src/kpi_manager/tests/test_messages.py diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py new file mode 100644 index 000000000..589d6cb84 --- /dev/null +++ b/src/kpi_manager/tests/test_messages.py @@ -0,0 +1,69 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
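

Before the message helpers, a short sketch of how the descriptors they build round-trip through the service; field names come from kpi_manager.proto, and a connected kpi_manager_client instance (as provided by the test fixtures of PATCH 016) is assumed:

    from common.proto.context_pb2 import Empty

    # list every KPI descriptor registered so far and print its id/description
    descriptor_list = kpi_manager_client.GetKpiDescriptorList(Empty())
    for descriptor in descriptor_list.kpi_descriptor_list:
        print(descriptor.kpi_id.kpi_id.uuid, descriptor.kpi_description)
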
+ +from common.proto import kpi_manager_pb2 +from common.proto.kpi_sample_types_pb2 import KpiSampleType + +def kpi_id(): + _kpi_id = kpi_manager_pb2.KpiId() + _kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member + return _kpi_id + +def create_kpi_request(kpi_id_str): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) + _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) + return _create_kpi_request + +def create_kpi_request_b(): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member + return _create_kpi_request + +def create_kpi_request_c(): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member + return _create_kpi_request + +def create_kpi_request_d(): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member + return _create_kpi_request + +def kpi_descriptor_list(): + _kpi_descriptor_list = kpi_manager_pb2.KpiDescriptorList() + return _kpi_descriptor_list \ No newline at end of file -- GitLab From 7fdc29ac02f3a8dc0f35b5fb1d8cca0af3edf4da Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:28:35 +0000 Subject: [PATCH 015/205] The methods moved to KpiManagerMessages file are removed. 
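

The removed helpers were re-created in src/kpi_manager/tests/test_messages.py (PATCH 014); the unit tests added in PATCH 016 import them from the new location:

    from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, create_kpi_request_c, create_kpi_request_d
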
--- src/monitoring/tests/Messages.py | 102 +++++++++++++++---------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/src/monitoring/tests/Messages.py b/src/monitoring/tests/Messages.py index a56207d9a..23f4db017 100644 --- a/src/monitoring/tests/Messages.py +++ b/src/monitoring/tests/Messages.py @@ -17,54 +17,54 @@ from common.proto import monitoring_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.tools.timestamp.Converters import timestamp_utcnow_to_float -def kpi_id(): - _kpi_id = monitoring_pb2.KpiId() - _kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member - return _kpi_id - -def create_kpi_request(kpi_id_str): - _create_kpi_request = monitoring_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) - _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) - return _create_kpi_request - -def create_kpi_request_b(): - _create_kpi_request = monitoring_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member - return _create_kpi_request - -def create_kpi_request_c(): - _create_kpi_request = monitoring_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member - return _create_kpi_request - -def create_kpi_request_d(): - _create_kpi_request = monitoring_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member - return _create_kpi_request +# def kpi_id(): +# _kpi_id = 
monitoring_pb2.KpiId() +# _kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member +# return _kpi_id + +# def create_kpi_request(kpi_id_str): +# _create_kpi_request = monitoring_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) +# return _create_kpi_request + +# def create_kpi_request_b(): +# _create_kpi_request = monitoring_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member +# return _create_kpi_request + +# def create_kpi_request_c(): +# _create_kpi_request = monitoring_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member +# return _create_kpi_request + +# def create_kpi_request_d(): +# _create_kpi_request = monitoring_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member +# return _create_kpi_request def monitor_kpi_request(kpi_uuid, monitoring_window_s, sampling_rate_s): _monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() @@ -80,10 +80,10 @@ def include_kpi_request(kpi_id): _include_kpi_request.kpi_value.floatVal = 500*random() # pylint: disable=maybe-no-member return _include_kpi_request -def kpi_descriptor_list(): - _kpi_descriptor_list = monitoring_pb2.KpiDescriptorList() +# def kpi_descriptor_list(): +# _kpi_descriptor_list = monitoring_pb2.KpiDescriptorList() - return _kpi_descriptor_list +# return 
_kpi_descriptor_list def kpi_query(kpi_id_list): _kpi_query = monitoring_pb2.KpiQuery() -- GitLab From 7386b351cb42c83fd428d35129dbb44a54e5bbaf Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:29:55 +0000 Subject: [PATCH 016/205] Blank line from the top is removed. --- src/kpi_manager/tests/test_unitary.py | 267 ++++++++++++++++++++++++++ 1 file changed, 267 insertions(+) create mode 100644 src/kpi_manager/tests/test_unitary.py diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py new file mode 100644 index 000000000..39d2b2874 --- /dev/null +++ b/src/kpi_manager/tests/test_unitary.py @@ -0,0 +1,267 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os, pytest +import logging, json + +from apscheduler.schedulers.background import BackgroundScheduler + +from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, DeviceOperationalStatusEnum, EventTypeEnum, DeviceEvent, Device, Empty, Topology, TopologyId +from common.Constants import ServiceNameEnum +from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME, ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) +from common.tests.MockServicerImpl_Context import MockServicerImpl_Context +from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server +from common.proto.kpi_sample_types_pb2 import KpiSampleType +from common.tools.object_factory.Context import json_context, json_context_id +from common.tools.object_factory.Topology import json_topology, json_topology_id +# from common.proto.monitoring_pb2 import KpiId, KpiDescriptor, SubsDescriptor, SubsList, AlarmID, \ +# AlarmDescriptor, AlarmList, KpiDescriptorList, SubsResponse, AlarmResponse, RawKpiTable #, Kpi, KpiList +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList + + +from device.service.driver_api.DriverFactory import DriverFactory +from device.service.driver_api.DriverInstanceCache import DriverInstanceCache +from device.service.DeviceService import DeviceService +from device.client.DeviceClient import DeviceClient + +from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, create_kpi_request_c, create_kpi_request_d +# from monitoring.service.MonitoringService import MonitoringService +from kpi_manager.service.KpiManagerService import KpiManagerService +# from monitoring.client.MonitoringClient import MonitoringClient +from kpi_manager.client.KpiManagerClient import KpiManagerClient + +from monitoring.service.ManagementDBTools import ManagementDB +from monitoring.service.MetricsDBTools import MetricsDB +from monitoring.service.NameMapping import NameMapping + +########################### +# Tests Setup +########################### + +LOCAL_HOST = '127.0.0.1' +MOCKSERVICE_PORT = 10000 + +KPIMANAGER_SERVICE_PORT = MOCKSERVICE_PORT + 
get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # avoid privileged ports +os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(KPIMANAGER_SERVICE_PORT) + +METRICSDB_HOSTNAME = os.environ.get('METRICSDB_HOSTNAME') + +LOGGER = logging.getLogger(__name__) + +class MockContextService(GenericGrpcService): + # Mock Service implementing Context to simplify unitary tests of Monitoring + + def __init__(self, bind_port: Union[str, int]) -> None: + super().__init__(bind_port, LOCAL_HOST, enable_health_servicer=False, cls_name='MockService') + + # pylint: disable=attribute-defined-outside-init + def install_servicers(self): + self.context_servicer = MockServicerImpl_Context() + add_ContextServiceServicer_to_server(self.context_servicer, self.server) + +@pytest.fixture(scope='session') +def context_service(): + LOGGER.info('Initializing MockContextService...') + _service = MockContextService(MOCKSERVICE_PORT) + _service.start() + + LOGGER.info('Yielding MockContextService...') + yield _service + + LOGGER.info('Terminating MockContextService...') + _service.context_servicer.msg_broker.terminate() + _service.stop() + + LOGGER.info('Terminated MockContextService...') + +@pytest.fixture(scope='session') +def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing ContextClient...') + _client = ContextClient() + + LOGGER.info('Yielding ContextClient...') + yield _client + + LOGGER.info('Closing ContextClient...') + _client.close() + + LOGGER.info('Closed ContextClient...') + +@pytest.fixture(scope='session') +def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceService...') + driver_factory = DriverFactory(DRIVERS) + driver_instance_cache = DriverInstanceCache(driver_factory) + _service = DeviceService(driver_instance_cache) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding DeviceService...') + yield _service + + LOGGER.info('Terminating DeviceService...') + _service.stop() + + LOGGER.info('Terminated DeviceService...') + +@pytest.fixture(scope='session') +def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceClient...') + _client = DeviceClient() + + LOGGER.info('Yielding DeviceClient...') + yield _client + + LOGGER.info('Closing DeviceClient...') + _client.close() + + LOGGER.info('Closed DeviceClient...') + +@pytest.fixture(scope='session') +def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceClient...') + _client = DeviceClient() + + LOGGER.info('Yielding DeviceClient...') + yield _client + + LOGGER.info('Closing DeviceClient...') + _client.close() + + LOGGER.info('Closed DeviceClient...') + +# This fixture will be requested by test cases and last during testing session +@pytest.fixture(scope='session') +def kpi_manager_service( + context_service : MockContextService, # pylint: disable=redefined-outer-name,unused-argument + device_service : DeviceService # pylint: disable=redefined-outer-name,unused-argument + ): + LOGGER.info('Initializing KpiManagerService...') + name_mapping = NameMapping() + # _service = MonitoringService(name_mapping) + 
_service = KpiManagerService(name_mapping) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding KpiManagerService...') + yield _service + + LOGGER.info('Terminating KpiManagerService...') + _service.stop() + + LOGGER.info('Terminated KpiManagerService...') + +# This fixture will be requested by test cases and last during testing session. +# The client requires the server, so client fixture has the server as dependency. +# def monitoring_client(monitoring_service : MonitoringService): (Add for better understanding) +@pytest.fixture(scope='session') +def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing KpiManagerClient...') + _client = KpiManagerClient() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding KpiManagerClient...') + yield _client + + LOGGER.info('Closing KpiManagerClient...') + _client.close() + + LOGGER.info('Closed KpiManagerClient...') + +@pytest.fixture(scope='session') +def management_db(): + _management_db = ManagementDB('monitoring.db') + return _management_db + +@pytest.fixture(scope='session') +def metrics_db(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name + return monitoring_service.monitoring_servicer.metrics_db + +# This function os not clear to me (Changes should me made before execution) +@pytest.fixture(scope='session') +def metrics_db(monitoring_service : MonitoringService): # pylint: disable=redefined-outer-name + return monitoring_service.monitoring_servicer.metrics_db + #_metrics_db = MetricsDBTools.MetricsDB( + # METRICSDB_HOSTNAME, METRICSDB_ILP_PORT, METRICSDB_REST_PORT, METRICSDB_TABLE_MONITORING_KPIS) + #return _metrics_db + +@pytest.fixture(scope='session') +def subs_scheduler(): + _scheduler = BackgroundScheduler(executors={'processpool': ProcessPoolExecutor(max_workers=20)}) + _scheduler.start() + return _scheduler + +def ingestion_data(kpi_id_int): + # pylint: disable=redefined-outer-name,unused-argument + metrics_db = MetricsDB('localhost', '9009', '9000', 'monitoring') + + kpiSampleType = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + kpiSampleType_name = KpiSampleType.Name(kpiSampleType).upper().replace('KPISAMPLETYPE_', '') + for _ in range(50): + kpiSampleType = kpiSampleType_name + kpiId = kpi_id_int + deviceId = 'DEV'+ str(kpi_id_int) + endpointId = 'END' + str(kpi_id_int) + serviceId = 'SERV' + str(kpi_id_int) + sliceId = 'SLC' + str(kpi_id_int) + connectionId = 'CON' + str(kpi_id_int) + time_stamp = timestamp_utcnow_to_float() + kpi_value = 500*random() + + metrics_db.write_KPI(time_stamp, kpiId, kpiSampleType, deviceId, endpointId, serviceId, sliceId, connectionId, + kpi_value) + sleep(0.1) + +################################################## +# Prepare Environment, should be the first test +################################################## + +def test_prepare_environment( + context_client : ContextClient, # pylint: disable=redefined-outer-name,unused-argument +): + context_id = json_context_id(DEFAULT_CONTEXT_NAME) + context_client.SetContext(Context(**json_context(DEFAULT_CONTEXT_NAME))) + context_client.SetTopology(Topology(**json_topology(DEFAULT_TOPOLOGY_NAME, context_id=context_id))) + +########################### +# Tests Implementation +########################### + +# Test case that makes use of client fixture to test server's CreateKpi method +def test_set_kpi(kpi_manager_client): # pylint: 
disable=redefined-outer-name + # make call to server + LOGGER.warning('test_create_kpi requesting') + for i in range(3): + response = kpi_manager_client.SetKpi(create_kpi_request(str(i+1))) + LOGGER.debug(str(response)) + assert isinstance(response, KpiId) + +# Test case that makes use of client fixture to test server's DeleteKpi method +def test_delete_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name + # make call to server + LOGGER.warning('delete_kpi requesting') + response = kpi_manager_client.SetKpi(create_kpi_request('4')) + response = kpi_manager_client.DeleteKpi(response) + LOGGER.debug(str(response)) + assert isinstance(response, Empty) + +# Test case that makes use of client fixture to test server's GetKpiDescriptor method +def test_get_kpi_descriptor_list(kpi_manager_client): # pylint: disable=redefined-outer-name + LOGGER.warning('test_getkpidescritor_kpi begin') + response = kpi_manager_client.GetKpiDescriptorList(Empty()) + LOGGER.debug(str(response)) + assert isinstance(response, KpiDescriptorList) -- GitLab From 124d09dec46e2477c23304605e3628200efc84a7 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 08:30:45 +0000 Subject: [PATCH 017/205] File name changed from "KpiManagerServer" to "KpiManagerService" --- src/kpi_manager/server/KpiManagerServer.py | 117 --------------------- 1 file changed, 117 deletions(-) delete mode 100644 src/kpi_manager/server/KpiManagerServer.py diff --git a/src/kpi_manager/server/KpiManagerServer.py b/src/kpi_manager/server/KpiManagerServer.py deleted file mode 100644 index d42ce14eb..000000000 --- a/src/kpi_manager/server/KpiManagerServer.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging, os, grpc -from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method -from common.proto.context_pb2 import Empty - -from common.Constants import ServiceNameEnum -from common.Settings import get_service_port_grpc -from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server -from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer -from monitoring.service.NameMapping import NameMapping - -from common.proto.kpi_manager_pb2 import kpiDescriptor, KpiId, KpiDescriptorList -from monitoring.service import ManagementDBTools - -from common.tools.service.GenericGrpcService import GenericGrpcService - -LOGGER = logging.getLogger(__name__) - -METRICS_POOL = MetricsPool('Monitoring', 'RPC') - -class KpiManagerServer(KpiManagerServiceServicer): - def __init__(self, cls_name: str = __name__): - LOGGER.info('Init KpiManagerService') - port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # port updated - GenericGrpcService(port, cls_name = cls_name) # class inheretence was removed - - # Init sqlite monitoring db - self.management_db = ManagementDBTools.ManagementDB('monitoring.db') # why monitoring.db here??? 
- LOGGER.info('MetricsDB initialized --- KPI Manager Service') - - def install_servicers(self): - # There is no need to create the "MonitoringServiceServicerImpl" instance because actual class - # implementation exists in the same class. - add_KpiManagerServiceServicer_to_server(KpiManagerServer(), self.server) - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SetKpi( - self, request: KpiDescriptor, grpc_context: grpc.ServicerContext - ) -> KpiId: - response = KpiId() - kpi_description = request.kpi_description - kpi_sample_type = request.kpi_sample_type - kpi_device_id = request.device_id.device_uuid.uuid - kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid - kpi_service_id = request.service_id.service_uuid.uuid - kpi_slice_id = request.slice_id.slice_uuid.uuid - kpi_connection_id = request.connection_id.connection_uuid.uuid - kpi_link_id = request.link_id.link_uuid.uuid - if request.kpi_id.kpi_id.uuid != "": - response.kpi_id.uuid = request.kpi_id.kpi_id.uuid - # Here the code to modify an existing kpi - else: - data = self.management_db.insert_KPI( - kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, - kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id) - response.kpi_id.uuid = str(data) - return response - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: - kpi_id = int(request.kpi_id.uuid) - kpi = self.management_db.get_KPI(kpi_id) - if kpi: - self.management_db.delete_KPI(kpi_id) - else: - LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) - return Empty() - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: - kpi_id = request.kpi_id.uuid - kpi_db = self.management_db.get_KPI(int(kpi_id)) - kpiDescriptor = KpiDescriptor() - if kpi_db is None: - LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) - else: - kpiDescriptor.kpi_description = kpi_db[1] - kpiDescriptor.kpi_sample_type = kpi_db[2] - kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) - kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) - kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) - kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) - kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) - kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) - return kpiDescriptor - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: - kpi_descriptor_list = KpiDescriptorList() - data = self.management_db.get_KPIS() - LOGGER.debug(f"data: {data}") - for item in data: - kpi_descriptor = KpiDescriptor() - kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) - kpi_descriptor.kpi_description = item[1] - kpi_descriptor.kpi_sample_type = item[2] - kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) - kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) - kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) - kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) - kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) - kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) - kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) - return kpi_descriptor_list \ No newline at end of file -- GitLab From 71b8b5362b17711d2a4bc2eeaf0c5c17b7a2b827 Mon Sep 17 00:00:00 
2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 10:33:29 +0000 Subject: [PATCH 018/205] imports of KpiId, KpiDescriptor, and KpiDescriptorList are changed from monitoring_pb2 to kpi_manager_pb2 --- src/monitoring/client/MonitoringClient.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/monitoring/client/MonitoringClient.py b/src/monitoring/client/MonitoringClient.py index 751ff6e38..493e96ca8 100644 --- a/src/monitoring/client/MonitoringClient.py +++ b/src/monitoring/client/MonitoringClient.py @@ -20,8 +20,9 @@ from common.Settings import get_service_host, get_service_port_grpc from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.monitoring_pb2 import Kpi, KpiDescriptor, KpiId, MonitorKpiRequest, \ - KpiDescriptorList, KpiQuery, KpiList, SubsDescriptor, SubscriptionID, SubsList, \ +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.proto.monitoring_pb2 import Kpi, MonitorKpiRequest, \ + KpiQuery, KpiList, SubsDescriptor, SubscriptionID, SubsList, \ SubsResponse, AlarmDescriptor, AlarmID, AlarmList, AlarmResponse, AlarmSubscription, RawKpiTable from common.proto.monitoring_pb2_grpc import MonitoringServiceStub -- GitLab From 3e7573750600b730873138e2a765a0cdabd992a7 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 10:35:10 +0000 Subject: [PATCH 019/205] Spelling correction from "KPIMANGER" to "KPIMANAGER" --- src/common/Constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/common/Constants.py b/src/common/Constants.py index 4a0f3a226..ee737d2bd 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -74,7 +74,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.ZTP .value : 5050, ServiceNameEnum.POLICY .value : 6060, ServiceNameEnum.MONITORING .value : 7070, - ServiceNameEnum.KPIMANGER .value : 7071, + ServiceNameEnum.KPIMANAGER .value : 7071, ServiceNameEnum.DLT .value : 8080, ServiceNameEnum.NBI .value : 9090, ServiceNameEnum.L3_CAD .value : 10001, -- GitLab From 9f70a96cf4530bc074f98f4e030e6dca5e94fa57 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 22 Mar 2024 10:35:41 +0000 Subject: [PATCH 020/205] file permissions changed --- src/kpi_manager/client/KpiManagerClient.py | 0 src/kpi_manager/service/KpiManagerService.py | 0 src/kpi_manager/tests/test_messages.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 src/kpi_manager/client/KpiManagerClient.py mode change 100644 => 100755 src/kpi_manager/service/KpiManagerService.py mode change 100644 => 100755 src/kpi_manager/tests/test_messages.py diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py old mode 100644 new mode 100755 diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py old mode 100644 new mode 100755 diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py old mode 100644 new mode 100755 -- GitLab From 95ebbc734aeb400fc7b453878733eb5d7489a963 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:01:45 +0000 Subject: [PATCH 021/205] test script for Kpi Manager is added --- scripts/run_tests_locally_kpi_manager.sh | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100755 scripts/run_tests_locally_kpi_manager.sh diff --git a/scripts/run_tests_locally_kpi_manager.sh
b/scripts/run_tests_locally_kpi_manager.sh new file mode 100755 index 000000000..eeeec4bb2 --- /dev/null +++ b/scripts/run_tests_locally_kpi_manager.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +RCFILE=$PROJECTDIR/coverage/.coveragerc +coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ + kpi_manager/tests/test_unitary.py + +# python3 kpi_manager/tests/test_unitary.py \ No newline at end of file -- GitLab From 50bbeb104476a21c4b981ae37ab75566d48438f4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:05:36 +0000 Subject: [PATCH 022/205] Docker build command for Kpi Manager is added and Docker file paths are updated to correct directories. --- src/build.sh | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/build.sh b/src/build.sh index b1a7d299e..9ae91ef10 100755 --- a/src/build.sh +++ b/src/build.sh @@ -18,16 +18,20 @@ cd $(dirname $0) echo "BUILD context" -context/genproto.sh +# context/genproto.sh # genproto.sh file doesn't exist docker build -t "context:develop" -f context/Dockerfile --quiet . -docker build -t "context:test" -f context/tests/Dockerfile --quiet . +# docker build -t "context:test" -f context/tests/Dockerfile --quiet . # Dockerfile doesn't exist -cd monitoring -./genproto.sh -cd .. +# genproto.sh file doesn't exist +# cd monitoring +# ./genproto.sh +# cd .. echo "BUILD monitoring" docker build -t "monitoring:dockerfile" -f monitoring/Dockerfile . +echo "BUILD kpi manager" +docker build -t "kpi_manager:dockerfile" -f kpi_manager/Dockerfile . 
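+# The freshly built image can be checked with: docker image ls | grep kpi_manager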
+ echo "Prune unused images" docker image prune --force -- GitLab From e28b5ce2fcde8a28564a864c5d221d4e3e57ed63 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:07:58 +0000 Subject: [PATCH 023/205] Docker name and ports are added according to Kpi Manager Service --- src/start.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/start.sh b/src/start.sh index 32a016cc0..c3b5d8375 100755 --- a/src/start.sh +++ b/src/start.sh @@ -15,4 +15,4 @@ docker network create -d bridge teraflowbridge -docker run -d -p 7070:7070 --name monitoring --network=teraflowbridge monitoring:dockerfile +docker run -d -p 7071:7071 --name kpi_manager --network=teraflowbridge kpi_manager:dockerfile -- GitLab From f19ae7505ffeb37b9b4c128ac7440ebe504e8aa7 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:09:28 +0000 Subject: [PATCH 024/205] monitoring docker command is added for the reference --- src/start.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/start.sh b/src/start.sh index c3b5d8375..8c3fafe6e 100755 --- a/src/start.sh +++ b/src/start.sh @@ -15,4 +15,5 @@ docker network create -d bridge teraflowbridge -docker run -d -p 7071:7071 --name kpi_manager --network=teraflowbridge kpi_manager:dockerfile +# docker run -d -p 7070:7070 --name monitoring --network=teraflowbridge monitoring:dockerfile +docker run -d -p 7071:7071 --name kpi_manager --network=teraflowbridge kpi_manager:dockerfile \ No newline at end of file -- GitLab From cb1bddd6a5516fcaa1fb035ec276766f8246bf21 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:10:00 +0000 Subject: [PATCH 025/205] Kpi Manager Service docker file is created --- src/kpi_manager/Dockerfile | 71 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 src/kpi_manager/Dockerfile diff --git a/src/kpi_manager/Dockerfile b/src/kpi_manager/Dockerfile new file mode 100644 index 000000000..d3d962b9f --- /dev/null +++ b/src/kpi_manager/Dockerfile @@ -0,0 +1,71 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM python:3.9-slim + +# Install dependencies +RUN apt-get --yes --quiet --quiet update && \ + apt-get --yes --quiet --quiet install wget g++ git && \ + rm -rf /var/lib/apt/lists/* + +# Set Python to show logs as they occur +ENV PYTHONUNBUFFERED=0 + +# Download the gRPC health probe +RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \ + wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ + chmod +x /bin/grpc_health_probe + +# Get generic Python packages +RUN python3 -m pip install --upgrade pip +RUN python3 -m pip install --upgrade setuptools wheel +RUN python3 -m pip install --upgrade pip-tools + +# Get common Python packages +# Note: this step enables sharing the previous Docker build steps among all the Python components +WORKDIR /var/teraflow +COPY common_requirements.in common_requirements.in +RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in +RUN python3 -m pip install -r common_requirements.txt + +# Add common files into working directory +WORKDIR /var/teraflow/common +COPY src/common/. ./ +RUN rm -rf proto + +# Create proto sub-folder, copy .proto files, and generate Python code +RUN mkdir -p /var/teraflow/common/proto +WORKDIR /var/teraflow/common/proto +RUN touch __init__.py +COPY proto/*.proto ./ +RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto +RUN rm *.proto +RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \; + +# Create component sub-folders, get specific Python packages +RUN mkdir -p /var/teraflow/kpi_manager +WORKDIR /var/teraflow/kpi_manager +COPY src/kpi_manager/requirements.in requirements.in +RUN pip-compile --quiet --output-file=requirements.txt requirements.in +RUN python3 -m pip install -r requirements.txt + +# Add component files into working directory +WORKDIR /var/teraflow +COPY src/context/. context/ +COPY src/device/. device/ +COPY src/monitoring/. monitoring/ +COPY src/kpi_manager/. kpi_manager/ + +# Start the service +ENTRYPOINT ["python", "-m", "kpi_manager.service"] -- GitLab From 82cb396dad0a10eda54d73d70155f256b6f1f7e1 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:16:43 +0000 Subject: [PATCH 026/205] temporary requirements file is added. --- src/kpi_manager/requirements.in | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 src/kpi_manager/requirements.in diff --git a/src/kpi_manager/requirements.in b/src/kpi_manager/requirements.in new file mode 100644 index 000000000..a6183b57e --- /dev/null +++ b/src/kpi_manager/requirements.in @@ -0,0 +1,24 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
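+# NOTE: provisional pins copied from the monitoring component; to be revisited once the KPI Manager's own dependency set is settled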
+ +anytree==2.8.0 +APScheduler==3.10.1 +influx-line-protocol==0.1.4 +psycopg2-binary==2.9.3 +python-dateutil==2.8.2 +python-json-logger==2.0.2 +questdb==1.0.1 +requests==2.27.1 +xmltodict==0.12.0 +# grpc_health_probe==0.2.0 #getting error on this library \ No newline at end of file -- GitLab From 2d868d953730ab675d629aadb74d69d297c61d89 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:17:13 +0000 Subject: [PATCH 027/205] __init__.py is added in service directory --- src/kpi_manager/service/__init__.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 src/kpi_manager/service/__init__.py diff --git a/src/kpi_manager/service/__init__.py b/src/kpi_manager/service/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_manager/service/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From d1d7b89af69076bad27762048422a573eab6efe0 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:17:48 +0000 Subject: [PATCH 028/205] __main__.py file added in Kpi Manager service directory. --- src/kpi_manager/service/__main__.py | 107 ++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 src/kpi_manager/service/__main__.py diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py new file mode 100644 index 000000000..9f0e53246 --- /dev/null +++ b/src/kpi_manager/service/__main__.py @@ -0,0 +1,107 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
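+# Entry point for the KPI Manager service: starts the gRPC server and a Prometheus metrics endpoint, then monitors device events (see start_kpi_manager below)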
+ +import logging, signal, sys, threading, time +from prometheus_client import start_http_server +from common.Constants import ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port, + wait_for_environment_variables) +from common.proto import monitoring_pb2 +from monitoring.service.EventTools import EventsDeviceCollector # import updated +from monitoring.service.NameMapping import NameMapping # import updated +# from .MonitoringService import MonitoringService +from .KpiManagerService import KpiManagerService + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def start_kpi_manager(name_mapping : NameMapping): + LOGGER.info('Start Monitoring...',) + + events_collector = EventsDeviceCollector(name_mapping) + events_collector.start() + + # TODO: redesign this method to be more clear and clean + + # Iterate while terminate is not set + while not terminate.is_set(): + list_new_kpi_ids = events_collector.listen_events() + + # Monitor Kpis + if bool(list_new_kpi_ids): + for kpi_id in list_new_kpi_ids: + # Create Monitor Kpi Requests + monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() + monitor_kpi_request.kpi_id.CopyFrom(kpi_id) + monitor_kpi_request.monitoring_window_s = 86400 + monitor_kpi_request.sampling_rate_s = 10 + events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) + + time.sleep(0.5) # let other tasks run; do not overload CPU + else: + # Terminate is set, looping terminates + LOGGER.warning("Stopping execution...") + + events_collector.start() + +def main(): + global LOGGER # pylint: disable=global-statement + + log_level = get_log_level() + logging.basicConfig(level=log_level) + LOGGER = logging.getLogger(__name__) + + wait_for_environment_variables([ + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + ]) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + name_mapping = NameMapping() + # Starting monitoring service + # grpc_service = MonitoringService(name_mapping) + # grpc_service.start() + # start_monitoring(name_mapping) + + grpc_service = KpiManagerService(name_mapping) + grpc_service.start() + + start_kpi_manager(name_mapping) + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) -- GitLab From 343483c835443c26603ca23b152294225fe0fbf3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:19:02 +0000 Subject: [PATCH 029/205] KpiManagerService file restructured into generic KpiManagerService file.
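With this change the component follows the common service pattern: the class below only binds the servicer implementation to a GenericGrpcService. A minimal usage sketch (imports as in this series; assumes the caller builds the NameMapping, as __main__.py does):

    from monitoring.service.NameMapping import NameMapping
    from kpi_manager.service.KpiManagerService import KpiManagerService

    name_mapping = NameMapping()
    grpc_service = KpiManagerService(name_mapping)  # port resolved from ServiceNameEnum.KPIMANAGER
    grpc_service.start()                            # GenericGrpcService calls install_servicers() and serves
    # ... run until a termination signal is received ...
    grpc_service.stop()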
--- src/kpi_manager/service/KpiManagerService.py | 106 ++----------------- 1 file changed, 10 insertions(+), 96 deletions(-) diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py index d42ce14eb..dbbcec2cf 100755 --- a/src/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_manager/service/KpiManagerService.py @@ -12,106 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, os, grpc -from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method -from common.proto.context_pb2 import Empty - from common.Constants import ServiceNameEnum from common.Settings import get_service_port_grpc +# from common.proto.monitoring_pb2_grpc import add_MonitoringServiceServicer_to_server from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server -from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer -from monitoring.service.NameMapping import NameMapping - -from common.proto.kpi_manager_pb2 import kpiDescriptor, KpiId, KpiDescriptorList -from monitoring.service import ManagementDBTools - from common.tools.service.GenericGrpcService import GenericGrpcService +from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl +# from monitoring.service.MonitoringServiceServicerImpl import MonitoringServiceServicerImpl +from monitoring.service.NameMapping import NameMapping -LOGGER = logging.getLogger(__name__) - -METRICS_POOL = MetricsPool('Monitoring', 'RPC') - -class KpiManagerServer(KpiManagerServiceServicer): - def __init__(self, cls_name: str = __name__): - LOGGER.info('Init KpiManagerService') - port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # port updated - GenericGrpcService(port, cls_name = cls_name) # class inheretence was removed - - # Init sqlite monitoring db - self.management_db = ManagementDBTools.ManagementDB('monitoring.db') # why monitoring.db here??? - LOGGER.info('MetricsDB initialized --- KPI Manager Service') +class KpiManagerService(GenericGrpcService): + def __init__(self, name_mapping : NameMapping, cls_name: str = __name__) -> None: + port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) + super().__init__(port, cls_name=cls_name) + self.kpiManagerService_servicer = KpiManagerServiceServicerImpl(name_mapping) def install_servicers(self): - # There is no need to create the "MonitoringServiceServicerImpl" instance because actual class - # implementation exists in the same class. 
- add_KpiManagerServiceServicer_to_server(KpiManagerServer(), self.server) - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SetKpi( - self, request: KpiDescriptor, grpc_context: grpc.ServicerContext - ) -> KpiId: - response = KpiId() - kpi_description = request.kpi_description - kpi_sample_type = request.kpi_sample_type - kpi_device_id = request.device_id.device_uuid.uuid - kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid - kpi_service_id = request.service_id.service_uuid.uuid - kpi_slice_id = request.slice_id.slice_uuid.uuid - kpi_connection_id = request.connection_id.connection_uuid.uuid - kpi_link_id = request.link_id.link_uuid.uuid - if request.kpi_id.kpi_id.uuid != "": - response.kpi_id.uuid = request.kpi_id.kpi_id.uuid - # Here the code to modify an existing kpi - else: - data = self.management_db.insert_KPI( - kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, - kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id) - response.kpi_id.uuid = str(data) - return response - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: - kpi_id = int(request.kpi_id.uuid) - kpi = self.management_db.get_KPI(kpi_id) - if kpi: - self.management_db.delete_KPI(kpi_id) - else: - LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) - return Empty() - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: - kpi_id = request.kpi_id.uuid - kpi_db = self.management_db.get_KPI(int(kpi_id)) - kpiDescriptor = KpiDescriptor() - if kpi_db is None: - LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) - else: - kpiDescriptor.kpi_description = kpi_db[1] - kpiDescriptor.kpi_sample_type = kpi_db[2] - kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) - kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) - kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) - kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) - kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) - kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) - return kpiDescriptor - - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: - kpi_descriptor_list = KpiDescriptorList() - data = self.management_db.get_KPIS() - LOGGER.debug(f"data: {data}") - for item in data: - kpi_descriptor = KpiDescriptor() - kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) - kpi_descriptor.kpi_description = item[1] - kpi_descriptor.kpi_sample_type = item[2] - kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) - kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) - kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) - kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) - kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) - kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) - kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) - return kpi_descriptor_list \ No newline at end of file + add_KpiManagerServiceServicer_to_server(self.kpiManagerService_servicer, self.server) -- GitLab From 6e9b010e3a52f467f88a1c4c3181c44171295664 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:19:36 +0000 Subject: [PATCH 030/205] KpiManagerServiceImpl file is created. 
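For reference, a minimal client-side sketch exercising the new servicer (a sketch only: it assumes a reachable KPI Manager instance, and the description string is a made-up example):

    from common.proto.kpi_manager_pb2 import KpiDescriptor
    from kpi_manager.client.KpiManagerClient import KpiManagerClient

    client = KpiManagerClient()                    # host/port resolved via ServiceNameEnum.KPIMANAGER
    descriptor = KpiDescriptor()
    descriptor.kpi_description = 'packets received on DEV1/EP1'  # hypothetical KPI
    kpi_id = client.SetKpi(descriptor)             # ManagementDB assigns and returns the KpiId
    print(client.GetKpiDescriptor(kpi_id))
    client.close()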
--- .../service/KpiManagerServiceServicerImpl.py | 104 ++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 src/kpi_manager/service/KpiManagerServiceServicerImpl.py diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py new file mode 100644 index 000000000..3a40195da --- /dev/null +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -0,0 +1,104 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, grpc +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method +from common.proto.context_pb2 import Empty +from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from monitoring.service.NameMapping import NameMapping +from monitoring.service import ManagementDBTools + + +LOGGER = logging.getLogger(__name__) + +METRICS_POOL = MetricsPool('Monitoring', 'RPC') + +class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): + def __init__(self, name_mapping : NameMapping): + LOGGER.info('Init KpiManagerService') + + # Init sqlite monitoring db + self.management_db = ManagementDBTools.ManagementDB('monitoring.db') # why monitoring.db here??? 
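+ # NOTE (assumption): the monitoring component's SQLite file is reused on purpose so that KPIs already registered by the legacy Monitoring service stay visible to the new KPI Manager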
+ LOGGER.info('MetricsDB initialized --- KPI Manager Service') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SetKpi( + self, request: KpiDescriptor, grpc_context: grpc.ServicerContext + ) -> KpiId: + response = KpiId() + kpi_description = request.kpi_description + kpi_sample_type = request.kpi_sample_type + kpi_device_id = request.device_id.device_uuid.uuid + kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid + kpi_service_id = request.service_id.service_uuid.uuid + kpi_slice_id = request.slice_id.slice_uuid.uuid + kpi_connection_id = request.connection_id.connection_uuid.uuid + kpi_link_id = request.link_id.link_uuid.uuid + if request.kpi_id.kpi_id.uuid != "": + response.kpi_id.uuid = request.kpi_id.kpi_id.uuid + # Here the code to modify an existing kpi + else: + data = self.management_db.insert_KPI( + kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, + kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id) + response.kpi_id.uuid = str(data) + return response + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: + kpi_id = int(request.kpi_id.uuid) + kpi = self.management_db.get_KPI(kpi_id) + if kpi: + self.management_db.delete_KPI(kpi_id) + else: + LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) + return Empty() + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: + kpi_id = request.kpi_id.uuid + kpi_db = self.management_db.get_KPI(int(kpi_id)) + kpiDescriptor = KpiDescriptor() + if kpi_db is None: + LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) + else: + kpiDescriptor.kpi_description = kpi_db[1] + kpiDescriptor.kpi_sample_type = kpi_db[2] + kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) + kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) + kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) + kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) + kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) + kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) + return kpiDescriptor + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: + kpi_descriptor_list = KpiDescriptorList() + data = self.management_db.get_KPIS() + LOGGER.debug(f"data: {data}") + for item in data: + kpi_descriptor = KpiDescriptor() + kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) + kpi_descriptor.kpi_description = item[1] + kpi_descriptor.kpi_sample_type = item[2] + kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) + kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) + kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) + kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) + kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) + kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) + kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) + return kpi_descriptor_list \ No newline at end of file -- GitLab From ab122c221a6b64283f0afd4b21093e48e1fe54c8 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:32:21 +0000 Subject: [PATCH 031/205] These imports were added temporarily --- src/kpi_manager/tests/test_unitary.py | 3 +++ 1 file changed, 3 insertions(+) mode change 100644 => 100755
src/kpi_manager/tests/test_unitary.py diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py old mode 100644 new mode 100755 index 39d2b2874..b75ea5751 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. + +# import sys +# sys.path.append('.') import os, pytest import logging, json -- GitLab From 16666d8b744200e973fa30600342a9053aaada65 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 26 Mar 2024 10:34:08 +0000 Subject: [PATCH 032/205] Imports of Kpi, KpiList and KpiDescriptor are changed to kpi_manager_pb2 file. --- src/monitoring/service/MonitoringServiceServicerImpl.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/monitoring/service/MonitoringServiceServicerImpl.py b/src/monitoring/service/MonitoringServiceServicerImpl.py index 608b0bad9..e98cfa236 100644 --- a/src/monitoring/service/MonitoringServiceServicerImpl.py +++ b/src/monitoring/service/MonitoringServiceServicerImpl.py @@ -20,8 +20,8 @@ from common.proto.context_pb2 import Empty from common.proto.device_pb2 import MonitoringSettings from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.proto.monitoring_pb2_grpc import MonitoringServiceServicer -from common.proto.monitoring_pb2 import AlarmResponse, AlarmDescriptor, AlarmList, SubsList, KpiId, \ - KpiDescriptor, KpiList, KpiQuery, SubsDescriptor, SubscriptionID, AlarmID, KpiDescriptorList, \ +from common.proto.monitoring_pb2 import AlarmResponse, AlarmDescriptor, AlarmList, SubsList, \ + KpiQuery, SubsDescriptor, SubscriptionID, AlarmID, KpiList,\ MonitorKpiRequest, Kpi, AlarmSubscription, SubsResponse, RawKpiTable, RawKpi, RawKpiList from common.tools.timestamp.Converters import timestamp_string_to_float, timestamp_utcnow_to_float from device.client.DeviceClient import DeviceClient @@ -30,6 +30,8 @@ from monitoring.service.AlarmManager import AlarmManager from monitoring.service.NameMapping import NameMapping from monitoring.service.SubscriptionManager import SubscriptionManager +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList + LOGGER = logging.getLogger(__name__) METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME") -- GitLab From a3ca5899e477d26085cb239f14e5fe1d01fb8803 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 27 Mar 2024 06:37:57 +0000 Subject: [PATCH 033/205] after temporary removal of --rcfile file --- scripts/run_tests_locally_kpi_manager.sh | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/scripts/run_tests_locally_kpi_manager.sh b/scripts/run_tests_locally_kpi_manager.sh index eeeec4bb2..8ed855a8e 100755 --- a/scripts/run_tests_locally_kpi_manager.sh +++ b/scripts/run_tests_locally_kpi_manager.sh @@ -17,8 +17,12 @@ PROJECTDIR=`pwd` cd $PROJECTDIR/src -RCFILE=$PROJECTDIR/coverage/.coveragerc -coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ - kpi_manager/tests/test_unitary.py +# RCFILE=$PROJECTDIR/coverage/.coveragerc +# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# kpi_manager/tests/test_unitary.py + +# python3 kpi_manager/tests/test_unitary.py -# python3 kpi_manager/tests/test_unitary.py \ No newline at end of file +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=INFO --verbose \ + kpi_manager/tests/test_unitary.py \ No newline at end of file -- GitLab From
92ea40f1cd78480b7c457444b6a9861c39e935b7 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 27 Mar 2024 06:38:48 +0000 Subject: [PATCH 034/205] removal of unexpected "," --- src/kpi_manager/client/KpiManagerClient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index 6baca7cef..a129cd327 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -19,7 +19,7 @@ from common.Settings import get_service_host, get_service_port_grpc from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList, +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub LOGGER = logging.getLogger(__name__) -- GitLab From 7b88c37fde4204b2bf73ead2b6f0deb0ab4a7e8a Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 27 Mar 2024 06:39:29 +0000 Subject: [PATCH 035/205] __init__.py is added in kpi_manager/client directory --- src/kpi_manager/client/__init__.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 src/kpi_manager/client/__init__.py diff --git a/src/kpi_manager/client/__init__.py b/src/kpi_manager/client/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_manager/client/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ -- GitLab From e50c6e937c4832a868dd5851586d175f8d78c638 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 27 Mar 2024 06:40:27 +0000 Subject: [PATCH 036/205] some missing imports were added and some methods are commented temporarily --- src/kpi_manager/tests/test_unitary.py | 92 ++++++++++++++------------- 1 file changed, 49 insertions(+), 43 deletions(-) diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index b75ea5751..113b26890 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -17,6 +17,7 @@ # sys.path.append('.') import os, pytest import logging, json +from typing import Union from apscheduler.schedulers.background import BackgroundScheduler @@ -33,6 +34,8 @@ from common.tools.object_factory.Topology import json_topology, json_topology_id # from common.proto.monitoring_pb2 import KpiId, KpiDescriptor, SubsDescriptor, SubsList, AlarmID, \ # AlarmDescriptor, AlarmList, KpiDescriptorList, SubsResponse, AlarmResponse, RawKpiTable #, Kpi, KpiList from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.tools.service.GenericGrpcService import GenericGrpcService +from context.client.ContextClient import ContextClient from device.service.driver_api.DriverFactory import DriverFactory @@ -50,6 +53,9 @@ from monitoring.service.ManagementDBTools import ManagementDB from monitoring.service.MetricsDBTools import MetricsDB from monitoring.service.NameMapping import NameMapping +os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' +from device.service.drivers import DRIVERS + ########################### # Tests Setup ########################### @@ -185,49 +191,49 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab LOGGER.info('Closed KpiManagerClient...') -@pytest.fixture(scope='session') -def management_db(): - _management_db = ManagementDB('monitoring.db') - return _management_db - -@pytest.fixture(scope='session') -def metrics_db(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name - return monitoring_service.monitoring_servicer.metrics_db - -# This function os not clear to me (Changes should me made before execution) -@pytest.fixture(scope='session') -def metrics_db(monitoring_service : MonitoringService): # pylint: disable=redefined-outer-name - return monitoring_service.monitoring_servicer.metrics_db - #_metrics_db = MetricsDBTools.MetricsDB( - # METRICSDB_HOSTNAME, METRICSDB_ILP_PORT, METRICSDB_REST_PORT, METRICSDB_TABLE_MONITORING_KPIS) - #return _metrics_db - -@pytest.fixture(scope='session') -def subs_scheduler(): - _scheduler = BackgroundScheduler(executors={'processpool': ProcessPoolExecutor(max_workers=20)}) - _scheduler.start() - return _scheduler - -def ingestion_data(kpi_id_int): - # pylint: disable=redefined-outer-name,unused-argument - metrics_db = MetricsDB('localhost', '9009', '9000', 'monitoring') - - kpiSampleType = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - kpiSampleType_name = KpiSampleType.Name(kpiSampleType).upper().replace('KPISAMPLETYPE_', '') - for _ in range(50): - kpiSampleType = kpiSampleType_name - kpiId = kpi_id_int - deviceId = 'DEV'+ str(kpi_id_int) - endpointId = 'END' + str(kpi_id_int) - serviceId = 'SERV' + str(kpi_id_int) - sliceId = 'SLC' + str(kpi_id_int) - connectionId = 'CON' + str(kpi_id_int) - time_stamp = timestamp_utcnow_to_float() - kpi_value = 500*random() - - metrics_db.write_KPI(time_stamp, kpiId, kpiSampleType, deviceId, endpointId, serviceId, sliceId, 
connectionId, - kpi_value) - sleep(0.1) +# @pytest.fixture(scope='session') +# def management_db(): +# _management_db = ManagementDB('monitoring.db') +# return _management_db + +# @pytest.fixture(scope='session') +# def metrics_db(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name +# return monitoring_service.monitoring_servicer.metrics_db + +# # This function os not clear to me (Changes should me made before execution) +# @pytest.fixture(scope='session') +# def metrics_db(monitoring_service : MonitoringService): # pylint: disable=redefined-outer-name +# return monitoring_service.monitoring_servicer.metrics_db +# #_metrics_db = MetricsDBTools.MetricsDB( +# # METRICSDB_HOSTNAME, METRICSDB_ILP_PORT, METRICSDB_REST_PORT, METRICSDB_TABLE_MONITORING_KPIS) +# #return _metrics_db + +# @pytest.fixture(scope='session') +# def subs_scheduler(): +# _scheduler = BackgroundScheduler(executors={'processpool': ProcessPoolExecutor(max_workers=20)}) +# _scheduler.start() +# return _scheduler + +# def ingestion_data(kpi_id_int): +# # pylint: disable=redefined-outer-name,unused-argument +# metrics_db = MetricsDB('localhost', '9009', '9000', 'monitoring') + +# kpiSampleType = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# kpiSampleType_name = KpiSampleType.Name(kpiSampleType).upper().replace('KPISAMPLETYPE_', '') +# for _ in range(50): +# kpiSampleType = kpiSampleType_name +# kpiId = kpi_id_int +# deviceId = 'DEV'+ str(kpi_id_int) +# endpointId = 'END' + str(kpi_id_int) +# serviceId = 'SERV' + str(kpi_id_int) +# sliceId = 'SLC' + str(kpi_id_int) +# connectionId = 'CON' + str(kpi_id_int) +# time_stamp = timestamp_utcnow_to_float() +# kpi_value = 500*random() + +# metrics_db.write_KPI(time_stamp, kpiId, kpiSampleType, deviceId, endpointId, serviceId, sliceId, connectionId, +# kpi_value) +# sleep(0.1) ################################################## # Prepare Environment, should be the first test -- GitLab From e8f6fdba780302ed14035f43a6634053b5d1bf02 Mon Sep 17 00:00:00 2001 From: Ricard Vilalta Date: Mon, 8 Apr 2024 08:37:07 +0000 Subject: [PATCH 037/205] Upload New File --- kpi_manager.proto | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 kpi_manager.proto diff --git a/kpi_manager.proto b/kpi_manager.proto new file mode 100644 index 000000000..1ef447fb3 --- /dev/null +++ b/kpi_manager.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; +package monitoring; + +import "context.proto"; +import "kpi_sample_types.proto"; + +service KpiManagerService { + rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} + rpc SetKpiDescriptor (KpiDescriptor ) returns (KpiId ) {} + rpc DeleteKpiDescriptor(KpiId ) returns (context.Empty ) {} + rpc SelectKpiDescriptor(KpiDescriptorFilter) returns (KpiDescriptorList) {} +} + +message KpiId { + context.Uuid kpi_id = 1; +} + +message KpiDescriptor { + KpiId kpi_id = 1; + string kpi_description = 2; + kpi_sample_types.KpiSampleType kpi_sample_type = 3; + context.DeviceId device_id = 4; + context.EndPointId endpoint_id = 5; + context.ServiceId service_id = 6; + context.SliceId slice_id = 7; + context.ConnectionId connection_id = 8; + context.LinkId link_id = 9; +} + +message KpiDescriptorFilter { + // KPI Descriptors that fulfill the filter are those that match ALL the following fields. + // An empty list means: any value is accepted. 
+ // All fields empty means: list all KPI Descriptors + repeated KpiId kpi_id = 1; + repeated kpi_sample_types.KpiSampleType kpi_sample_type = 2; + repeated context.DeviceId device_id = 3; + repeated context.EndPointId endpoint_id = 4; + repeated context.ServiceId service_id = 5; + repeated context.SliceId slice_id = 6; + repeated context.ConnectionId connection_id = 7; + repeated context.LinkId link_id = 8; +} + +message KpiDescriptorList { + repeated KpiDescriptor kpi_descriptor_list = 1; +} -- GitLab From bbd1511c1051f60dd042303baf7f0d096ed6cc3d Mon Sep 17 00:00:00 2001 From: Ricard Vilalta Date: Mon, 8 Apr 2024 08:37:26 +0000 Subject: [PATCH 038/205] Delete kpi_manager.proto --- kpi_manager.proto | 46 ---------------------------------------------- 1 file changed, 46 deletions(-) delete mode 100644 kpi_manager.proto diff --git a/kpi_manager.proto b/kpi_manager.proto deleted file mode 100644 index 1ef447fb3..000000000 --- a/kpi_manager.proto +++ /dev/null @@ -1,46 +0,0 @@ -syntax = "proto3"; -package monitoring; - -import "context.proto"; -import "kpi_sample_types.proto"; - -service KpiManagerService { - rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} - rpc SetKpiDescriptor (KpiDescriptor ) returns (KpiId ) {} - rpc DeleteKpiDescriptor(KpiId ) returns (context.Empty ) {} - rpc SelectKpiDescriptor(KpiDescriptorFilter) returns (KpiDescriptorList) {} -} - -message KpiId { - context.Uuid kpi_id = 1; -} - -message KpiDescriptor { - KpiId kpi_id = 1; - string kpi_description = 2; - kpi_sample_types.KpiSampleType kpi_sample_type = 3; - context.DeviceId device_id = 4; - context.EndPointId endpoint_id = 5; - context.ServiceId service_id = 6; - context.SliceId slice_id = 7; - context.ConnectionId connection_id = 8; - context.LinkId link_id = 9; -} - -message KpiDescriptorFilter { - // KPI Descriptors that fulfill the filter are those that match ALL the following fields. - // An empty list means: any value is accepted. 
- // All fields empty means: list all KPI Descriptors - repeated KpiId kpi_id = 1; - repeated kpi_sample_types.KpiSampleType kpi_sample_type = 2; - repeated context.DeviceId device_id = 3; - repeated context.EndPointId endpoint_id = 4; - repeated context.ServiceId service_id = 5; - repeated context.SliceId slice_id = 6; - repeated context.ConnectionId connection_id = 7; - repeated context.LinkId link_id = 8; -} - -message KpiDescriptorList { - repeated KpiDescriptor kpi_descriptor_list = 1; -} -- GitLab From b000c36edfcefa92d58d128832054559b3de1e89 Mon Sep 17 00:00:00 2001 From: Ricard Vilalta Date: Mon, 8 Apr 2024 08:41:29 +0000 Subject: [PATCH 039/205] Upload New File --- proto/telemetry_frontend.proto | 42 ++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 proto/telemetry_frontend.proto diff --git a/proto/telemetry_frontend.proto b/proto/telemetry_frontend.proto new file mode 100644 index 000000000..93213628e --- /dev/null +++ b/proto/telemetry_frontend.proto @@ -0,0 +1,42 @@ +syntax = "proto3"; +package device; + +import "context.proto"; +import "kpi_manager.proto"; +import "kpi_sample_types.proto"; + +service TelemetryFrontendService { + rpc StartCollector (Collector ) returns (CollectorId ) {} + rpc StopCollector (CollectorId ) returns (context.Empty) {} + rpc SelectCollectors(CollectorFilter) returns (CollectorList) {} +} + +message CollectorId { + context.Uuid collector_id = 1; +} + +message Collector { + CollectorId collector_id = 1; // The Collector ID + kpi_manager.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples + float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely + float interval_s = 4; // Interval between collected samples +} + +message CollectorFilter { + // Collector that fulfill the filter are those that match ALL the following fields. + // An empty list means: any value is accepted. 
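+ // Example: to select every collector attached to a single KPI, set kpi_id to that one KpiId and leave all other lists empty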
+ // All fields empty means: list all Collectors + repeated CollectorId collector_id = 1; + repeated kpi_manager.KpiId kpi_id = 2; + repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; + repeated context.DeviceId device_id = 4; + repeated context.EndPointId endpoint_id = 5; + repeated context.ServiceId service_id = 6; + repeated context.SliceId slice_id = 7; + repeated context.ConnectionId connection_id = 8; + repeated context.LinkId link_id = 9; +} + +message CollectorList { + repeated Collector collector_list = 1; +} -- GitLab From 0c68bc65f7ffe72f10f95ed13ac1fc87404d5952 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Apr 2024 13:50:35 +0000 Subject: [PATCH 040/205] comment added to mention the place of the error during KPI_MANAGER tests --- src/kpi_manager/tests/test_unitary.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index 113b26890..b45346d06 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -239,6 +239,7 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # Prepare Environment, should be the first test ################################################## +# ERROR on this test --- def test_prepare_environment( context_client : ContextClient, # pylint: disable=redefined-outer-name,unused-argument ): -- GitLab From 3de99d75ff905e1b38304775752d5144ec9d6143 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Apr 2024 13:51:51 +0000 Subject: [PATCH 041/205] kafka.sh is added (an automated script to deploy Apache Kafka) --- deploy/kafka.sh | 64 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100755 deploy/kafka.sh diff --git a/deploy/kafka.sh b/deploy/kafka.sh new file mode 100755 index 000000000..976bec117 --- /dev/null +++ b/deploy/kafka.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +######################################################################################################################## +# Read deployment settings +######################################################################################################################## + +# If not already set, set the namespace where Apache Kafka will be deployed.
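+# The default can be overridden per invocation, e.g.: KFK_NAMESPACE=my-kafka ./deploy/kafka.sh (my-kafka is a hypothetical namespace)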
+export KFK_NAMESPACE=${KFK_NAMESPACE:-"kafka"} + + +######################################################################################################################## +# Automated steps start here +######################################################################################################################## + +# Constants +TMP_FOLDER="./tmp" +KFK_MANIFESTS_PATH="manifests/kafka" +KFK_ZOOKEEPER_MANIFEST="01-zookeeper.yaml" +KFK_MANIFEST="02-kafka.yaml" + +# Create a tmp folder for files modified during the deployment +TMP_MANIFESTS_FOLDER="${TMP_FOLDER}/${KFK_NAMESPACE}/manifests" +mkdir -p ${TMP_MANIFESTS_FOLDER} + +# copy zookeeper and kafka manifest files to temporary manifest location +cp "${KFK_MANIFESTS_PATH}/${KFK_ZOOKEEPER_MANIFEST}" "${TMP_MANIFESTS_FOLDER}/${KFK_ZOOKEEPER_MANIFEST}" +cp "${KFK_MANIFESTS_PATH}/${KFK_MANIFEST}" "${TMP_MANIFESTS_FOLDER}/${KFK_MANIFEST}" + +kubectl delete namespace ${KFK_NAMESPACE} --ignore-not-found +kubectl create namespace ${KFK_NAMESPACE} +# sleep 2 +# echo "----" + +# Kafka zookeeper service should be deployed before the kafka service +kubectl --namespace ${KFK_NAMESPACE} apply -f "${TMP_MANIFESTS_FOLDER}/${KFK_ZOOKEEPER_MANIFEST}" +# kubectl get services --namespace ${KFK_NAMESPACE} +# echo "----" + +KFK_ZOOKEEPER_SERVICE="zookeeper-service" # this command may be replaced with command to get service name automatically +KFK_ZOOKEEPER_IP=$(kubectl --namespace ${KFK_NAMESPACE} get service ${KFK_ZOOKEEPER_SERVICE} -o 'jsonpath={.spec.clusterIP}') +# echo $KFK_ZOOKEEPER_IP +# echo "----" + +# Kafka service should be deployed after the zookeeper service +sed -i "s//${KFK_ZOOKEEPER_IP}/" "${TMP_MANIFESTS_FOLDER}/$KFK_MANIFEST" +# echo "----" +kubectl --namespace ${KFK_NAMESPACE} apply -f "${TMP_MANIFESTS_FOLDER}/$KFK_MANIFEST" +sleep 5 +kubectl --namespace ${KFK_NAMESPACE} get pods + +echo "--- Kafka service deployed sucessfully ---" \ No newline at end of file -- GitLab From a0ae85f2b319f06221d7eb1f523e9ec8e052bd1f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Apr 2024 13:52:31 +0000 Subject: [PATCH 042/205] Apache Kafka manifest files for deployment --- manifests/kafka/01-zookeeper.yaml | 40 +++++++++++++++++++++++++++ manifests/kafka/02-kafka.yaml | 46 +++++++++++++++++++++++++++++++ 2 files changed, 86 insertions(+) create mode 100644 manifests/kafka/01-zookeeper.yaml create mode 100644 manifests/kafka/02-kafka.yaml diff --git a/manifests/kafka/01-zookeeper.yaml b/manifests/kafka/01-zookeeper.yaml new file mode 100644 index 000000000..0f5ade5d9 --- /dev/null +++ b/manifests/kafka/01-zookeeper.yaml @@ -0,0 +1,40 @@ +apiVersion: v1 +kind: Service +metadata: + labels: + app: zookeeper-service + name: zookeeper-service + namespace: kafka +spec: + type: NodePort + ports: + - name: zookeeper-port + port: 2181 + nodePort: 30181 + targetPort: 2181 + selector: + app: zookeeper +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: zookeeper + name: zookeeper + namespace: kafka +spec: + replicas: 1 + selector: + matchLabels: + app: zookeeper + template: + metadata: + labels: + app: zookeeper + spec: + containers: + - image: wurstmeister/zookeeper + imagePullPolicy: IfNotPresent + name: zookeeper + ports: + - containerPort: 2181 \ No newline at end of file diff --git a/manifests/kafka/02-kafka.yaml b/manifests/kafka/02-kafka.yaml new file mode 100644 index 000000000..8a2b51724 --- /dev/null +++ b/manifests/kafka/02-kafka.yaml @@ -0,0 +1,46 @@ +apiVersion: v1 +kind: Service +metadata: + labels: + app: kafka-broker 
+ name: kafka-service + namespace: kafka +spec: + ports: + - port: 9092 + selector: + app: kafka-broker +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: kafka-broker + name: kafka-broker + namespace: kafka +spec: + replicas: 1 + selector: + matchLabels: + app: kafka-broker + template: + metadata: + labels: + app: kafka-broker + spec: + hostname: kafka-broker + containers: + - env: + - name: KAFKA_BROKER_ID + value: "1" + - name: KAFKA_ZOOKEEPER_CONNECT + value: :2181 + - name: KAFKA_LISTENERS + value: PLAINTEXT://:9092 + - name: KAFKA_ADVERTISED_LISTENERS + value: PLAINTEXT://localhost:9092 + image: wurstmeister/kafka + imagePullPolicy: IfNotPresent + name: kafka-broker + ports: + - containerPort: 9092 \ No newline at end of file -- GitLab From 2e0889be589f100d22ae6246ebaac0ef967010ac Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Apr 2024 13:54:00 +0000 Subject: [PATCH 043/205] package versions changed to pre-tested versions. --- src/monitoring/requirements.in | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/monitoring/requirements.in b/src/monitoring/requirements.in index 4e57dd019..bea1bc165 100644 --- a/src/monitoring/requirements.in +++ b/src/monitoring/requirements.in @@ -32,3 +32,15 @@ requests==2.27.1 xmltodict==0.12.0 questdb==1.0.1 psycopg2-binary==2.9.3 +coverage==6.3 +grpcio==1.47.* +grpcio-health-checking==1.47.* +grpcio-tools==1.47.* +grpclib==0.4.4 +prettytable==3.5.0 +prometheus-client==0.13.0 +protobuf==3.20.* +pytest==6.2.5 +pytest-benchmark==3.4.1 +python-dateutil==2.8.2 +pytest-depends==1.0.1 -- GitLab From 2fc6a4982fea2681100e2737a8b43d22d19b6ed6 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Apr 2024 13:54:46 +0000 Subject: [PATCH 044/205] Apache Kafka environment variable is added. --- my_deploy.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/my_deploy.sh b/my_deploy.sh index 5bd58d0dc..74c293619 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -154,3 +154,10 @@ export PROM_EXT_PORT_HTTP="9090" # Set the external port Grafana HTTP Dashboards will be exposed to. export GRAF_EXT_PORT_HTTP="3000" + + +# ----- Apache Kafka ----------------------------------------------------------- + +# Set the namespace where Apache Kafka will be deployed.
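+# NOTE: keep this value aligned with deploy/kafka.sh, which falls back to the same "kafka" namespace when unset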
+export KFK_NAMESPACE="kafka" + -- GitLab From 3a00a55c67a4105fcadc6e9944bf8f598b6ed1b9 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 12 Apr 2024 09:05:59 +0000 Subject: [PATCH 045/205] Improvement in Kafka deployment script --- deploy/kafka.sh | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/deploy/kafka.sh b/deploy/kafka.sh index 976bec117..f2fb666b5 100755 --- a/deploy/kafka.sh +++ b/deploy/kafka.sh @@ -39,26 +39,31 @@ mkdir -p ${TMP_MANIFESTS_FOLDER} cp "${KFK_MANIFESTS_PATH}/${KFK_ZOOKEEPER_MANIFEST}" "${TMP_MANIFESTS_FOLDER}/${KFK_ZOOKEEPER_MANIFEST}" cp "${KFK_MANIFESTS_PATH}/${KFK_MANIFEST}" "${TMP_MANIFESTS_FOLDER}/${KFK_MANIFEST}" +echo "Apache Kafka Namespace" +echo ">>> Delete Apache Kafka Namespace" kubectl delete namespace ${KFK_NAMESPACE} --ignore-not-found + +echo ">>> Create Apache Kafka Namespace" kubectl create namespace ${KFK_NAMESPACE} -# sleep 2 -# echo "----" +echo ">>> Deplying Apache Kafka Zookeeper" # Kafka zookeeper service should be deployed before the kafka service kubectl --namespace ${KFK_NAMESPACE} apply -f "${TMP_MANIFESTS_FOLDER}/${KFK_ZOOKEEPER_MANIFEST}" -# kubectl get services --namespace ${KFK_NAMESPACE} -# echo "----" -KFK_ZOOKEEPER_SERVICE="zookeeper-service" # this command may be replaced with command to get service name automatically +KFK_ZOOKEEPER_SERVICE="zookeeper-service" # this command may be replaced with command to extract service name automatically KFK_ZOOKEEPER_IP=$(kubectl --namespace ${KFK_NAMESPACE} get service ${KFK_ZOOKEEPER_SERVICE} -o 'jsonpath={.spec.clusterIP}') -# echo $KFK_ZOOKEEPER_IP -# echo "----" # Kafka service should be deployed after the zookeeper service sed -i "s//${KFK_ZOOKEEPER_IP}/" "${TMP_MANIFESTS_FOLDER}/$KFK_MANIFEST" -# echo "----" + +echo ">>> Deploying Apache Kafka Broker" kubectl --namespace ${KFK_NAMESPACE} apply -f "${TMP_MANIFESTS_FOLDER}/$KFK_MANIFEST" -sleep 5 -kubectl --namespace ${KFK_NAMESPACE} get pods -echo "--- Kafka service deployed sucessfully ---" \ No newline at end of file +echo ">>> Verifing Apache Kafka deployment" +sleep 5 +KFK_PODS_STATUS=$(kubectl --namespace ${KFK_NAMESPACE} get pods) +if echo "$KFK_PODS_STATUS" | grep -qEv 'STATUS|Running'; then + echo "Deployment Error: $KFK_PODS_STATUS" +else + echo "$KFK_PODS_STATUS" +fi \ No newline at end of file -- GitLab From 7a342bb6e3d2a70822befcf6c32f869c96cc3430 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 16 Apr 2024 09:22:03 +0000 Subject: [PATCH 046/205] Added new message (KpiDescriptorFilter) and updated method name --- proto/kpi_manager.proto | 42 +++++++++++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/proto/kpi_manager.proto b/proto/kpi_manager.proto index f5769ed37..ad48eb84f 100644 --- a/proto/kpi_manager.proto +++ b/proto/kpi_manager.proto @@ -19,27 +19,41 @@ import "context.proto"; import "kpi_sample_types.proto"; service KpiManagerService{ - rpc SetKpi (KpiDescriptor ) returns (KpiId ) {} // Stable not final - rpc DeleteKpi (KpiId ) returns (context.Empty ) {} // Stable and final - rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} // Stable and final - rpc GetKpiDescriptorList (context.Empty ) returns (KpiDescriptorList ) {} // Stable and final + rpc SetKpiDescriptor (KpiDescriptor ) returns (KpiId ) {} // Stable not final + rpc DeleteKpiDescriptor (KpiId ) returns (context.Empty ) {} // Stable and final + rpc GetKpiDescriptor (KpiId ) returns (KpiDescriptor ) {} // Stable and final + rpc SelectKpiDescriptor 
(KpiDescriptorFilter) returns (KpiDescriptorList ) {} // Stable and final +} + + +message KpiId { + context.Uuid kpi_id = 1; } message KpiDescriptor { KpiId kpi_id = 1; string kpi_description = 2; - repeated KpiId kpi_id_list = 3; - kpi_sample_types.KpiSampleType kpi_sample_type = 4; - context.DeviceId device_id = 5; - context.EndPointId endpoint_id = 6; - context.ServiceId service_id = 7; - context.SliceId slice_id = 8; - context.ConnectionId connection_id = 9; - context.LinkId link_id = 10; + kpi_sample_types.KpiSampleType kpi_sample_type = 3; + context.DeviceId device_id = 4; + context.EndPointId endpoint_id = 5; + context.ServiceId service_id = 6; + context.SliceId slice_id = 7; + context.ConnectionId connection_id = 8; + context.LinkId link_id = 9; } -message KpiId { - context.Uuid kpi_id = 1; +message KpiDescriptorFilter { + // KPI Descriptors that fulfill the filter are those that match ALL the following fields. + // An empty list means: any value is accepted. + // All fields empty means: list all KPI Descriptors + repeated KpiId kpi_id = 1; + repeated kpi_sample_types.KpiSampleType kpi_sample_type = 2; + repeated context.DeviceId device_id = 3; + repeated context.EndPointId endpoint_id = 4; + repeated context.ServiceId service_id = 5; + repeated context.SliceId slice_id = 6; + repeated context.ConnectionId connection_id = 7; + repeated context.LinkId link_id = 8; } message KpiDescriptorList { -- GitLab From 2a3ddf384af7bd9751b35207588333d0bc0c06e4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 16 Apr 2024 09:23:18 +0000 Subject: [PATCH 047/205] Update method names --- src/kpi_manager/client/KpiManagerClient.py | 30 +++++++++++----------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index a129cd327..5a4cd2e20 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -19,7 +19,7 @@ from common.Settings import get_service_host, get_service_port_grpc from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub LOGGER = logging.getLogger(__name__) @@ -29,8 +29,8 @@ RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, class KpiManagerClient: def __init__(self, host=None, port=None): - if not host: host = get_service_host(ServiceNameEnum.KPIMANAGER) # update enum - if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # update enum + if not host: host = get_service_host(ServiceNameEnum.KPIMANAGER) + if not port: port = get_service_port_grpc(ServiceNameEnum.KPIMANAGER) self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint))) self.channel = None @@ -48,17 +48,17 @@ class KpiManagerClient: self.stub = None @RETRY_DECORATOR - def SetKpi(self, request : KpiDescriptor) -> KpiId: - LOGGER.debug('SetKpi: {:s}'.format(grpc_message_to_json_string(request))) - response = self.stub.SetKpi(request) - LOGGER.debug('SetKpi result: {:s}'.format(grpc_message_to_json_string(response))) + def SetKpiDescriptor(self, request : KpiDescriptor) -> KpiId: + 
LOGGER.debug('SetKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SetKpiDescriptor(request) + LOGGER.debug('SetKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def DeleteKpi(self,request : KpiId) -> Empty: - LOGGER.debug('DeleteKpi: {:s}'.format(grpc_message_to_json_string(request))) - response = self.stub.DeleteKpi(request) - LOGGER.info('DeleteKpi result: {:s}'.format(grpc_message_to_json_string(response))) + def DeleteKpiDescriptor(self,request : KpiId) -> Empty: + LOGGER.debug('DeleteKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.DeleteKpiDescriptor(request) + LOGGER.info('DeleteKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR @@ -69,8 +69,8 @@ class KpiManagerClient: return response @RETRY_DECORATOR - def GetKpiDescriptorList(self, request : Empty) -> KpiDescriptorList: - LOGGER.debug('GetKpiDescriptorList: {:s}'.format(grpc_message_to_json_string(request))) - response = self.stub.GetKpiDescriptorList(request) - LOGGER.debug('GetKpiDescriptorList result: {:s}'.format(grpc_message_to_json_string(response))) + def SelectKpiDescriptor(self, request : KpiDescriptorFilter) -> KpiDescriptorList: + LOGGER.debug('SelectKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectKpiDescriptor(request) + LOGGER.debug('SelectKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) return response \ No newline at end of file -- GitLab From bb54d7a49c083f2a7927e3259ce7a50d014498be Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 16 Apr 2024 09:23:43 +0000 Subject: [PATCH 048/205] update method name --- src/kpi_manager/service/KpiManagerServiceServicerImpl.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index 3a40195da..fc1ea3b96 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -16,7 +16,7 @@ import logging, grpc from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping from monitoring.service import ManagementDBTools @@ -34,7 +34,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info('MetricsDB initialized --- KPI Manager Service') @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SetKpi( + def SetKpiDescriptor( self, request: KpiDescriptor, grpc_context: grpc.ServicerContext ) -> KpiId: response = KpiId() @@ -57,7 +57,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): return response @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def DeleteKpi(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: + def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: kpi_id = int(request.kpi_id.uuid) kpi = self.management_db.get_KPI(kpi_id) if kpi: @@ -85,7 +85,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): return 
kpiDescriptor @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptorList(self, request: Empty, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: + def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: kpi_descriptor_list = KpiDescriptorList() data = self.management_db.get_KPIS() LOGGER.debug(f"data: {data}") -- GitLab From 4720cbc7a3b1c0beda0861839075701b488daa49 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 16 Apr 2024 09:25:08 +0000 Subject: [PATCH 049/205] New method "create_kpi_filter_request" is added --- src/kpi_manager/tests/test_messages.py | 73 +++++++++++++++++--------- 1 file changed, 47 insertions(+), 26 deletions(-) diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index 589d6cb84..237a983bf 100755 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -24,46 +24,67 @@ def create_kpi_request(kpi_id_str): _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_description = 'KPI Description Test' _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) + _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) return _create_kpi_request def create_kpi_request_b(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member return _create_kpi_request def create_kpi_request_c(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: 
disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member return _create_kpi_request def create_kpi_request_d(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member return _create_kpi_request def kpi_descriptor_list(): _kpi_descriptor_list = kpi_manager_pb2.KpiDescriptorList() - return _kpi_descriptor_list \ No newline at end of file + return _kpi_descriptor_list + +def create_kpi_filter_request(): + _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() + _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + new_device_id = _create_kpi_filter_request.device_id.add() + new_device_id.device_uuid.uuid = 'DEV1' + new_service_id = _create_kpi_filter_request.service_id.add() + new_service_id.service_uuid.uuid = 'SERV1' + new_slice_id = _create_kpi_filter_request.slice_id.add() + new_slice_id.slice_uuid.uuid = 'SLC1' + new_endpoint_id = _create_kpi_filter_request.endpoint_id.add() + new_endpoint_id.endpoint_uuid.uuid = 'END1' + new_connection_id = _create_kpi_filter_request.connection_id.add() + new_connection_id.connection_uuid.uuid = 'CON1' + + # _create_kpi_filter_request.device_id.device_uuid.uuid.append('DEV1') # pylint: disable=maybe-no-member + # _create_kpi_filter_request[0].service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member + # _create_kpi_filter_request[0].slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member + # 
_create_kpi_filter_request[0].endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member
+    # _create_kpi_filter_request[0].connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member
+    return _create_kpi_filter_request
-- 
GitLab


From 44158d341973232e47826cde8a7b50406dfab10f Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 16 Apr 2024 09:25:29 +0000
Subject: [PATCH 050/205] comments were removed

---
 src/kpi_manager/tests/test_messages.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py
index 237a983bf..72ff74c16 100755
--- a/src/kpi_manager/tests/test_messages.py
+++ b/src/kpi_manager/tests/test_messages.py
@@ -82,9 +82,4 @@ def create_kpi_filter_request():
     new_connection_id = _create_kpi_filter_request.connection_id.add()
     new_connection_id.connection_uuid.uuid = 'CON1'
 
-    # _create_kpi_filter_request.device_id.device_uuid.uuid.append('DEV1') # pylint: disable=maybe-no-member
-    # _create_kpi_filter_request[0].service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member
-    # _create_kpi_filter_request[0].slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member
-    # _create_kpi_filter_request[0].endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member
-    # _create_kpi_filter_request[0].connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member
-    return _create_kpi_filter_request
+    return _create_kpi_filter_request
\ No newline at end of file
-- 
GitLab


From 843c37fbf969fcc2261b72a1171ad1c0bfd675e0 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 16 Apr 2024 09:26:39 +0000
Subject: [PATCH 051/205] function calls were updated according to new method names

---
 src/kpi_manager/tests/test_unitary.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py
index b45346d06..adc886341 100755
--- a/src/kpi_manager/tests/test_unitary.py
+++ b/src/kpi_manager/tests/test_unitary.py
@@ -33,7 +33,7 @@ from common.tools.object_factory.Context import json_context, json_context_id
 from common.tools.object_factory.Topology import json_topology, json_topology_id
 # from common.proto.monitoring_pb2 import KpiId, KpiDescriptor, SubsDescriptor, SubsList, AlarmID, \
 #     AlarmDescriptor, AlarmList, KpiDescriptorList, SubsResponse, AlarmResponse, RawKpiTable #, Kpi, KpiList
-from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList
+from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList
 from common.tools.service.GenericGrpcService import GenericGrpcService
 from context.client.ContextClient import ContextClient
 
@@ -43,7 +43,7 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
 from device.service.DeviceService import DeviceService
 from device.client.DeviceClient import DeviceClient
 
-from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, create_kpi_request_c, create_kpi_request_d
+from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request
 # from monitoring.service.MonitoringService import MonitoringService
 from kpi_manager.service.KpiManagerService import KpiManagerService
 # from monitoring.client.MonitoringClient import MonitoringClient
@@ -256,7 +256,7 @@ def test_set_kpi(kpi_manager_client): # 
pylint: disable=redefined-outer-name # make call to server LOGGER.warning('test_create_kpi requesting') for i in range(3): - response = kpi_manager_client.SetKpi(create_kpi_request(str(i+1))) + response = kpi_manager_client.SetKpiDescriptor(create_kpi_request(str(i+1))) LOGGER.debug(str(response)) assert isinstance(response, KpiId) @@ -264,14 +264,14 @@ def test_set_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name def test_delete_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name # make call to server LOGGER.warning('delete_kpi requesting') - response = kpi_manager_client.SetKpi(create_kpi_request('4')) - response = kpi_manager_client.DeleteKpi(response) + response = kpi_manager_client.SetKpiDescriptor(create_kpi_request('4')) + response = kpi_manager_client.DeleteKpiDescriptor(response) LOGGER.debug(str(response)) assert isinstance(response, Empty) # Test case that makes use of client fixture to test server's GetKpiDescriptor method -def test_get_kpi_descriptor_list(kpi_manager_client): # pylint: disable=redefined-outer-name - LOGGER.warning('test_getkpidescritor_kpi begin') - response = kpi_manager_client.GetKpiDescriptorList(Empty()) +def test_select_kpi_descriptor(kpi_manager_client): # pylint: disable=redefined-outer-name + LOGGER.warning('test_selectkpidescritor begin') + response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) LOGGER.debug(str(response)) assert isinstance(response, KpiDescriptorList) -- GitLab From acf03e1207b1a5c7f733f31de31846bc77053b19 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:19:36 +0000 Subject: [PATCH 052/205] Improvements in file formatting --- proto/telemetry_frontend.proto | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/proto/telemetry_frontend.proto b/proto/telemetry_frontend.proto index 93213628e..1f89a5d54 100644 --- a/proto/telemetry_frontend.proto +++ b/proto/telemetry_frontend.proto @@ -16,10 +16,10 @@ message CollectorId { } message Collector { - CollectorId collector_id = 1; // The Collector ID - kpi_manager.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples - float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely - float interval_s = 4; // Interval between collected samples + CollectorId collector_id = 1; // The Collector ID + kpi_manager.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples + float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely + float interval_s = 4; // Interval between collected samples } message CollectorFilter { -- GitLab From 86c1d4eacadd4301bec08cdcf28044ddedeb95b4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:20:25 +0000 Subject: [PATCH 053/205] "Telemetry Frontend" Enum name and port added --- src/common/Constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/common/Constants.py b/src/common/Constants.py index ee737d2bd..72003846b 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -44,6 +44,7 @@ class ServiceNameEnum(Enum): POLICY = 'policy' MONITORING = 'monitoring' KPIMANAGER = 'kpiManager' + TELEMETRYFRONTEND = 'telemetryfrontend' DLT = 'dlt' NBI = 'nbi' CYBERSECURITY = 'cybersecurity' @@ -75,6 +76,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.POLICY .value : 6060, ServiceNameEnum.MONITORING .value : 7070, ServiceNameEnum.KPIMANAGER .value : 7071, + ServiceNameEnum.TELEMETRYFRONTEND .value : 7072, 
ServiceNameEnum.DLT .value : 8080, ServiceNameEnum.NBI .value : 9090, ServiceNameEnum.L3_CAD .value : 10001, -- GitLab From f581f2e3ec830b4319ccd8378eaf6dcb88ab15cb Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:21:51 +0000 Subject: [PATCH 054/205] improvements in file formatting and MAX_RETRIES changed to 10 from 15. --- src/kpi_manager/client/KpiManagerClient.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index 5a4cd2e20..30b1720fb 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -16,14 +16,14 @@ import grpc, logging from common.Constants import ServiceNameEnum from common.Settings import get_service_host, get_service_port_grpc -from common.tools.client.RetryDecorator import retry, delay_exponential -from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.client.RetryDecorator import retry, delay_exponential from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList LOGGER = logging.getLogger(__name__) -MAX_RETRIES = 15 +MAX_RETRIES = 10 DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') -- GitLab From 292df45380d0b9a6baeb2166e5464ee32f0f2037 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:22:46 +0000 Subject: [PATCH 055/205] Initial "Telemetry Frontend" client code --- .../client/TelemetryFrontendClient.py | 70 +++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 src/telemetry_frontend/client/TelemetryFrontendClient.py diff --git a/src/telemetry_frontend/client/TelemetryFrontendClient.py b/src/telemetry_frontend/client/TelemetryFrontendClient.py new file mode 100644 index 000000000..9ca19bb8c --- /dev/null +++ b/src/telemetry_frontend/client/TelemetryFrontendClient.py @@ -0,0 +1,70 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc, logging +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc + +from common.proto.context_pb2 import Empty +from common.tools.grpc.Tools import grpc_message_to_json_string +from common.tools.client.RetryDecorator import retry, delay_exponential +from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceStub +from comment.proto.telemetry_frontend_pb2 import Collector, CollectorId, CollectorFilter, CollectorList + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 10 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +class TelemetryFrontendClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.TELEMETRYFRONTEND) + if not port: port = get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = None + self.stub = None + self.connect() + LOGGER.debug('Channel created') + + def connect(self): + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = TelemetryFrontendServiceStub(self.channel) + + def close(self): + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + @RETRY_DECORATOR + def StartCollector(self, request : Collector) --> CollectorId: + LOGGER.debug('StartCollector: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.StartCollector(request) + LOGGER.debug('StartCollector result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + @RETRY_DECORATOR + def StopCollector(self, request : CollectorId) --> Empty: + LOGGER.debug('StopCollector: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.StopCollector(request) + LOGGER.debug('StopCollector result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + @RETRY_DECORATOR + def SelectCollectors(self, request : CollectorFilter) --> CollectorList: + LOGGER.debug('SelectCollectors: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectCollectors(request) + LOGGER.debug('SelectCollectors result: {:s}'.format(grpc_message_to_json_string(response))) + return response + -- GitLab From 55deaf8425de3a736a5fbf574295a2477f9a793e Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:24:05 +0000 Subject: [PATCH 056/205] __init__.py added in "Telemetry Frontend" subdirectories. --- src/telemetry_frontend/__init__.py | 1 + src/telemetry_frontend/client/__init__.py | 14 ++++++++++++++ src/telemetry_frontend/service/__init__.py | 0 3 files changed, 15 insertions(+) create mode 100644 src/telemetry_frontend/__init__.py create mode 100644 src/telemetry_frontend/client/__init__.py create mode 100644 src/telemetry_frontend/service/__init__.py diff --git a/src/telemetry_frontend/__init__.py b/src/telemetry_frontend/__init__.py new file mode 100644 index 000000000..eb1ae458f --- /dev/null +++ b/src/telemetry_frontend/__init__.py @@ -0,0 +1 @@ +... 
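For reference, a minimal sketch of how the client added above is meant to be driven once the service is listening. It assumes the corrected method signatures (patch 067 later fixes the arrows), field paths follow the proto definitions from patches 046 and 052, and the explicit host/port are illustrative only; when omitted, the client resolves them from ServiceNameEnum.TELEMETRYFRONTEND:

    from common.proto.telemetry_frontend_pb2 import Collector

    from telemetry_frontend.client.TelemetryFrontendClient import TelemetryFrontendClient

    client = TelemetryFrontendClient(host='127.0.0.1', port=7072)  # 7072 as registered in Constants.py
    try:
        collector = Collector()
        collector.collector_id.collector_id.uuid = '1'   # Collector.collector_id is a CollectorId wrapping a context.Uuid
        collector.kpi_id.kpi_id.uuid = 'KPI1'            # KpiId from kpi_manager.proto
        collector.duration_s = 10.0                      # stop collecting after 10 seconds
        collector.interval_s = 1.0                       # one sample per second
        collector_id = client.StartCollector(collector)  # returns the assigned CollectorId
        client.StopCollector(collector_id)
    finally:
        client.close()
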
diff --git a/src/telemetry_frontend/client/__init__.py b/src/telemetry_frontend/client/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/telemetry_frontend/client/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/telemetry_frontend/service/__init__.py b/src/telemetry_frontend/service/__init__.py new file mode 100644 index 000000000..e69de29bb -- GitLab From b7df322b4cc2b728168474cf21924f72cbfd2372 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:50:22 +0000 Subject: [PATCH 057/205] "TelemetryFrontendServiceServicerImpl" file added --- .../service/TelemetryFrontendServiceServicerImpl.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py new file mode 100644 index 000000000..f80ccfd52 --- /dev/null +++ b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -0,0 +1,13 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file -- GitLab From a39e8f074701d1d79af7fc71e593ca71da39d6d2 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 24 Apr 2024 16:51:01 +0000 Subject: [PATCH 058/205] Initial "TelemetryFrontendService" generic file added --- .../service/TelemetryFrontendService.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 src/telemetry_frontend/service/TelemetryFrontendService.py diff --git a/src/telemetry_frontend/service/TelemetryFrontendService.py b/src/telemetry_frontend/service/TelemetryFrontendService.py new file mode 100644 index 000000000..16e77e61b --- /dev/null +++ b/src/telemetry_frontend/service/TelemetryFrontendService.py @@ -0,0 +1,30 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.Constants import ServiceNameEnum +from common.Settings import get_service_port_grpc +from monitoring.service.NameMapping import NameMapping +from common.tools.service.GenericGrpcService import GenericGrpcService +from common.proto.telemetry_frontend_pb2_grpc import add_TelemetryFrontendServiceServicer_to_server +from telemetryfrontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl + + +class TelemetryFrontendService(GenericGrpcService): + def __init__(self, name_mapping : NameMapping, cls_name: str = __name__) -> None: + port = get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) + super().__init__(port, cls_name=cls_name) + self.telemetry_frontend_servicer = TelemetryFrontendServiceServicerImpl(name_mapping) + + def install_servicers(self): + add_TelemetryFrontendServiceServicer_to_server(self.telemetry_frontend_servicer, self.server) -- GitLab From faeed6d72cd393b7a6643a9038651d42b1395e94 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 25 Apr 2024 10:12:58 +0000 Subject: [PATCH 059/205] Metric pool sub-module name changed from "RPC" to "KpiManager" --- src/kpi_manager/service/KpiManagerServiceServicerImpl.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index fc1ea3b96..4ffa1d2a6 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +# do tests to verify the "grpc.ServicerContext" is required or not. 
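On the note just added above: gRPC invokes every generated servicer method as method(self, request, context), so the grpc.ServicerContext parameter cannot be dropped from the RPC signatures even when the handler does not use it. A standalone illustration (EchoServicerSketch and its handler are hypothetical names, not part of TFS):

    import grpc

    class EchoServicerSketch:
        # grpc always passes (request, context); the context exposes the call
        # deadline, peer metadata, and helpers for setting the status code.
        def Echo(self, request, context):
            if context.time_remaining() is not None:    # deadline propagated by the caller
                context.set_code(grpc.StatusCode.OK)
            return request
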
 import logging, grpc
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.proto.context_pb2 import Empty
@@ -23,7 +24,7 @@ from monitoring.service import ManagementDBTools
 
 LOGGER = logging.getLogger(__name__)
 
-METRICS_POOL = MetricsPool('Monitoring', 'RPC')
+METRICS_POOL = MetricsPool('Monitoring', 'KpiManager')
 
 class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
     def __init__(self, name_mapping : NameMapping):
@@ -36,7 +37,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetKpiDescriptor(
         self, request: KpiDescriptor, grpc_context: grpc.ServicerContext
-    ) -> KpiId: 
+    ) -> KpiId:
         response = KpiId()
         kpi_description = request.kpi_description
         kpi_sample_type = request.kpi_sample_type
-- 
GitLab


From c8b75826bcda70d652b9c3c2a534b33ada39f8c7 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 25 Apr 2024 10:13:44 +0000
Subject: [PATCH 060/205] "__init__.py" file added in TelemetryFrontend sub-directories

---
 src/telemetry_frontend/service/__init__.py | 14 ++++++++++++++
 src/telemetry_frontend/tests/__init__.py   | 14 ++++++++++++++
 2 files changed, 28 insertions(+)
 create mode 100644 src/telemetry_frontend/tests/__init__.py

diff --git a/src/telemetry_frontend/service/__init__.py b/src/telemetry_frontend/service/__init__.py
index e69de29bb..1549d9811 100644
--- a/src/telemetry_frontend/service/__init__.py
+++ b/src/telemetry_frontend/service/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/telemetry_frontend/tests/__init__.py b/src/telemetry_frontend/tests/__init__.py
new file mode 100644
index 000000000..1549d9811
--- /dev/null
+++ b/src/telemetry_frontend/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
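The rename above scopes the Prometheus metric labels to the component ('KpiManager') instead of the generic 'RPC' bucket. The decorator pattern, as it appears throughout these patches (a sketch only; MetricsPool and safe_and_metered_rpc_method are TFS-internal helpers whose exact semantics are assumed here from their documented usage):

    import logging
    from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method

    LOGGER = logging.getLogger(__name__)
    # First argument groups metrics by component family, second by sub-module.
    METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend')

    class SomeServicerSketch:
        @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)  # counts, times, and guards the RPC
        def SomeRpc(self, request, grpc_context):
            ...
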
+
-- 
GitLab


From dd0f90fad5e20056bf2e2f2b799464b88391ce84 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 25 Apr 2024 10:14:37 +0000
Subject: [PATCH 061/205] "StartCollector" method dummy definition added

---
 .../TelemetryFrontendServiceServicerImpl.py   | 27 ++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
index f80ccfd52..6ffb78ab1 100644
--- a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
+++ b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
@@ -10,4 +10,29 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
-# limitations under the License.
\ No newline at end of file
+# limitations under the License.
+
+import logging
+from monitoring.service.NameMapping import NameMapping
+from common.proto.telemetry_frontend_pb2 import CollectorId, Collector
+from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
+from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer
+
+
+LOGGER = logging.getLogger(__name__)
+METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend')
+
+class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
+    def __init__(self, name_mapping : NameMapping):
+        LOGGER.info('Init TelemetryFrontendService')
+
+    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
+    def StartCollector(self, request: Collector) -> CollectorId:
+        response = CollectorId()
+        collector_id = request.collector_id
+        collector_kpi_id = request.kpi_id
+        collector_duration = request.duration_s
+        collector_interval = request.interval_s
+
+        response.collector_id.uuid = request.collector_id.uuid
+        return response
\ No newline at end of file
-- 
GitLab


From d66e79c3cbaf8a70e064689aae2690be658ccf29 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 25 Apr 2024 10:15:29 +0000
Subject: [PATCH 062/205] method to test "collectorId" message is added

---
 src/telemetry_frontend/tests/test_mesages.py | 22 ++++++++++++++++++++
 1 file changed, 22 insertions(+)
 create mode 100644 src/telemetry_frontend/tests/test_mesages.py

diff --git a/src/telemetry_frontend/tests/test_mesages.py b/src/telemetry_frontend/tests/test_mesages.py
new file mode 100644
index 000000000..0b7eaeb2b
--- /dev/null
+++ b/src/telemetry_frontend/tests/test_mesages.py
@@ -0,0 +1,22 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
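The dummy StartCollector above only echoes the collector ID back to the caller; nothing is scheduled yet. A quick message-level sketch of that round-trip shape (field paths follow telemetry_frontend.proto and kpi_manager.proto as defined earlier in the series):

    from common.proto.telemetry_frontend_pb2 import Collector, CollectorId

    request = Collector()
    request.collector_id.collector_id.uuid = '1'  # Collector.collector_id is a CollectorId message
    request.kpi_id.kpi_id.uuid = 'KPI1'
    request.duration_s = -1.0                     # the tests below use -1 for an open-ended collector
    request.interval_s = 1.0

    # What the echo is expected to produce:
    expected = CollectorId()
    expected.collector_id.uuid = request.collector_id.collector_id.uuid
    assert expected.collector_id.uuid == '1'
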
+ +from common.proto import telemetry_frontend_pb2 +# from common.proto.kpi_manager_pb2 import kpi_id +# from common.proto.kpi_sample_types_pb2 import KpiSampleType + +def collector_id(): + _collector_id = telemetry_frontend_pb2.CollectorId() + _collector_id.collector_id.uuid = str(1) + return _collector_id \ No newline at end of file -- GitLab From e03a7344575d87aeaa234ada771d15205552e6b1 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 25 Apr 2024 10:16:27 +0000 Subject: [PATCH 063/205] __main__.py function is added in TelemetryFrontend directory --- src/telemetry_frontend/service/__main__.py | 69 ++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 src/telemetry_frontend/service/__main__.py diff --git a/src/telemetry_frontend/service/__main__.py b/src/telemetry_frontend/service/__main__.py new file mode 100644 index 000000000..9b5fe70de --- /dev/null +++ b/src/telemetry_frontend/service/__main__.py @@ -0,0 +1,69 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, threading +from .TelemetryFrontendService import TelemetryFrontendService +from monitoring.service.NameMapping import NameMapping +from monitoring.service.EventTools import EventsDeviceCollector +from common.Settings import ( + get_log_level, wait_for_environment_variables, get_env_var_name, + get_metrics_port ) + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def main(): + global LOGGER + + log_level = get_log_level() + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + +# ------- will be added later -------------- + # wait_for_environment_variables([ + # get_env_var_name + + + # ]) +# ------- will be added later -------------- + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + name_mapping = NameMapping() + + grpc_service = TelemetryFrontendService(name_mapping) + grpc_service.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) \ No newline at end of file -- GitLab From af3678e5618f7c3f038da5b307bbc1afe17e8781 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 25 Apr 2024 15:08:50 +0000 Subject: [PATCH 064/205] test script for telemetry frontend --- .../run_tests_locally_telemetry-frontend.sh | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100755 scripts/run_tests_locally_telemetry-frontend.sh diff --git a/scripts/run_tests_locally_telemetry-frontend.sh b/scripts/run_tests_locally_telemetry-frontend.sh new 
file mode 100755
index 000000000..ac59f6dde
--- /dev/null
+++ b/scripts/run_tests_locally_telemetry-frontend.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+PROJECTDIR=`pwd`
+
+cd $PROJECTDIR/src
+# RCFILE=$PROJECTDIR/coverage/.coveragerc
+# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+#     kpi_manager/tests/test_unitary.py
+
+# python3 kpi_manager/tests/test_unitary.py
+
+RCFILE=$PROJECTDIR/coverage/.coveragerc
+python3 -m pytest --log-level=INFO --verbose \
+    telemetry_frontend/tests/test_unitary.py
\ No newline at end of file
-- 
GitLab


From 48afe65f0b4122f768f045c3abb5d312727f0180 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 25 Apr 2024 15:09:35 +0000
Subject: [PATCH 065/205] added __init__.py file

---
 src/kpi_manager/tests/__init__.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 src/kpi_manager/tests/__init__.py

diff --git a/src/kpi_manager/tests/__init__.py b/src/kpi_manager/tests/__init__.py
new file mode 100644
index 000000000..1549d9811
--- /dev/null
+++ b/src/kpi_manager/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
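The shell script above is a thin wrapper around pytest; the same run can be triggered from Python, which is convenient inside an IDE or a CI step where the coverage RC file is not wired up (a sketch; it assumes execution from the repo's src/ directory):

    import sys

    import pytest

    # Mirrors: python3 -m pytest --log-level=INFO --verbose telemetry_frontend/tests/test_unitary.py
    exit_code = pytest.main([
        '--log-level=INFO',
        '--verbose',
        'telemetry_frontend/tests/test_unitary.py',
    ])
    sys.exit(int(exit_code))
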
+ -- GitLab From 2f8a2a3371ba4f638a0f03bb41f7b4949a8626a4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:05:34 +0000 Subject: [PATCH 066/205] improvements in comments --- src/kpi_manager/tests/test_unitary.py | 46 +-------------------------- 1 file changed, 1 insertion(+), 45 deletions(-) diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index adc886341..75987a5f4 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -191,50 +191,6 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab LOGGER.info('Closed KpiManagerClient...') -# @pytest.fixture(scope='session') -# def management_db(): -# _management_db = ManagementDB('monitoring.db') -# return _management_db - -# @pytest.fixture(scope='session') -# def metrics_db(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name -# return monitoring_service.monitoring_servicer.metrics_db - -# # This function os not clear to me (Changes should me made before execution) -# @pytest.fixture(scope='session') -# def metrics_db(monitoring_service : MonitoringService): # pylint: disable=redefined-outer-name -# return monitoring_service.monitoring_servicer.metrics_db -# #_metrics_db = MetricsDBTools.MetricsDB( -# # METRICSDB_HOSTNAME, METRICSDB_ILP_PORT, METRICSDB_REST_PORT, METRICSDB_TABLE_MONITORING_KPIS) -# #return _metrics_db - -# @pytest.fixture(scope='session') -# def subs_scheduler(): -# _scheduler = BackgroundScheduler(executors={'processpool': ProcessPoolExecutor(max_workers=20)}) -# _scheduler.start() -# return _scheduler - -# def ingestion_data(kpi_id_int): -# # pylint: disable=redefined-outer-name,unused-argument -# metrics_db = MetricsDB('localhost', '9009', '9000', 'monitoring') - -# kpiSampleType = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# kpiSampleType_name = KpiSampleType.Name(kpiSampleType).upper().replace('KPISAMPLETYPE_', '') -# for _ in range(50): -# kpiSampleType = kpiSampleType_name -# kpiId = kpi_id_int -# deviceId = 'DEV'+ str(kpi_id_int) -# endpointId = 'END' + str(kpi_id_int) -# serviceId = 'SERV' + str(kpi_id_int) -# sliceId = 'SLC' + str(kpi_id_int) -# connectionId = 'CON' + str(kpi_id_int) -# time_stamp = timestamp_utcnow_to_float() -# kpi_value = 500*random() - -# metrics_db.write_KPI(time_stamp, kpiId, kpiSampleType, deviceId, endpointId, serviceId, sliceId, connectionId, -# kpi_value) -# sleep(0.1) - ################################################## # Prepare Environment, should be the first test ################################################## @@ -248,7 +204,7 @@ def test_prepare_environment( context_client.SetTopology(Topology(**json_topology(DEFAULT_TOPOLOGY_NAME, context_id=context_id))) ########################### -# Tests Implementation +# Tests Implementation of Kpi Manager ########################### # Test case that makes use of client fixture to test server's CreateKpi method -- GitLab From 8cf8288c3ccf6d81d3f415fd50c0517b19982f28 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:06:59 +0000 Subject: [PATCH 067/205] few syntax errors are removed --- src/telemetry_frontend/client/TelemetryFrontendClient.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/telemetry_frontend/client/TelemetryFrontendClient.py b/src/telemetry_frontend/client/TelemetryFrontendClient.py index 9ca19bb8c..a215dd408 100644 --- a/src/telemetry_frontend/client/TelemetryFrontendClient.py +++ 
b/src/telemetry_frontend/client/TelemetryFrontendClient.py @@ -20,7 +20,7 @@ from common.proto.context_pb2 import Empty from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.client.RetryDecorator import retry, delay_exponential from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceStub -from comment.proto.telemetry_frontend_pb2 import Collector, CollectorId, CollectorFilter, CollectorList +from common.proto.telemetry_frontend_pb2 import Collector, CollectorId, CollectorFilter, CollectorList LOGGER = logging.getLogger(__name__) MAX_RETRIES = 10 @@ -48,21 +48,21 @@ class TelemetryFrontendClient: self.stub = None @RETRY_DECORATOR - def StartCollector(self, request : Collector) --> CollectorId: + def StartCollector(self, request : Collector) -> CollectorId: LOGGER.debug('StartCollector: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.StartCollector(request) LOGGER.debug('StartCollector result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def StopCollector(self, request : CollectorId) --> Empty: + def StopCollector(self, request : CollectorId) -> Empty: LOGGER.debug('StopCollector: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.StopCollector(request) LOGGER.debug('StopCollector result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def SelectCollectors(self, request : CollectorFilter) --> CollectorList: + def SelectCollectors(self, request : CollectorFilter) -> CollectorList: LOGGER.debug('SelectCollectors: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.SelectCollectors(request) LOGGER.debug('SelectCollectors result: {:s}'.format(grpc_message_to_json_string(response))) -- GitLab From 70ebba4213af67e7cd762631c7700f7df4c681e1 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:08:01 +0000 Subject: [PATCH 068/205] corrected package name to "telemetry_frontend" --- src/telemetry_frontend/service/TelemetryFrontendService.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/telemetry_frontend/service/TelemetryFrontendService.py b/src/telemetry_frontend/service/TelemetryFrontendService.py index 16e77e61b..a0ae704d3 100644 --- a/src/telemetry_frontend/service/TelemetryFrontendService.py +++ b/src/telemetry_frontend/service/TelemetryFrontendService.py @@ -17,7 +17,7 @@ from common.Settings import get_service_port_grpc from monitoring.service.NameMapping import NameMapping from common.tools.service.GenericGrpcService import GenericGrpcService from common.proto.telemetry_frontend_pb2_grpc import add_TelemetryFrontendServiceServicer_to_server -from telemetryfrontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl +from telemetry_frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl class TelemetryFrontendService(GenericGrpcService): -- GitLab From e921ccd330ac0720591cc3d1417ec719944d80b2 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:08:50 +0000 Subject: [PATCH 069/205] New messages test file added in "telemetry_frontend" --- src/telemetry_frontend/tests/Messages.py | 30 ++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 src/telemetry_frontend/tests/Messages.py diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry_frontend/tests/Messages.py new file mode 100644 index 000000000..86b454004 --- /dev/null +++ 
b/src/telemetry_frontend/tests/Messages.py @@ -0,0 +1,30 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from common.proto import telemetry_frontend_pb2 +# from common.proto.kpi_manager_pb2 import kpi_id +# from common.proto.kpi_sample_types_pb2 import KpiSampleType + +def collector_id(): + _collector_id = telemetry_frontend_pb2.CollectorId() + _collector_id.collector_id.uuid = str(1) + return _collector_id + +def create_collector_request(coll_id_str): + _create_collector_request = telemetry_frontend_pb2.Collector() + _create_collector_request.collector_id.uuid = str(coll_id_str) + _create_collector_request.kpi_id.kpi_uuid.uuid = 'KPIid' + str(coll_id_str) + _create_collector_request.duration_s = float(-1) + _create_collector_request.interval_s = float(-1) + return _create_collector_request -- GitLab From c73dc9a5eb79edfd53f826653e0a5baeacc2f7af Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:10:40 +0000 Subject: [PATCH 070/205] "test_messages.py" renamed to "Messages.py" --- src/telemetry_frontend/tests/test_mesages.py | 22 -------------------- 1 file changed, 22 deletions(-) delete mode 100644 src/telemetry_frontend/tests/test_mesages.py diff --git a/src/telemetry_frontend/tests/test_mesages.py b/src/telemetry_frontend/tests/test_mesages.py deleted file mode 100644 index 0b7eaeb2b..000000000 --- a/src/telemetry_frontend/tests/test_mesages.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
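For reference, how the new Messages helper is typically consumed. Note that create_collector_request pins duration_s and interval_s to -1, while the proto comment in patch 052 states duration==0 means indefinite collection, so treating any non-positive value as open-ended is an assumption here:

    from telemetry_frontend.tests.Messages import create_collector_request

    open_ended = create_collector_request('1')  # duration_s == interval_s == -1.0

    bounded = create_collector_request('2')
    bounded.duration_s = 30.0   # stop the collector after 30 seconds
    bounded.interval_s = 2.0    # one sample every 2 seconds
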
- -from common.proto import telemetry_frontend_pb2 -# from common.proto.kpi_manager_pb2 import kpi_id -# from common.proto.kpi_sample_types_pb2 import KpiSampleType - -def collector_id(): - _collector_id = telemetry_frontend_pb2.CollectorId() - _collector_id.collector_id.uuid = str(1) - return _collector_id \ No newline at end of file -- GitLab From e994a4a1a29765f4142795fc01225350f263b385 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:24:27 +0000 Subject: [PATCH 071/205] "test_unitary.py" file is added in "telemetery_frontend" --- src/telemetry_frontend/tests/test_unitary.py | 166 +++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 src/telemetry_frontend/tests/test_unitary.py diff --git a/src/telemetry_frontend/tests/test_unitary.py b/src/telemetry_frontend/tests/test_unitary.py new file mode 100644 index 000000000..c52d68921 --- /dev/null +++ b/src/telemetry_frontend/tests/test_unitary.py @@ -0,0 +1,166 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import logging +from common.Constants import ServiceNameEnum +from common.proto.telemetry_frontend_pb2 import CollectorId +from context.client.ContextClient import ContextClient + + +from telemetry_frontend.client.TelemetryFrontendClient import TelemetryFrontendClient +from telemetry_frontend.service.TelemetryFrontendService import TelemetryFrontendService +from telemetry_frontend.tests.Messages import create_collector_request + +from common.Settings import ( + get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) + +from device.client.DeviceClient import DeviceClient +from device.service.DeviceService import DeviceService +from device.service.driver_api.DriverFactory import DriverFactory +from device.service.driver_api.DriverInstanceCache import DriverInstanceCache + +from monitoring.service.NameMapping import NameMapping + +os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' +from device.service.drivers import DRIVERS + +########################### +# Tests Setup +########################### + +LOCAL_HOST = '127.0.0.1' +MOCKSERVICE_PORT = 10000 + +TELEMETRY_FRONTEND_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) +os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(TELEMETRY_FRONTEND_PORT) + +LOGGER = logging.getLogger(__name__) + +class MockContextService(GenericGrpcService): + # Mock Service implementing Context to simplify unitary tests of Monitoring + + def __init__(self, bind_port: Union[str, int]) -> None: + super().__init__(bind_port, LOCAL_HOST, enable_health_servicer=False, cls_name='MockService') + + # pylint: disable=attribute-defined-outside-init + def install_servicers(self): + self.context_servicer = MockServicerImpl_Context() + 
add_ContextServiceServicer_to_server(self.context_servicer, self.server) + +@pytest.fixture(scope='session') +def context_service(): + LOGGER.info('Initializing MockContextService...') + _service = MockContextService(MOCKSERVICE_PORT) + _service.start() + + LOGGER.info('Yielding MockContextService...') + yield _service + + LOGGER.info('Terminating MockContextService...') + _service.context_servicer.msg_broker.terminate() + _service.stop() + + LOGGER.info('Terminated MockContextService...') + +@pytest.fixture(scope='session') +def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing ContextClient...') + _client = ContextClient() + + LOGGER.info('Yielding ContextClient...') + yield _client + + LOGGER.info('Closing ContextClient...') + _client.close() + + LOGGER.info('Closed ContextClient...') + +@pytest.fixture(scope='session') +def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceService...') + driver_factory = DriverFactory(DRIVERS) + driver_instance_cache = DriverInstanceCache(driver_factory) + _service = DeviceService(driver_instance_cache) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding DeviceService...') + yield _service + + LOGGER.info('Terminating DeviceService...') + _service.stop() + + LOGGER.info('Terminated DeviceService...') + +@pytest.fixture(scope='session') +def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceClient...') + _client = DeviceClient() + + LOGGER.info('Yielding DeviceClient...') + yield _client + + LOGGER.info('Closing DeviceClient...') + _client.close() + + LOGGER.info('Closed DeviceClient...') + +@pytest.fixture(scope='session') +def telemetryFrontend_service( + context_service : MockContextService, + device_service : DeviceService + ): + LOGGER.info('Initializing TelemetryFrontendService...') + name_mapping = NameMapping() + + _service = TelemetryFrontendService(name_mapping) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding TelemetryFrontendService...') + yield _service + + LOGGER.info('Terminating TelemetryFrontendService...') + _service.stop() + + LOGGER.info('Terminated TelemetryFrontendService...') + +@pytest.fixture(scope='session') +def telemetryFrontend_client( + telemetryFrontend_service : TelemetryFrontendService + ): + LOGGER.info('Initializing TelemetryFrontendClient...') + _client = TelemetryFrontendClient() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding TelemetryFrontendClient...') + yield _client + + LOGGER.info('Closing TelemetryFrontendClient...') + _client.close() + + LOGGER.info('Closed TelemetryFrontendClient...') + + +########################### +# Tests Implementation of Telemetry Frontend +########################### +def test_start_collector(telemetryFrontend_client): + LOGGER.warning('test_start_collector requesting') + response = telemetryFrontend_client.StartCollector(create_collector_request('1')) + LOGGER.debug(str(response)) + assert isinstance(response, CollectorId) + -- GitLab From 2f8cc53e86e0502aedc78eda071159f14ec99252 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:25:24 +0000 Subject: [PATCH 072/205] "# type: ignore" is added to surpass the warning 
message --- src/telemetry_frontend/client/TelemetryFrontendClient.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/telemetry_frontend/client/TelemetryFrontendClient.py b/src/telemetry_frontend/client/TelemetryFrontendClient.py index a215dd408..9b4e27b36 100644 --- a/src/telemetry_frontend/client/TelemetryFrontendClient.py +++ b/src/telemetry_frontend/client/TelemetryFrontendClient.py @@ -48,21 +48,21 @@ class TelemetryFrontendClient: self.stub = None @RETRY_DECORATOR - def StartCollector(self, request : Collector) -> CollectorId: + def StartCollector(self, request : Collector) -> CollectorId: # type: ignore LOGGER.debug('StartCollector: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.StartCollector(request) LOGGER.debug('StartCollector result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def StopCollector(self, request : CollectorId) -> Empty: + def StopCollector(self, request : CollectorId) -> Empty: # type: ignore LOGGER.debug('StopCollector: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.StopCollector(request) LOGGER.debug('StopCollector result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR - def SelectCollectors(self, request : CollectorFilter) -> CollectorList: + def SelectCollectors(self, request : CollectorFilter) -> CollectorList: # type: ignore LOGGER.debug('SelectCollectors: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.SelectCollectors(request) LOGGER.debug('SelectCollectors result: {:s}'.format(grpc_message_to_json_string(response))) -- GitLab From 79423dca2735fe4d06b6a6c64620f7250b388a7f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 07:26:36 +0000 Subject: [PATCH 073/205] New imports were corrected/added --- src/telemetry_frontend/tests/test_unitary.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/telemetry_frontend/tests/test_unitary.py b/src/telemetry_frontend/tests/test_unitary.py index c52d68921..4f37514dc 100644 --- a/src/telemetry_frontend/tests/test_unitary.py +++ b/src/telemetry_frontend/tests/test_unitary.py @@ -13,19 +13,22 @@ # limitations under the License. 
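Every stub call in this client goes through RETRY_DECORATOR, composed from retry and delay_exponential in common.tools.client.RetryDecorator. A rough, self-contained sketch of that pattern (bounded attempts with exponential back-off capped at a maximum delay); the parameter values echo the MAX_RETRIES constant above, but this implementation is a simplified stand-in, not the project code:

import functools
import logging
import time

LOGGER = logging.getLogger(__name__)

def retry_exponential(max_retries=10, base_delay=0.01, backoff=2.0, max_delay=5.0):
    # Simplified stand-in for retry(max_retries, delay_exponential(...)).
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            delay = base_delay
            for attempt in range(1, max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as exc:  # real code would catch grpc.RpcError and inspect the status
                    if attempt == max_retries:
                        raise
                    LOGGER.warning('attempt %d/%d failed (%s); retrying in %.2fs',
                                   attempt, max_retries, exc, delay)
                    time.sleep(delay)
                    delay = min(delay * backoff, max_delay)
        return wrapper
    return decorator

@retry_exponential(max_retries=3)
def flaky_rpc():
    raise RuntimeError('UNAVAILABLE')  # placeholder for a stub call that may fail transiently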
import os +import pytest import logging +from typing import Union from common.Constants import ServiceNameEnum from common.proto.telemetry_frontend_pb2 import CollectorId +from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server from context.client.ContextClient import ContextClient - +from common.tools.service.GenericGrpcService import GenericGrpcService +from common.tests.MockServicerImpl_Context import MockServicerImpl_Context +from common.Settings import ( + get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) from telemetry_frontend.client.TelemetryFrontendClient import TelemetryFrontendClient from telemetry_frontend.service.TelemetryFrontendService import TelemetryFrontendService from telemetry_frontend.tests.Messages import create_collector_request -from common.Settings import ( - get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) - from device.client.DeviceClient import DeviceClient from device.service.DeviceService import DeviceService from device.service.driver_api.DriverFactory import DriverFactory -- GitLab From 8a0adf79a66595af009a5d808141d4df13e6f118 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 10:29:33 +0000 Subject: [PATCH 074/205] Added specific requirements in "telemetry_frontend" --- src/telemetry_frontend/requirements.in | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 src/telemetry_frontend/requirements.in diff --git a/src/telemetry_frontend/requirements.in b/src/telemetry_frontend/requirements.in new file mode 100644 index 000000000..1dd24fe32 --- /dev/null +++ b/src/telemetry_frontend/requirements.in @@ -0,0 +1,24 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
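The unitary test wires its components through session-scoped pytest fixtures: the mock context service starts first, and each later fixture (context client, device service, telemetry frontend service, client) yields a started object and tears it down when the session ends. A stripped-down sketch of that pattern, with a dummy component standing in for the gRPC services:

import pytest

class DummyGrpcService:
    # Stand-in for the GenericGrpcService subclasses used by the real fixtures.
    def __init__(self, name):
        self.name = name
        self.running = False
    def start(self):
        self.running = True
    def stop(self):
        self.running = False

@pytest.fixture(scope='session')
def dummy_service():
    service = DummyGrpcService('telemetry_frontend')
    service.start()
    yield service          # tests run while the service is up
    service.stop()         # teardown mirrors the _service.stop() calls above

def test_service_is_running(dummy_service):
    assert dummy_service.running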
+
+anytree==2.8.0
+APScheduler==3.10.1
+influx-line-protocol==0.1.4
+psycopg2-binary==2.9.3
+python-dateutil==2.8.2
+python-json-logger==2.0.2
+pytz==2024.1
+questdb==1.0.1
+requests==2.27.1
+xmltodict==0.12.0
\ No newline at end of file
--
GitLab


From 7c705a70add69fde12435984a53d7911c8c12492 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 26 Apr 2024 10:30:49 +0000
Subject: [PATCH 075/205] Add "# type: ignore" to suppress warning message

---
 .../service/KpiManagerServiceServicerImpl.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
index 4ffa1d2a6..f1d370f30 100644
--- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
+++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
@@ -36,8 +36,8 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetKpiDescriptor(
-        self, request: KpiDescriptor, grpc_context: grpc.ServicerContext
-    ) -> KpiId:
+        self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore
+    ) -> KpiId: # type: ignore
         response = KpiId()
         kpi_description = request.kpi_description
         kpi_sample_type = request.kpi_sample_type
@@ -58,7 +58,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         return response

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty:
+    def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: # type: ignore
         kpi_id = int(request.kpi_id.uuid)
         kpi = self.management_db.get_KPI(kpi_id)
         if kpi:
@@ -68,7 +68,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         return Empty()

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor:
+    def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: # type: ignore
         kpi_id = request.kpi_id.uuid
         kpi_db = self.management_db.get_KPI(int(kpi_id))
         kpiDescriptor = KpiDescriptor()
@@ -86,7 +86,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         return kpiDescriptor

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList:
+    def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: # type: ignore
         kpi_descriptor_list = KpiDescriptorList()
         data = self.management_db.get_KPIS()
         LOGGER.debug(f"data: {data}")
--
GitLab


From 21f0b22aca943ee14cf9d6e20ed8e32bafcd3696 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 26 Apr 2024 10:31:35 +0000
Subject: [PATCH 076/205] Adds dummy logic in "StartCollector" function

---
 .../TelemetryFrontendServiceServicerImpl.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
index 6ffb78ab1..7814107dd 100644
--- a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
+++ b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
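The annotations above silence type-checker noise on the generated protobuf classes; the servicer logic itself is one CRUD pattern over management_db: set returns a KpiId, delete and get address a single row, select scans all rows. A rough in-memory model of that flow, a toy stand-in for the real database-backed store with all names illustrative:

import itertools

class InMemoryKpiStore:
    # Toy stand-in for management_db; not the project's implementation.
    def __init__(self):
        self._rows = {}
        self._ids = itertools.count(1)

    def set_kpi(self, descriptor: dict) -> int:
        kpi_id = next(self._ids)
        self._rows[kpi_id] = descriptor
        return kpi_id                      # SetKpiDescriptor returns the new KpiId

    def delete_kpi(self, kpi_id: int) -> None:
        self._rows.pop(kpi_id, None)       # DeleteKpiDescriptor tolerates a missing row

    def get_kpi(self, kpi_id: int) -> dict:
        return self._rows.get(kpi_id, {})  # GetKpiDescriptor: one row, or empty

    def select_kpis(self) -> list:
        return list(self._rows.values())   # SelectKpiDescriptor: full scan

store = InMemoryKpiStore()
kpi_id = store.set_kpi({'description': 'packets received', 'sample_type': 'PACKETS_RECEIVED'})
assert store.get_kpi(kpi_id)['description'] == 'packets received'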
+import grpc
 import logging
 from monitoring.service.NameMapping import NameMapping
 from common.proto.telemetry_frontend_pb2 import CollectorId, Collector
@@ -27,12 +28,13 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
         LOGGER.info('Init TelemetryFrontendService')

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def StartCollector(self, request: Collector) -> CollectorId:
+    def StartCollector(self, request : Collector, grpc_context: grpc.ServicerContext # type: ignore
+                       ) -> CollectorId: # type: ignore
         response = CollectorId()
-        collector_id       = request.collector_id
-        collector_kpi_id   = request.kpi_id
-        collector_duration = request.duration_s
-        collector_interval = request.interval_s
+        _collector_id        = request.collector_id
+        # collector_kpi_id   = request.kpi_id
+        # collector_duration = request.duration_s
+        # collector_interval = request.interval_s

-        response.collector_id.uuid = request.collector_id.uuid
+        response.collector_id.uuid = _collector_id.collector_id.uuid
         return response
\ No newline at end of file
--
GitLab


From 29b6cfbd1009560aa808280d35f22555889c4b4c Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 26 Apr 2024 10:33:38 +0000
Subject: [PATCH 077/205] Correction of "collector_id.uuid" and "kpi_id.uuid"
 assignments

---
 src/telemetry_frontend/tests/Messages.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry_frontend/tests/Messages.py
index 86b454004..d63422b72 100644
--- a/src/telemetry_frontend/tests/Messages.py
+++ b/src/telemetry_frontend/tests/Messages.py
@@ -22,9 +22,9 @@ def collector_id():
     return _collector_id

 def create_collector_request(coll_id_str):
-    _create_collector_request = telemetry_frontend_pb2.Collector()
-    _create_collector_request.collector_id.uuid = str(coll_id_str)
-    _create_collector_request.kpi_id.kpi_uuid.uuid = 'KPIid' + str(coll_id_str)
-    _create_collector_request.duration_s = float(-1)
-    _create_collector_request.interval_s = float(-1)
+    _create_collector_request                               = telemetry_frontend_pb2.Collector()
+    _create_collector_request.collector_id.collector_id.uuid = str(coll_id_str)
+    _create_collector_request.kpi_id.kpi_id.uuid            = 'KPIid' + str(coll_id_str)
+    _create_collector_request.duration_s                    = float(-1)
+    _create_collector_request.interval_s                    = float(-1)
     return _create_collector_request
--
GitLab


From e994a4a1a29765f4142795fc01225350f263b385 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 26 Apr 2024 16:15:51 +0000
Subject: [PATCH 078/205] file names updated

---
 ...ts_locally_kpi_manager.sh => run_tests_locally-kpi_manager.sh} | 0
 ...emetry-frontend.sh => run_tests_locally-telemetry-frontend.sh} | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 rename scripts/{run_tests_locally_kpi_manager.sh => run_tests_locally-kpi_manager.sh} (100%)
 rename scripts/{run_tests_locally_telemetry-frontend.sh => run_tests_locally-telemetry-frontend.sh} (100%)

diff --git a/scripts/run_tests_locally_kpi_manager.sh b/scripts/run_tests_locally-kpi_manager.sh
similarity index 100%
rename from scripts/run_tests_locally_kpi_manager.sh
rename to scripts/run_tests_locally-kpi_manager.sh
diff --git a/scripts/run_tests_locally_telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh
similarity index 100%
rename from scripts/run_tests_locally_telemetry-frontend.sh
rename to scripts/run_tests_locally-telemetry-frontend.sh
--
GitLab


From 4343146c456ecd906c0ab51154e5361fb4852a57 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Fri, 26 Apr
2024 16:16:43 +0000 Subject: [PATCH 079/205] "StopCollector" and "SelectCollector" methods are added --- .../TelemetryFrontendServiceServicerImpl.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py index 7814107dd..cb66b6b6a 100644 --- a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -14,8 +14,9 @@ import grpc import logging +from common.proto.context_pb2 import Empty from monitoring.service.NameMapping import NameMapping -from common.proto.telemetry_frontend_pb2 import CollectorId, Collector +from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer @@ -37,4 +38,16 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): # collector_interval = request.interval_s response.collector_id.uuid = _collector_id.collector_id.uuid + return response + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def StopCollector(self, request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore + ) -> Empty: # type: ignore + + return Empty() + + def SelectCollectors(self, request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore + ) -> CollectorList: # type: ignore + response = CollectorList() + return response \ No newline at end of file -- GitLab From e2c690c1edbe07abae9531f39901c2f0cf8bbbe3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 16:18:30 +0000 Subject: [PATCH 080/205] "create_collector_filter" method is added in "Messages.py" --- src/telemetry_frontend/tests/Messages.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry_frontend/tests/Messages.py index d63422b72..877781b79 100644 --- a/src/telemetry_frontend/tests/Messages.py +++ b/src/telemetry_frontend/tests/Messages.py @@ -28,3 +28,7 @@ def create_collector_request(coll_id_str): _create_collector_request.duration_s = float(-1) _create_collector_request.interval_s = float(-1) return _create_collector_request + +def create_collector_filter(filter): + _create_collector_filter = telemetry_frontend_pb2.CollectorFilter() + \ No newline at end of file -- GitLab From 231ab1e65b6f4b69c8c65e01b5e3be050273ec84 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 26 Apr 2024 16:19:31 +0000 Subject: [PATCH 081/205] "test_stop_collector" and "test_select_collectors" are added --- src/telemetry_frontend/tests/test_unitary.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/telemetry_frontend/tests/test_unitary.py b/src/telemetry_frontend/tests/test_unitary.py index 4f37514dc..4b2594839 100644 --- a/src/telemetry_frontend/tests/test_unitary.py +++ b/src/telemetry_frontend/tests/test_unitary.py @@ -16,8 +16,9 @@ import os import pytest import logging from typing import Union +from common.proto.context_pb2 import Empty from common.Constants import ServiceNameEnum -from common.proto.telemetry_frontend_pb2 import CollectorId +from common.proto.telemetry_frontend_pb2 import CollectorId, CollectorList from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server from 
context.client.ContextClient import ContextClient
 from common.tools.service.GenericGrpcService import GenericGrpcService
@@ -167,3 +168,14 @@ def test_start_collector(telemetryFrontend_client):
     LOGGER.debug(str(response))
     assert isinstance(response, CollectorId)

+def test_stop_collector(telemetryFrontend_client):
+    LOGGER.warning('test_stop_collector requesting')
+    response = telemetryFrontend_client.StopCollector('1')
+    LOGGER.debug(str(response))
+    assert isinstance(response, Empty)
+
+def test_select_collectors(telemetryFrontend_client):
+    LOGGER.warning('test_select_collector requesting')
+    response = telemetryFrontend_client.SelectCollectors()
+    LOGGER.debug(str(response))
+    assert isinstance(response, CollectorList)
\ No newline at end of file
--
GitLab


From 269da2e96f88e271fc255375923f437ba7a6342c Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 29 Apr 2024 12:36:22 +0000
Subject: [PATCH 082/205] dummy logic is added in "StopCollector" method

---
 .../service/TelemetryFrontendServiceServicerImpl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
index cb66b6b6a..498d07a91 100644
--- a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
+++ b/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py
@@ -43,7 +43,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def StopCollector(self, request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore
                       ) -> Empty:  # type: ignore
-
+        request.collector_id.uuid = ""
         return Empty()

     def SelectCollectors(self, request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore
--
GitLab


From 1c177bb7a6d589cdd0d1f38d001299cd8c646628 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 29 Apr 2024 12:37:22 +0000
Subject: [PATCH 083/205] New test cases are added

---
 src/telemetry_frontend/tests/Messages.py | 42 ++++++++++++++++++++----
 1 file changed, 36 insertions(+), 6 deletions(-)

diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry_frontend/tests/Messages.py
index 877781b79..86c869834 100644
--- a/src/telemetry_frontend/tests/Messages.py
+++ b/src/telemetry_frontend/tests/Messages.py
@@ -13,12 +13,11 @@
 # limitations under the License.
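With patches 079 through 082 the frontend servicer now answers all three RPCs, though only with dummy bodies. A hedged sketch of the behaviour these stubs point toward: an in-memory registry keyed by collector UUID, with selection filtered by KPI ids. Plain Python only; the gRPC and database plumbing are omitted, and the real service is expected to differ:

class CollectorRegistrySketch:
    # Illustrative only: the real servicer delegates to a backend, not a dict.
    def __init__(self):
        self._active = {}   # collector uuid -> descriptor dict

    def start_collector(self, collector_uuid, kpi_uuid, duration_s, interval_s):
        self._active[collector_uuid] = {
            'kpi_uuid': kpi_uuid, 'duration_s': duration_s, 'interval_s': interval_s,
        }
        return collector_uuid                   # echoes the id, as StartCollector does

    def stop_collector(self, collector_uuid):
        self._active.pop(collector_uuid, None)  # StopCollector returns Empty

    def select_collectors(self, kpi_uuids=None):
        # CollectorFilter carries lists of ids; only KPI ids are honoured here.
        return [
            (uuid, desc) for uuid, desc in self._active.items()
            if kpi_uuids is None or desc['kpi_uuid'] in kpi_uuids
        ]

registry = CollectorRegistrySketch()
registry.start_collector('COLL1', 'KPI1', 10.0, 2.0)
assert registry.select_collectors(['KPI1'])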
from common.proto import telemetry_frontend_pb2 -# from common.proto.kpi_manager_pb2 import kpi_id -# from common.proto.kpi_sample_types_pb2 import KpiSampleType +from common.proto.kpi_sample_types_pb2 import KpiSampleType -def collector_id(): +def create_collector_id(coll_id_str): _collector_id = telemetry_frontend_pb2.CollectorId() - _collector_id.collector_id.uuid = str(1) + _collector_id.collector_id.uuid = str(coll_id_str) return _collector_id def create_collector_request(coll_id_str): @@ -29,6 +28,37 @@ def create_collector_request(coll_id_str): _create_collector_request.interval_s = float(-1) return _create_collector_request -def create_collector_filter(filter): +def create_collector_request_a(): + _create_collector_request_a = telemetry_frontend_pb2.Collector() + _create_collector_request_a.kpi_id.kpi_id.uuid = "-1" + return _create_collector_request_a + +def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s): + _create_collector_request_b = telemetry_frontend_pb2.Collector() + _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id + _create_collector_request_b.duration_s = coll_duration_s + _create_collector_request_b.interval_s = coll_interval_s + return _create_collector_request_b + +def create_collector_filter(): _create_collector_filter = telemetry_frontend_pb2.CollectorFilter() - \ No newline at end of file + new_collector_id = _create_collector_filter.collector_id.add() + new_collector_id.collector_id.uuid = "COLL1" + new_kpi_id = _create_collector_filter.kpi_id.add() + new_kpi_id.kpi_id.uuid = "KPI1" + new_device_id = _create_collector_filter.device_id.add() + new_device_id.device_uuid.uuid = 'DEV1' + new_service_id = _create_collector_filter.service_id.add() + new_service_id.service_uuid.uuid = 'SERV1' + new_slice_id = _create_collector_filter.slice_id.add() + new_slice_id.slice_uuid.uuid = 'SLC1' + new_endpoint_id = _create_collector_filter.endpoint_id.add() + new_endpoint_id.endpoint_uuid.uuid = 'END1' + new_connection_id = _create_collector_filter.connection_id.add() + new_connection_id.connection_uuid.uuid = 'CON1' + _create_collector_filter.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + return _create_collector_filter + +def create_collector_list(): + _create_collector_list = telemetry_frontend_pb2.CollectorList() + return _create_collector_list \ No newline at end of file -- GitLab From f6c040ab31901e0049c80aa0d91625dafd12b78c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 29 Apr 2024 12:37:57 +0000 Subject: [PATCH 084/205] New test cases are added --- src/telemetry_frontend/tests/test_unitary.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/src/telemetry_frontend/tests/test_unitary.py b/src/telemetry_frontend/tests/test_unitary.py index 4b2594839..68467590f 100644 --- a/src/telemetry_frontend/tests/test_unitary.py +++ b/src/telemetry_frontend/tests/test_unitary.py @@ -16,6 +16,7 @@ import os import pytest import logging from typing import Union + from common.proto.context_pb2 import Empty from common.Constants import ServiceNameEnum from common.proto.telemetry_frontend_pb2 import CollectorId, CollectorList @@ -28,7 +29,8 @@ from common.Settings import ( from telemetry_frontend.client.TelemetryFrontendClient import TelemetryFrontendClient from telemetry_frontend.service.TelemetryFrontendService import TelemetryFrontendService -from telemetry_frontend.tests.Messages import create_collector_request +from telemetry_frontend.tests.Messages import ( create_collector_id, 
create_collector_request,
+    create_collector_filter, create_collector_request_a, create_collector_request_b)

 from device.client.DeviceClient import DeviceClient
 from device.service.DeviceService import DeviceService
@@ -168,14 +170,26 @@ def test_start_collector(telemetryFrontend_client):
     LOGGER.debug(str(response))
     assert isinstance(response, CollectorId)

+def test_start_collector_a(telemetryFrontend_client):
+    LOGGER.warning('test_start_collector requesting')
+    response = telemetryFrontend_client.StartCollector(create_collector_request_a())
+    LOGGER.debug(str(response))
+    assert isinstance(response, CollectorId)
+
+def test_start_collector_b(telemetryFrontend_client):
+    LOGGER.warning('test_start_collector requesting')
+    response = telemetryFrontend_client.StartCollector(create_collector_request_b('1',10,2))
+    LOGGER.debug(str(response))
+    assert isinstance(response, CollectorId)
+
 def test_stop_collector(telemetryFrontend_client):
     LOGGER.warning('test_stop_collector requesting')
-    response = telemetryFrontend_client.StopCollector('1')
+    response = telemetryFrontend_client.StopCollector(create_collector_id("1"))
     LOGGER.debug(str(response))
     assert isinstance(response, Empty)

 def test_select_collectors(telemetryFrontend_client):
     LOGGER.warning('test_select_collector requesting')
-    response = telemetryFrontend_client.SelectCollectors()
+    response = telemetryFrontend_client.SelectCollectors(create_collector_filter())
     LOGGER.debug(str(response))
     assert isinstance(response, CollectorList)
\ No newline at end of file
--
GitLab


From cd5f2e6565cab03b027614087c409d6431e5fcd2 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 30 Apr 2024 13:38:28 +0000
Subject: [PATCH 085/205] NodeExporter deployment script

---
 manifests/mock_nodeexporter.yaml | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
 create mode 100644 manifests/mock_nodeexporter.yaml

diff --git a/manifests/mock_nodeexporter.yaml b/manifests/mock_nodeexporter.yaml
new file mode 100644
index 000000000..bf595d63a
--- /dev/null
+++ b/manifests/mock_nodeexporter.yaml
@@ -0,0 +1,21 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: node-exporter
+  labels:
+    app: node-exporter
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: node-exporter
+  template:
+    metadata:
+      labels:
+        app: node-exporter
+    spec:
+      containers:
+      - name: node-exporter
+        image: prom/node-exporter:latest
+        ports:
+        - containerPort: 9100
--
GitLab


From 04f1681b12c9d403424434fe6d986c89a65b898c Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 30 Apr 2024 13:38:58 +0000
Subject: [PATCH 086/205] NodeExporter service Script

---
 manifests/mock_nodeexporterservice.yaml | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 manifests/mock_nodeexporterservice.yaml

diff --git a/manifests/mock_nodeexporterservice.yaml b/manifests/mock_nodeexporterservice.yaml
new file mode 100644
index 000000000..b7bb4f879
--- /dev/null
+++ b/manifests/mock_nodeexporterservice.yaml
@@ -0,0 +1,12 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: node-exporter
+spec:
+  selector:
+    app: node-exporter
+  ports:
+  - protocol: TCP
+    port: 9100
+    targetPort: 9100
+  type: NodePort
--
GitLab


From e08d7ce3e9f51ed989cb7e5a87ce27231a460d40 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 30 Apr 2024 13:41:24 +0000
Subject: [PATCH 087/205] List of Telemetry virtual environment installed
 packages

---
 src/telemetry_frontend/telemetry_virenv.txt | 44 +++++++++++++++++++++
 1 file changed, 44 insertions(+)
 create mode 100644 
src/telemetry_frontend/telemetry_virenv.txt diff --git a/src/telemetry_frontend/telemetry_virenv.txt b/src/telemetry_frontend/telemetry_virenv.txt new file mode 100644 index 000000000..0ce9b803a --- /dev/null +++ b/src/telemetry_frontend/telemetry_virenv.txt @@ -0,0 +1,44 @@ +anytree==2.8.0 +APScheduler==3.10.1 +attrs==23.2.0 +certifi==2024.2.2 +charset-normalizer==2.0.12 +colorama==0.4.6 +confluent-kafka==2.3.0 +coverage==6.3 +future-fstrings==1.2.0 +grpcio==1.47.5 +grpcio-health-checking==1.47.5 +grpcio-tools==1.47.5 +grpclib==0.4.4 +h2==4.1.0 +hpack==4.0.0 +hyperframe==6.0.1 +idna==3.7 +influx-line-protocol==0.1.4 +iniconfig==2.0.0 +kafka-python==2.0.2 +multidict==6.0.5 +networkx==3.3 +packaging==24.0 +pluggy==1.5.0 +prettytable==3.5.0 +prometheus-client==0.13.0 +protobuf==3.20.3 +psycopg2-binary==2.9.3 +py==1.11.0 +py-cpuinfo==9.0.0 +pytest==6.2.5 +pytest-benchmark==3.4.1 +pytest-depends==1.0.1 +python-dateutil==2.8.2 +python-json-logger==2.0.2 +pytz==2024.1 +questdb==1.0.1 +requests==2.27.1 +six==1.16.0 +toml==0.10.2 +tzlocal==5.2 +urllib3==1.26.18 +wcwidth==0.2.13 +xmltodict==0.12.0 -- GitLab From 54839a5ca7635a187bdeb4c7e73c7e249eee6e3c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 30 Apr 2024 13:43:19 +0000 Subject: [PATCH 088/205] KafkaProducerController: for Kafka configuration and execution of producer --- .../backend/KafkaProducerController.py | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100755 src/telemetry_frontend/backend/KafkaProducerController.py diff --git a/src/telemetry_frontend/backend/KafkaProducerController.py b/src/telemetry_frontend/backend/KafkaProducerController.py new file mode 100755 index 000000000..8c88d5e8e --- /dev/null +++ b/src/telemetry_frontend/backend/KafkaProducerController.py @@ -0,0 +1,57 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from NodeExporterProducer import KafkaNodeExporterProducer + +class KafkaProducerController: + """ + Class to control Kafka producer functionality. 
+ """ + def __init__(self): + kafka_configs = self.generate_kafka_configurations() + self.bootstrap_servers = kafka_configs['bootstrap_servers'] + self.node_exporter_endpoint = kafka_configs['node_exporter_endpoint'] + self.kafka_topic = kafka_configs['kafka_topic'] + self.run_duration = kafka_configs['run_duration'] + self.fetch_interval = kafka_configs['fetch_interval'] + + def generate_kafka_configurations(self): + """ + Method to generate Kafka configurations + """ + create_kafka_configuration = { + 'bootstrap_servers' : '127.0.0.1:9092', # Kafka broker address - Replace with your Kafka broker address + 'node_exporter_endpoint' : 'http://10.152.183.231:9100/metrics', # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint + 'kafka_topic' : 'metric-data', # Kafka topic to produce to + 'run_duration' : 20, # Total duration to execute the producer + 'fetch_interval' : 3 # Time between two fetch requests + } + return create_kafka_configuration + + def run_producer(self): + """ + Method to create KafkaNodeExporterProducer object and start producer thread. + """ + # Create NodeExporterProducer object and run start_producer_thread + producer = KafkaNodeExporterProducer(self.bootstrap_servers, self.node_exporter_endpoint, + self.kafka_topic, self.run_duration, self.fetch_interval + ) + # producer.start_producer_thread() # if threading is required + producer.produce_metrics() # if threading is not required + +if __name__ == "__main__": + + # Create Kafka producer controller object and run producer + kafka_controller = KafkaProducerController() + kafka_controller.run_producer() -- GitLab From 179a2193cf887ba4aabd677bd745d148d800c136 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 30 Apr 2024 13:44:42 +0000 Subject: [PATCH 089/205] NodeExporterProducer: Implementation of Producer: Read from NodeExporter endpoint and writes on Kafka topic --- .../backend/NodeExporterProducer.py | 128 ++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100755 src/telemetry_frontend/backend/NodeExporterProducer.py diff --git a/src/telemetry_frontend/backend/NodeExporterProducer.py b/src/telemetry_frontend/backend/NodeExporterProducer.py new file mode 100755 index 000000000..5f7b1471b --- /dev/null +++ b/src/telemetry_frontend/backend/NodeExporterProducer.py @@ -0,0 +1,128 @@ + +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from confluent_kafka import Producer, KafkaException +from confluent_kafka.admin import AdminClient, NewTopic +import requests +import time +import threading + +class KafkaNodeExporterProducer: + """ + Class to fetch metrics from Node Exporter and produce them to Kafka. + """ + + def __init__(self, bootstrap_servers, node_exporter_endpoint, kafka_topic, run_duration, fetch_interval): + """ + Constructor to initialize Kafka producer parameters. + Args: + bootstrap_servers (str): Kafka broker address. + node_exporter_endpoint (str): Node Exporter metrics endpoint. 
+ kafka_topic (str): Kafka topic to produce metrics to. + run_interval (int): Time interval in seconds to run the producer. + """ + self.bootstrap_servers = bootstrap_servers + self.node_exporter_endpoint = node_exporter_endpoint + self.kafka_topic = kafka_topic + self.run_duration = run_duration + self.fetch_interval = fetch_interval + + def fetch_metrics(self): + """ + Method to fetch metrics from Node Exporter. + Returns: + str: Metrics fetched from Node Exporter. + """ + try: + response = requests.get(self.node_exporter_endpoint) + if response.status_code == 200: + print(f"Metrics fetched sucessfully...") + return response.text + else: + print(f"Failed to fetch metrics. Status code: {response.status_code}") + return None + except Exception as e: + print(f"Failed to fetch metrics: {str(e)}") + return None + + def delivery_callback(self, err, msg): + """ + Callback function to handle message delivery status. + Args: + err (KafkaError): Kafka error object. + msg (Message): Kafka message object. + """ + if err: + print(f'Message delivery failed: {err}') + else: + print(f'Message delivered to topic {msg.topic()}') + + def create_topic_if_not_exists(self, admin_client): + """ + Method to create Kafka topic if it does not exist. + Args: + admin_client (AdminClient): Kafka admin client. + """ + try: + topic_metadata = admin_client.list_topics(timeout=5) + if self.kafka_topic not in topic_metadata.topics: + # If the topic does not exist, create a new topic + print(f"Topic '{self.kafka_topic}' does not exist. Creating...") + new_topic = NewTopic(self.kafka_topic, num_partitions=1, replication_factor=1) + admin_client.create_topics([new_topic]) + except KafkaException as e: + print(f"Failed to create topic: {e}") + + def produce_metrics(self): + """ + Method to continuously produce metrics to Kafka topic. + """ + conf = { + 'bootstrap.servers': self.bootstrap_servers, + } + + admin_client = AdminClient(conf) + self.create_topic_if_not_exists(admin_client) + + kafka_producer = Producer(conf) + + try: + start_time = time.time() + while True: + metrics = self.fetch_metrics() + + if metrics: + kafka_producer.produce(self.kafka_topic, metrics.encode('utf-8'), callback=self.delivery_callback) + kafka_producer.flush() + print("Metrics produced to Kafka topic") + + # Check if the specified run duration has elapsed + if time.time() - start_time >= self.run_duration: + break + + # waiting time until next fetch + time.sleep(self.fetch_interval) + except KeyboardInterrupt: + print("Keyboard interrupt detected. Exiting...") + finally: + kafka_producer.flush() + # kafka_producer.close() # this command generates ERROR + + def start_producer_thread(self): + """ + Method to start the producer thread. + """ + producer_thread = threading.Thread(target=self.produce_metrics) + producer_thread.start() -- GitLab From 97e5fafc9201f56737444996639e9f54f1483e27 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 30 Apr 2024 13:55:07 +0000 Subject: [PATCH 090/205] format improvements --- src/telemetry_frontend/backend/NodeExporterProducer.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/telemetry_frontend/backend/NodeExporterProducer.py b/src/telemetry_frontend/backend/NodeExporterProducer.py index 5f7b1471b..b15943727 100755 --- a/src/telemetry_frontend/backend/NodeExporterProducer.py +++ b/src/telemetry_frontend/backend/NodeExporterProducer.py @@ -33,11 +33,11 @@ class KafkaNodeExporterProducer: kafka_topic (str): Kafka topic to produce metrics to. 
run_interval (int): Time interval in seconds to run the producer. """ - self.bootstrap_servers = bootstrap_servers + self.bootstrap_servers = bootstrap_servers self.node_exporter_endpoint = node_exporter_endpoint - self.kafka_topic = kafka_topic - self.run_duration = run_duration - self.fetch_interval = fetch_interval + self.kafka_topic = kafka_topic + self.run_duration = run_duration + self.fetch_interval = fetch_interval def fetch_metrics(self): """ -- GitLab From 0e46347ccacdd2c76d0a2030a9e6233d7d885695 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 30 Apr 2024 15:43:42 +0000 Subject: [PATCH 091/205] files rename to "KafkaProducerService" and "KafkaProducerServiceImpl" --- .../KafkaProducerService.py} | 14 +++--- .../KafkaProducerServiceImpl.py} | 43 +++++++++++++++---- 2 files changed, 42 insertions(+), 15 deletions(-) rename src/telemetry_frontend/backend/{KafkaProducerController.py => service/KafkaProducerService.py} (83%) rename src/telemetry_frontend/backend/{NodeExporterProducer.py => service/KafkaProducerServiceImpl.py} (70%) diff --git a/src/telemetry_frontend/backend/KafkaProducerController.py b/src/telemetry_frontend/backend/service/KafkaProducerService.py similarity index 83% rename from src/telemetry_frontend/backend/KafkaProducerController.py rename to src/telemetry_frontend/backend/service/KafkaProducerService.py index 8c88d5e8e..fd2507abf 100755 --- a/src/telemetry_frontend/backend/KafkaProducerController.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerService.py @@ -12,9 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from NodeExporterProducer import KafkaNodeExporterProducer +from KafkaProducerServiceImpl import KafkaProducerServiceImpl -class KafkaProducerController: +class KafkaProducerService: """ Class to control Kafka producer functionality. """ @@ -35,16 +35,16 @@ class KafkaProducerController: 'node_exporter_endpoint' : 'http://10.152.183.231:9100/metrics', # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint 'kafka_topic' : 'metric-data', # Kafka topic to produce to 'run_duration' : 20, # Total duration to execute the producer - 'fetch_interval' : 3 # Time between two fetch requests + 'fetch_interval' : 4 # Time between two fetch requests } return create_kafka_configuration def run_producer(self): """ - Method to create KafkaNodeExporterProducer object and start producer thread. + Method to create KafkaProducerServiceImpl object and start producer thread. 
""" # Create NodeExporterProducer object and run start_producer_thread - producer = KafkaNodeExporterProducer(self.bootstrap_servers, self.node_exporter_endpoint, + producer = KafkaProducerServiceImpl(self.bootstrap_servers, self.node_exporter_endpoint, self.kafka_topic, self.run_duration, self.fetch_interval ) # producer.start_producer_thread() # if threading is required @@ -52,6 +52,6 @@ class KafkaProducerController: if __name__ == "__main__": - # Create Kafka producer controller object and run producer - kafka_controller = KafkaProducerController() + # Create Kafka producer service object and run producer + kafka_controller = KafkaProducerService() kafka_controller.run_producer() diff --git a/src/telemetry_frontend/backend/NodeExporterProducer.py b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py similarity index 70% rename from src/telemetry_frontend/backend/NodeExporterProducer.py rename to src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py index b15943727..6017f26cc 100755 --- a/src/telemetry_frontend/backend/NodeExporterProducer.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py @@ -19,9 +19,9 @@ import requests import time import threading -class KafkaNodeExporterProducer: +class KafkaProducerServiceImpl: """ - Class to fetch metrics from Node Exporter and produce them to Kafka. + Class to fetch metrics from Exporter and produce them to Kafka. """ def __init__(self, bootstrap_servers, node_exporter_endpoint, kafka_topic, run_duration, fetch_interval): @@ -39,17 +39,25 @@ class KafkaNodeExporterProducer: self.run_duration = run_duration self.fetch_interval = fetch_interval - def fetch_metrics(self): + def fetch_node_exporter_metrics(self): """ Method to fetch metrics from Node Exporter. Returns: str: Metrics fetched from Node Exporter. """ + KPI = "node_network_receive_packets_total" try: response = requests.get(self.node_exporter_endpoint) if response.status_code == 200: - print(f"Metrics fetched sucessfully...") - return response.text + # print(f"Metrics fetched sucessfully...") + metrics = response.text + # Check if the desired metric is available in the response + if KPI in metrics: + KPI_VALUE = self.extract_metric_value(metrics, KPI) + # Extract the metric value + if KPI_VALUE is not None: + print(f"KPI value: {KPI_VALUE}") + return KPI_VALUE else: print(f"Failed to fetch metrics. Status code: {response.status_code}") return None @@ -57,6 +65,25 @@ class KafkaNodeExporterProducer: print(f"Failed to fetch metrics: {str(e)}") return None + def extract_metric_value(self, metrics, metric_name): + """ + Method to extract the value of a metric from the metrics string. + Args: + metrics (str): Metrics string fetched from Node Exporter. + metric_name (str): Name of the metric to extract. + Returns: + float: Value of the extracted metric, or None if not found. + """ + try: + # Find the metric line containing the desired metric name + metric_line = next(line for line in metrics.split('\n') if line.startswith(metric_name)) + # Split the line to extract the metric value + metric_value = float(metric_line.split()[1]) + return metric_value + except StopIteration: + print(f"Metric '{metric_name}' not found in the metrics.") + return None + def delivery_callback(self, err, msg): """ Callback function to handle message delivery status. 
@@ -101,12 +128,12 @@ class KafkaNodeExporterProducer: try: start_time = time.time() while True: - metrics = self.fetch_metrics() + metrics = self.fetch_node_exporter_metrics() # select the function name based on the provided requirements if metrics: - kafka_producer.produce(self.kafka_topic, metrics.encode('utf-8'), callback=self.delivery_callback) + kafka_producer.produce(self.kafka_topic, str(metrics), callback=self.delivery_callback) kafka_producer.flush() - print("Metrics produced to Kafka topic") + # print("Metrics produced to Kafka topic") # Check if the specified run duration has elapsed if time.time() - start_time >= self.run_duration: -- GitLab From a5f83a179afd7b4176f4d72befdd475af839332a Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 30 Apr 2024 15:44:08 +0000 Subject: [PATCH 092/205] Kafka tests file added --- .../backend/tests/KafkaProducerTests.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 src/telemetry_frontend/backend/tests/KafkaProducerTests.py diff --git a/src/telemetry_frontend/backend/tests/KafkaProducerTests.py b/src/telemetry_frontend/backend/tests/KafkaProducerTests.py new file mode 100644 index 000000000..b353f9fe2 --- /dev/null +++ b/src/telemetry_frontend/backend/tests/KafkaProducerTests.py @@ -0,0 +1,19 @@ + +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from telemetry_frontend.backend.service.KafkaProducerService import KafkaProducerService + +kafka_controller = KafkaProducerService() +kafka_controller.run_producer() \ No newline at end of file -- GitLab From 23742187793fe472292300d972b7a07ba454d827 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:01:42 +0000 Subject: [PATCH 093/205] Script to run "Telemetry Backend" service --- .../run_tests_locally-telemetry-backend.sh | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100755 scripts/run_tests_locally-telemetry-backend.sh diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh new file mode 100755 index 000000000..cbebd6807 --- /dev/null +++ b/scripts/run_tests_locally-telemetry-backend.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
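Taken together, the backend patches reduce to a three-step loop: scrape the node-exporter endpoint, extract one KPI sample from the Prometheus exposition text, and publish it to a Kafka topic until run_duration expires. A condensed sketch of that loop, assuming requests and confluent-kafka are installed and reusing the placeholder broker, endpoint and topic values from this series (not a tested deployment):

import time
import requests
from confluent_kafka import Producer

BROKER   = '127.0.0.1:9092'                      # placeholder broker address
ENDPOINT = 'http://10.152.183.231:9100/metrics'  # node-exporter scrape target (example value)
TOPIC    = 'metric-data'
KPI      = 'node_network_receive_packets_total'

def scrape_kpi():
    # Fetch the exposition text and return the first sample of KPI, as extract_metric_value does.
    text = requests.get(ENDPOINT, timeout=5).text
    for line in text.splitlines():
        if line.startswith(KPI):
            return float(line.split()[1])
    return None

def run(run_duration=20, fetch_interval=4):
    producer = Producer({'bootstrap.servers': BROKER})
    deadline = time.time() + run_duration
    while time.time() < deadline:
        value = scrape_kpi()
        if value is not None:
            producer.produce(TOPIC, str(value))  # value is serialized as plain text
            producer.flush()
        time.sleep(fetch_interval)               # wait until the next fetch

if __name__ == '__main__':
    run()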
+ + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +# RCFILE=$PROJECTDIR/coverage/.coveragerc +# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# kpi_manager/tests/test_unitary.py + +# python3 kpi_manager/tests/test_unitary.py + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=INFO --verbose \ + telemetry_frontend/backend/tests/test_kafka_backend.py \ No newline at end of file -- GitLab From 6d5bc62a7cb4c87315fa4c23068e45babf8e409b Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:02:12 +0000 Subject: [PATCH 094/205] __init__.py file added --- src/telemetry_frontend/__init__.py | 16 +++++++++++++++- .../{tests/KafkaProducerTests.py => __init__.py} | 6 ------ .../backend/service/__init__.py | 13 +++++++++++++ src/telemetry_frontend/backend/tests/__init__.py | 13 +++++++++++++ 4 files changed, 41 insertions(+), 7 deletions(-) rename src/telemetry_frontend/backend/{tests/KafkaProducerTests.py => __init__.py} (78%) create mode 100644 src/telemetry_frontend/backend/service/__init__.py create mode 100644 src/telemetry_frontend/backend/tests/__init__.py diff --git a/src/telemetry_frontend/__init__.py b/src/telemetry_frontend/__init__.py index eb1ae458f..6a8f39746 100644 --- a/src/telemetry_frontend/__init__.py +++ b/src/telemetry_frontend/__init__.py @@ -1 +1,15 @@ -... + +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/telemetry_frontend/backend/tests/KafkaProducerTests.py b/src/telemetry_frontend/backend/__init__.py similarity index 78% rename from src/telemetry_frontend/backend/tests/KafkaProducerTests.py rename to src/telemetry_frontend/backend/__init__.py index b353f9fe2..38d04994f 100644 --- a/src/telemetry_frontend/backend/tests/KafkaProducerTests.py +++ b/src/telemetry_frontend/backend/__init__.py @@ -1,4 +1,3 @@ - # Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,8 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from telemetry_frontend.backend.service.KafkaProducerService import KafkaProducerService - -kafka_controller = KafkaProducerService() -kafka_controller.run_producer() \ No newline at end of file diff --git a/src/telemetry_frontend/backend/service/__init__.py b/src/telemetry_frontend/backend/service/__init__.py new file mode 100644 index 000000000..38d04994f --- /dev/null +++ b/src/telemetry_frontend/backend/service/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/telemetry_frontend/backend/tests/__init__.py b/src/telemetry_frontend/backend/tests/__init__.py new file mode 100644 index 000000000..38d04994f --- /dev/null +++ b/src/telemetry_frontend/backend/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. -- GitLab From beb53c88457021589a6a6a12288120ab9aaba74a Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:03:24 +0000 Subject: [PATCH 095/205] method name change from "generate_kafka_configurations" to "generate_kafka_configs" --- .../backend/service/KafkaProducerService.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/telemetry_frontend/backend/service/KafkaProducerService.py b/src/telemetry_frontend/backend/service/KafkaProducerService.py index fd2507abf..4e2d79347 100755 --- a/src/telemetry_frontend/backend/service/KafkaProducerService.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerService.py @@ -12,36 +12,38 @@ # See the License for the specific language governing permissions and # limitations under the License. -from KafkaProducerServiceImpl import KafkaProducerServiceImpl +from .KafkaProducerServiceImpl import KafkaProducerServiceImpl class KafkaProducerService: """ Class to control Kafka producer functionality. """ def __init__(self): - kafka_configs = self.generate_kafka_configurations() + + kafka_configs = self.generate_kafka_configs() + self.bootstrap_servers = kafka_configs['bootstrap_servers'] self.node_exporter_endpoint = kafka_configs['node_exporter_endpoint'] self.kafka_topic = kafka_configs['kafka_topic'] self.run_duration = kafka_configs['run_duration'] self.fetch_interval = kafka_configs['fetch_interval'] - def generate_kafka_configurations(self): + def generate_kafka_configs(self): # define the function to get every attribute """ Method to generate Kafka configurations """ - create_kafka_configuration = { + create_kafka_configs = { 'bootstrap_servers' : '127.0.0.1:9092', # Kafka broker address - Replace with your Kafka broker address 'node_exporter_endpoint' : 'http://10.152.183.231:9100/metrics', # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint 'kafka_topic' : 'metric-data', # Kafka topic to produce to 'run_duration' : 20, # Total duration to execute the producer 'fetch_interval' : 4 # Time between two fetch requests } - return create_kafka_configuration + return create_kafka_configs def run_producer(self): """ - Method to create KafkaProducerServiceImpl object and start producer thread. 
+ Method to create KafkaProducerServiceImpl object and start producer. """ # Create NodeExporterProducer object and run start_producer_thread producer = KafkaProducerServiceImpl(self.bootstrap_servers, self.node_exporter_endpoint, -- GitLab From 7a1da84510d9aa5480612edaf04fe311ffda63bd Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:05:00 +0000 Subject: [PATCH 096/205] Two methods add "export_collector_value()" and "write_to_kafka()" --- .../service/KafkaProducerServiceImpl.py | 34 ++++++++++++++++--- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py index 6017f26cc..b6b55f913 100755 --- a/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py @@ -13,11 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from confluent_kafka import Producer, KafkaException -from confluent_kafka.admin import AdminClient, NewTopic -import requests import time +import grpc +import logging +import requests import threading +from common.proto.context_pb2 import Empty +from confluent_kafka import Producer, KafkaException +from confluent_kafka.admin import AdminClient, NewTopic +from common.proto.telemetry_frontend_pb2 import Collector, CollectorId +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method + +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') class KafkaProducerServiceImpl: """ @@ -33,11 +41,25 @@ class KafkaProducerServiceImpl: kafka_topic (str): Kafka topic to produce metrics to. run_interval (int): Time interval in seconds to run the producer. """ + LOGGER.info('Init TelemetryBackendService') + self.bootstrap_servers = bootstrap_servers self.node_exporter_endpoint = node_exporter_endpoint self.kafka_topic = kafka_topic self.run_duration = run_duration self.fetch_interval = fetch_interval + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def export_collector_value(request: CollectorId) -> str: # type: ignore + response = str() + response = '-1' + return response + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def write_to_kafka(Collector, kpi_value) -> Empty: # type: ignore + return Empty() + +# ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- def fetch_node_exporter_metrics(self): """ @@ -114,7 +136,7 @@ class KafkaProducerServiceImpl: def produce_metrics(self): """ - Method to continuously produce metrics to Kafka topic. + Method to produce metrics to Kafka topic as per Kafka configs. """ conf = { 'bootstrap.servers': self.bootstrap_servers, @@ -146,10 +168,12 @@ class KafkaProducerServiceImpl: finally: kafka_producer.flush() # kafka_producer.close() # this command generates ERROR - + # --- def start_producer_thread(self): """ Method to start the producer thread. 
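+        (Note: this runs produce_metrics on a plain, non-daemon Thread that is
+        never joined, so the process stays alive until the producer loop ends.)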
""" producer_thread = threading.Thread(target=self.produce_metrics) producer_thread.start() + +# ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter ----------- \ No newline at end of file -- GitLab From 062cc59b7e178ab67507fc4d29d09694a0a26bf4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:05:37 +0000 Subject: [PATCH 097/205] "TelemetryBackend" messages file added --- .../backend/tests/messagesBackend.py | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 src/telemetry_frontend/backend/tests/messagesBackend.py diff --git a/src/telemetry_frontend/backend/tests/messagesBackend.py b/src/telemetry_frontend/backend/tests/messagesBackend.py new file mode 100644 index 000000000..bfcebf0cc --- /dev/null +++ b/src/telemetry_frontend/backend/tests/messagesBackend.py @@ -0,0 +1,46 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_kafka_config(): + """ + No input parameter is requested + Returns the dict object with Kafka configs + """ + _kafka_configs = dict() + _kafka_configs['bootstrap_servers'] = '127.0.0.1:9092' + _kafka_configs['exporter_endpoint'] = 'http://10.152.183.231:9100/metrics' + _kafka_configs['kafka_topic'] = 'metric-data' + _kafka_configs['run_duration'] = 20 + _kafka_configs['fetch_interval'] = 4 + + return _kafka_configs + +def create_kafka_config_a(bootstrap_server, exporter_endpoint, kafka_topic, run_duration, fetch_interval): + """ + Provide ... + Bootstrap_server IP address as String. + Exporter endpoint with port address as String. + Kafka topic name as String. + Total duration of the test as Float. + Fetch_interval as Float. + """ + _kafka_configs = dict() + _kafka_configs['bootstrap_servers'] = bootstrap_server + _kafka_configs['exporter_endpoint'] = exporter_endpoint + _kafka_configs['kafka_topic'] = kafka_topic + _kafka_configs['run_duration'] = run_duration + _kafka_configs['fetch_interval'] = fetch_interval + + return _kafka_configs \ No newline at end of file -- GitLab From 4b39f8d0b7a2d553b670d3a9f28d3e3a420f61bc Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 7 May 2024 14:06:53 +0000 Subject: [PATCH 098/205] "TelemetryBackend" tests "test_get_kafka_configs()", "test_get_kafka_configs_a()" and "test_export_collector_value()" are added. --- .../backend/tests/test_kafka_backend.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 src/telemetry_frontend/backend/tests/test_kafka_backend.py diff --git a/src/telemetry_frontend/backend/tests/test_kafka_backend.py b/src/telemetry_frontend/backend/tests/test_kafka_backend.py new file mode 100644 index 000000000..5caabb9e0 --- /dev/null +++ b/src/telemetry_frontend/backend/tests/test_kafka_backend.py @@ -0,0 +1,51 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# import sys +# print (sys.path) +import logging +from .messagesBackend import create_kafka_config, create_kafka_config_a +from src.telemetry_frontend.tests.Messages import create_collector_request +from src.telemetry_frontend.backend.service.KafkaProducerService import KafkaProducerService +from src.telemetry_frontend.backend.service.KafkaProducerServiceImpl import KafkaProducerServiceImpl + +LOGGER = logging.getLogger(__name__) + + +########################### +# Tests Implementation of Telemetry Backend +########################### +def test_get_kafka_configs(): + LOGGER.warning('test_get_kafka_configs requesting') + response = KafkaProducerService.generate_kafka_configs( + create_kafka_config() + ) + LOGGER.debug(str(response)) + assert isinstance(response, dict) + +def test_get_kafka_configs_a(): + LOGGER.warning('test_get_kafka_configs_a requesting') + response = KafkaProducerService.generate_kafka_configs( + create_kafka_config_a('ip:port', 'ip:port', 'test_topic', 10, 3) + ) + LOGGER.debug(str(response)) + assert isinstance(response, dict) + +def test_export_collector_value(): + LOGGER.warning('test_export_collector_value requesting') + response = KafkaProducerServiceImpl.export_collector_value( + create_collector_request('1') + ) + LOGGER.debug(str(response)) + assert isinstance(response, str) \ No newline at end of file -- GitLab From 1c03799fdf4730e2a6a389a3d9c3d82440b77ac9 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:24:56 +0000 Subject: [PATCH 099/205] Move "Kafka_configs" initiation to "generate_kafka_configs" --- .../backend/service/KafkaProducerService.py | 20 +++++++------------ 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/src/telemetry_frontend/backend/service/KafkaProducerService.py b/src/telemetry_frontend/backend/service/KafkaProducerService.py index 4e2d79347..0a76b2d99 100755 --- a/src/telemetry_frontend/backend/service/KafkaProducerService.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerService.py @@ -19,25 +19,19 @@ class KafkaProducerService: Class to control Kafka producer functionality. 
""" def __init__(self): + pass - kafka_configs = self.generate_kafka_configs() - self.bootstrap_servers = kafka_configs['bootstrap_servers'] - self.node_exporter_endpoint = kafka_configs['node_exporter_endpoint'] - self.kafka_topic = kafka_configs['kafka_topic'] - self.run_duration = kafka_configs['run_duration'] - self.fetch_interval = kafka_configs['fetch_interval'] - - def generate_kafka_configs(self): # define the function to get every attribute + def generate_kafka_configs(self): """ Method to generate Kafka configurations """ create_kafka_configs = { - 'bootstrap_servers' : '127.0.0.1:9092', # Kafka broker address - Replace with your Kafka broker address - 'node_exporter_endpoint' : 'http://10.152.183.231:9100/metrics', # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint - 'kafka_topic' : 'metric-data', # Kafka topic to produce to - 'run_duration' : 20, # Total duration to execute the producer - 'fetch_interval' : 4 # Time between two fetch requests + 'bootstrap_servers' : "test_server", # Kafka broker address - Replace with your Kafka broker address + 'exporter_endpoint' : "test_exporter", # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint + 'kafka_topic' : "test_kafka_topic", # Kafka topic to produce to + 'run_duration' : 10, # Total duration to execute the producer + 'fetch_interval' : 2 # Time between two fetch requests } return create_kafka_configs -- GitLab From eb47527c6a668ce8d48086d8f9432c6ad949a19e Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:28:28 +0000 Subject: [PATCH 100/205] update methods "export_collector_value" and "write_to_kafka" parameters and return type of "export_collector_value". --- .../service/KafkaProducerServiceImpl.py | 29 ++++++++++--------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py index b6b55f913..da5513170 100755 --- a/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py +++ b/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py @@ -14,10 +14,10 @@ # limitations under the License. import time -import grpc import logging import requests import threading +from typing import Tuple from common.proto.context_pb2 import Empty from confluent_kafka import Producer, KafkaException from confluent_kafka.admin import AdminClient, NewTopic @@ -32,31 +32,34 @@ class KafkaProducerServiceImpl: Class to fetch metrics from Exporter and produce them to Kafka. """ - def __init__(self, bootstrap_servers, node_exporter_endpoint, kafka_topic, run_duration, fetch_interval): + def __init__(self, bootstrap_servers=None, exporter_endpoint=None, + kafka_topic=None, run_duration=None, fetch_interval=None): """ Constructor to initialize Kafka producer parameters. Args: bootstrap_servers (str): Kafka broker address. - node_exporter_endpoint (str): Node Exporter metrics endpoint. + exporter_endpoint (str): Node Exporter metrics endpoint. kafka_topic (str): Kafka topic to produce metrics to. run_interval (int): Time interval in seconds to run the producer. 
""" LOGGER.info('Init TelemetryBackendService') - self.bootstrap_servers = bootstrap_servers - self.node_exporter_endpoint = node_exporter_endpoint - self.kafka_topic = kafka_topic - self.run_duration = run_duration - self.fetch_interval = fetch_interval + self.bootstrap_servers = bootstrap_servers + self.exporter_endpoint = exporter_endpoint + self.kafka_topic = kafka_topic + self.run_duration = run_duration + self.fetch_interval = fetch_interval # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def export_collector_value(request: CollectorId) -> str: # type: ignore - response = str() - response = '-1' + def export_collector_value(request : Collector) -> Tuple[str, str]: # type: ignore + response = Tuple[str, str] + response = ('test collector Id', 'test collected value') # Metric to be fetched from endpoint based on Collector message return response # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def write_to_kafka(Collector, kpi_value) -> Empty: # type: ignore + def write_to_kafka(request: Tuple[str, str]) -> Empty: # type: ignore + # _collector_id, _collector_id_value = request + # write collector_id and collector_id value on the Kafka topic return Empty() # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- @@ -69,7 +72,7 @@ class KafkaProducerServiceImpl: """ KPI = "node_network_receive_packets_total" try: - response = requests.get(self.node_exporter_endpoint) + response = requests.get(self.exporter_endpoint) if response.status_code == 200: # print(f"Metrics fetched sucessfully...") metrics = response.text -- GitLab From a7e1ddfbc2ee39936c83f2d6a2105ebfd97473bd Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:29:03 +0000 Subject: [PATCH 101/205] Remove "create_kafka_config" message type --- .../backend/tests/messagesBackend.py | 34 ++++++------------- 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/src/telemetry_frontend/backend/tests/messagesBackend.py b/src/telemetry_frontend/backend/tests/messagesBackend.py index bfcebf0cc..ef1235383 100644 --- a/src/telemetry_frontend/backend/tests/messagesBackend.py +++ b/src/telemetry_frontend/backend/tests/messagesBackend.py @@ -13,34 +13,20 @@ # limitations under the License. -def create_kafka_config(): - """ - No input parameter is requested - Returns the dict object with Kafka configs - """ - _kafka_configs = dict() - _kafka_configs['bootstrap_servers'] = '127.0.0.1:9092' - _kafka_configs['exporter_endpoint'] = 'http://10.152.183.231:9100/metrics' - _kafka_configs['kafka_topic'] = 'metric-data' - _kafka_configs['run_duration'] = 20 - _kafka_configs['fetch_interval'] = 4 - - return _kafka_configs - -def create_kafka_config_a(bootstrap_server, exporter_endpoint, kafka_topic, run_duration, fetch_interval): +def create_kafka_config_a(bootstrap_server: str, exporter_endpoint: str, kafka_topic: str, + run_duration: int, fetch_interval: int): """ Provide ... Bootstrap_server IP address as String. Exporter endpoint with port address as String. Kafka topic name as String. - Total duration of the test as Float. - Fetch_interval as Float. + Total duration of the test as Int. + Fetch_interval as Int. 
""" - _kafka_configs = dict() - _kafka_configs['bootstrap_servers'] = bootstrap_server - _kafka_configs['exporter_endpoint'] = exporter_endpoint - _kafka_configs['kafka_topic'] = kafka_topic - _kafka_configs['run_duration'] = run_duration - _kafka_configs['fetch_interval'] = fetch_interval + _bootstrap_servers = bootstrap_server + _exporter_endpoint = exporter_endpoint + _kafka_topic = kafka_topic + _run_duration = run_duration + _fetch_interval = fetch_interval - return _kafka_configs \ No newline at end of file + return _bootstrap_servers, _exporter_endpoint, _kafka_topic, _run_duration, _fetch_interval -- GitLab From f82e44099a206d323c4415605527deb96d040ee2 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:30:06 +0000 Subject: [PATCH 102/205] Test "test_get_kafka_configs_a" is removed and "test_write_to_kafka" is added --- .../backend/tests/test_kafka_backend.py | 27 +++++++++---------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/src/telemetry_frontend/backend/tests/test_kafka_backend.py b/src/telemetry_frontend/backend/tests/test_kafka_backend.py index 5caabb9e0..e64a65ccd 100644 --- a/src/telemetry_frontend/backend/tests/test_kafka_backend.py +++ b/src/telemetry_frontend/backend/tests/test_kafka_backend.py @@ -15,8 +15,9 @@ # import sys # print (sys.path) import logging -from .messagesBackend import create_kafka_config, create_kafka_config_a -from src.telemetry_frontend.tests.Messages import create_collector_request +from typing import Tuple +from common.proto.context_pb2 import Empty +from src.telemetry_frontend.tests.Messages import create_collector_request, create_collector_id from src.telemetry_frontend.backend.service.KafkaProducerService import KafkaProducerService from src.telemetry_frontend.backend.service.KafkaProducerServiceImpl import KafkaProducerServiceImpl @@ -28,17 +29,8 @@ LOGGER = logging.getLogger(__name__) ########################### def test_get_kafka_configs(): LOGGER.warning('test_get_kafka_configs requesting') - response = KafkaProducerService.generate_kafka_configs( - create_kafka_config() - ) - LOGGER.debug(str(response)) - assert isinstance(response, dict) - -def test_get_kafka_configs_a(): - LOGGER.warning('test_get_kafka_configs_a requesting') - response = KafkaProducerService.generate_kafka_configs( - create_kafka_config_a('ip:port', 'ip:port', 'test_topic', 10, 3) - ) + KafkaProducerServiceObj = KafkaProducerService() + response = KafkaProducerServiceObj.generate_kafka_configs() LOGGER.debug(str(response)) assert isinstance(response, dict) @@ -48,4 +40,11 @@ def test_export_collector_value(): create_collector_request('1') ) LOGGER.debug(str(response)) - assert isinstance(response, str) \ No newline at end of file + assert isinstance(response, Tuple) + +def test_write_to_kafka(): + LOGGER.warning('test_write_to_kafka requesting') + collector_value = KafkaProducerServiceImpl.export_collector_value(create_collector_request('1')) + response = KafkaProducerServiceImpl.write_to_kafka(collector_value) # type: ignore (don't know why warning here) + LOGGER.debug(str(response)) + assert isinstance(response, Empty) -- GitLab From 05194f29c452c8db23ce30443c92f9da05ce1026 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:32:46 +0000 Subject: [PATCH 103/205] Added parameter type in methods "create_collector_id" and "create_collector_request". Replaced "kpi_id.kpi_id.uuid" with "collector_id.collector_id.uuid" in methods "create_collector_request_a" and "create_collector_request_b". 
--- src/telemetry_frontend/tests/Messages.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry_frontend/tests/Messages.py index 86c869834..d323aa7fd 100644 --- a/src/telemetry_frontend/tests/Messages.py +++ b/src/telemetry_frontend/tests/Messages.py @@ -15,12 +15,12 @@ from common.proto import telemetry_frontend_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType -def create_collector_id(coll_id_str): +def create_collector_id(coll_id_str : str): _collector_id = telemetry_frontend_pb2.CollectorId() _collector_id.collector_id.uuid = str(coll_id_str) return _collector_id -def create_collector_request(coll_id_str): +def create_collector_request(coll_id_str : str): _create_collector_request = telemetry_frontend_pb2.Collector() _create_collector_request.collector_id.collector_id.uuid = str(coll_id_str) _create_collector_request.kpi_id.kpi_id.uuid = 'KPIid' + str(coll_id_str) @@ -29,15 +29,16 @@ def create_collector_request(coll_id_str): return _create_collector_request def create_collector_request_a(): - _create_collector_request_a = telemetry_frontend_pb2.Collector() - _create_collector_request_a.kpi_id.kpi_id.uuid = "-1" + _create_collector_request_a = telemetry_frontend_pb2.Collector() + _create_collector_request_a.collector_id.collector_id.uuid = "-1" return _create_collector_request_a def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s): - _create_collector_request_b = telemetry_frontend_pb2.Collector() - _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id - _create_collector_request_b.duration_s = coll_duration_s - _create_collector_request_b.interval_s = coll_interval_s + _create_collector_request_b = telemetry_frontend_pb2.Collector() + _create_collector_request_b.collector_id.collector_id.uuid = '-1' + _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id + _create_collector_request_b.duration_s = coll_duration_s + _create_collector_request_b.interval_s = coll_interval_s return _create_collector_request_b def create_collector_filter(): -- GitLab From 0bcb839ea4cf561431b3a7111de4bde13e942fdb Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:52:53 +0000 Subject: [PATCH 104/205] Two imports were missing --- src/telemetry/frontend/service/__main__.py | 71 ++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 src/telemetry/frontend/service/__main__.py diff --git a/src/telemetry/frontend/service/__main__.py b/src/telemetry/frontend/service/__main__.py new file mode 100644 index 000000000..afc381e09 --- /dev/null +++ b/src/telemetry/frontend/service/__main__.py @@ -0,0 +1,71 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
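+# NOTE: start_http_server() is called in main() below but is not imported in
+# this file; it comes from prometheus_client.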
+ +import signal +import sys +import logging, threading +from .TelemetryFrontendService import TelemetryFrontendService +from monitoring.service.NameMapping import NameMapping +from monitoring.service.EventTools import EventsDeviceCollector +from common.Settings import ( + get_log_level, wait_for_environment_variables, get_env_var_name, + get_metrics_port ) + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def main(): + global LOGGER + + log_level = get_log_level() + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + +# ------- will be added later -------------- + # wait_for_environment_variables([ + # get_env_var_name + + + # ]) +# ------- will be added later -------------- + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + name_mapping = NameMapping() + + grpc_service = TelemetryFrontendService(name_mapping) + grpc_service.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) \ No newline at end of file -- GitLab From f05d40206e1137c3d10de79b5203bd981d8ff87f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 13:55:10 +0000 Subject: [PATCH 105/205] folder name changed from "telemetry_frontend" to "frontend", and "client", "service" and "tests" move inside the "frontend" --- .../__init__.py | 0 .../backend/__init__.py | 0 .../backend/service/KafkaProducerService.py | 0 .../service/KafkaProducerServiceImpl.py | 0 .../backend/service/__init__.py | 0 .../backend/tests/__init__.py | 0 .../backend/tests/messagesBackend.py | 0 .../backend/tests/test_kafka_backend.py | 6 +- src/telemetry/frontend/__init__.py | 15 ++++ .../client/TelemetryFrontendClient.py | 0 .../frontend}/client/__init__.py | 0 .../service/TelemetryFrontendService.py | 2 +- .../TelemetryFrontendServiceServicerImpl.py | 0 .../frontend}/service/__init__.py | 0 .../frontend}/tests/Messages.py | 0 .../frontend}/tests/__init__.py | 0 .../frontend}/tests/test_unitary.py | 6 +- .../requirements.in | 0 .../telemetry_virenv.txt | 0 src/telemetry_frontend/service/__main__.py | 69 ------------------- 20 files changed, 22 insertions(+), 76 deletions(-) rename src/{telemetry_frontend => telemetry}/__init__.py (100%) rename src/{telemetry_frontend => telemetry}/backend/__init__.py (100%) rename src/{telemetry_frontend => telemetry}/backend/service/KafkaProducerService.py (100%) rename src/{telemetry_frontend => telemetry}/backend/service/KafkaProducerServiceImpl.py (100%) rename src/{telemetry_frontend => telemetry}/backend/service/__init__.py (100%) rename src/{telemetry_frontend => telemetry}/backend/tests/__init__.py (100%) rename src/{telemetry_frontend => telemetry}/backend/tests/messagesBackend.py (100%) rename src/{telemetry_frontend => telemetry}/backend/tests/test_kafka_backend.py (87%) create mode 100644 src/telemetry/frontend/__init__.py rename src/{telemetry_frontend => telemetry/frontend}/client/TelemetryFrontendClient.py (100%) rename src/{telemetry_frontend => telemetry/frontend}/client/__init__.py (100%) rename src/{telemetry_frontend 
=> telemetry/frontend}/service/TelemetryFrontendService.py (95%) rename src/{telemetry_frontend => telemetry/frontend}/service/TelemetryFrontendServiceServicerImpl.py (100%) rename src/{telemetry_frontend => telemetry/frontend}/service/__init__.py (100%) rename src/{telemetry_frontend => telemetry/frontend}/tests/Messages.py (100%) rename src/{telemetry_frontend => telemetry/frontend}/tests/__init__.py (100%) rename src/{telemetry_frontend => telemetry/frontend}/tests/test_unitary.py (97%) rename src/{telemetry_frontend => telemetry}/requirements.in (100%) rename src/{telemetry_frontend => telemetry}/telemetry_virenv.txt (100%) delete mode 100644 src/telemetry_frontend/service/__main__.py diff --git a/src/telemetry_frontend/__init__.py b/src/telemetry/__init__.py similarity index 100% rename from src/telemetry_frontend/__init__.py rename to src/telemetry/__init__.py diff --git a/src/telemetry_frontend/backend/__init__.py b/src/telemetry/backend/__init__.py similarity index 100% rename from src/telemetry_frontend/backend/__init__.py rename to src/telemetry/backend/__init__.py diff --git a/src/telemetry_frontend/backend/service/KafkaProducerService.py b/src/telemetry/backend/service/KafkaProducerService.py similarity index 100% rename from src/telemetry_frontend/backend/service/KafkaProducerService.py rename to src/telemetry/backend/service/KafkaProducerService.py diff --git a/src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py b/src/telemetry/backend/service/KafkaProducerServiceImpl.py similarity index 100% rename from src/telemetry_frontend/backend/service/KafkaProducerServiceImpl.py rename to src/telemetry/backend/service/KafkaProducerServiceImpl.py diff --git a/src/telemetry_frontend/backend/service/__init__.py b/src/telemetry/backend/service/__init__.py similarity index 100% rename from src/telemetry_frontend/backend/service/__init__.py rename to src/telemetry/backend/service/__init__.py diff --git a/src/telemetry_frontend/backend/tests/__init__.py b/src/telemetry/backend/tests/__init__.py similarity index 100% rename from src/telemetry_frontend/backend/tests/__init__.py rename to src/telemetry/backend/tests/__init__.py diff --git a/src/telemetry_frontend/backend/tests/messagesBackend.py b/src/telemetry/backend/tests/messagesBackend.py similarity index 100% rename from src/telemetry_frontend/backend/tests/messagesBackend.py rename to src/telemetry/backend/tests/messagesBackend.py diff --git a/src/telemetry_frontend/backend/tests/test_kafka_backend.py b/src/telemetry/backend/tests/test_kafka_backend.py similarity index 87% rename from src/telemetry_frontend/backend/tests/test_kafka_backend.py rename to src/telemetry/backend/tests/test_kafka_backend.py index e64a65ccd..05174da2b 100644 --- a/src/telemetry_frontend/backend/tests/test_kafka_backend.py +++ b/src/telemetry/backend/tests/test_kafka_backend.py @@ -17,9 +17,9 @@ import logging from typing import Tuple from common.proto.context_pb2 import Empty -from src.telemetry_frontend.tests.Messages import create_collector_request, create_collector_id -from src.telemetry_frontend.backend.service.KafkaProducerService import KafkaProducerService -from src.telemetry_frontend.backend.service.KafkaProducerServiceImpl import KafkaProducerServiceImpl +from src.telemetry.frontend.tests.Messages import create_collector_request, create_collector_id +from src.telemetry.backend.service.KafkaProducerService import KafkaProducerService +from src.telemetry.backend.service.KafkaProducerServiceImpl import KafkaProducerServiceImpl LOGGER = 
logging.getLogger(__name__) diff --git a/src/telemetry/frontend/__init__.py b/src/telemetry/frontend/__init__.py new file mode 100644 index 000000000..6a8f39746 --- /dev/null +++ b/src/telemetry/frontend/__init__.py @@ -0,0 +1,15 @@ + +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/telemetry_frontend/client/TelemetryFrontendClient.py b/src/telemetry/frontend/client/TelemetryFrontendClient.py similarity index 100% rename from src/telemetry_frontend/client/TelemetryFrontendClient.py rename to src/telemetry/frontend/client/TelemetryFrontendClient.py diff --git a/src/telemetry_frontend/client/__init__.py b/src/telemetry/frontend/client/__init__.py similarity index 100% rename from src/telemetry_frontend/client/__init__.py rename to src/telemetry/frontend/client/__init__.py diff --git a/src/telemetry_frontend/service/TelemetryFrontendService.py b/src/telemetry/frontend/service/TelemetryFrontendService.py similarity index 95% rename from src/telemetry_frontend/service/TelemetryFrontendService.py rename to src/telemetry/frontend/service/TelemetryFrontendService.py index a0ae704d3..522d125e6 100644 --- a/src/telemetry_frontend/service/TelemetryFrontendService.py +++ b/src/telemetry/frontend/service/TelemetryFrontendService.py @@ -17,7 +17,7 @@ from common.Settings import get_service_port_grpc from monitoring.service.NameMapping import NameMapping from common.tools.service.GenericGrpcService import GenericGrpcService from common.proto.telemetry_frontend_pb2_grpc import add_TelemetryFrontendServiceServicer_to_server -from telemetry_frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl +from telemetry.frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl class TelemetryFrontendService(GenericGrpcService): diff --git a/src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py similarity index 100% rename from src/telemetry_frontend/service/TelemetryFrontendServiceServicerImpl.py rename to src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py diff --git a/src/telemetry_frontend/service/__init__.py b/src/telemetry/frontend/service/__init__.py similarity index 100% rename from src/telemetry_frontend/service/__init__.py rename to src/telemetry/frontend/service/__init__.py diff --git a/src/telemetry_frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py similarity index 100% rename from src/telemetry_frontend/tests/Messages.py rename to src/telemetry/frontend/tests/Messages.py diff --git a/src/telemetry_frontend/tests/__init__.py b/src/telemetry/frontend/tests/__init__.py similarity index 100% rename from src/telemetry_frontend/tests/__init__.py rename to src/telemetry/frontend/tests/__init__.py diff --git a/src/telemetry_frontend/tests/test_unitary.py b/src/telemetry/frontend/tests/test_unitary.py similarity index 
97% rename from src/telemetry_frontend/tests/test_unitary.py rename to src/telemetry/frontend/tests/test_unitary.py index 68467590f..312695659 100644 --- a/src/telemetry_frontend/tests/test_unitary.py +++ b/src/telemetry/frontend/tests/test_unitary.py @@ -27,9 +27,9 @@ from common.tests.MockServicerImpl_Context import MockServicerImpl_Context from common.Settings import ( get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) -from telemetry_frontend.client.TelemetryFrontendClient import TelemetryFrontendClient -from telemetry_frontend.service.TelemetryFrontendService import TelemetryFrontendService -from telemetry_frontend.tests.Messages import ( create_collector_id, create_collector_request, +from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient +from telemetry.frontend.service.TelemetryFrontendService import TelemetryFrontendService +from telemetry.frontend.tests.Messages import ( create_collector_id, create_collector_request, create_collector_filter, create_collector_request_a, create_collector_request_b) from device.client.DeviceClient import DeviceClient diff --git a/src/telemetry_frontend/requirements.in b/src/telemetry/requirements.in similarity index 100% rename from src/telemetry_frontend/requirements.in rename to src/telemetry/requirements.in diff --git a/src/telemetry_frontend/telemetry_virenv.txt b/src/telemetry/telemetry_virenv.txt similarity index 100% rename from src/telemetry_frontend/telemetry_virenv.txt rename to src/telemetry/telemetry_virenv.txt diff --git a/src/telemetry_frontend/service/__main__.py b/src/telemetry_frontend/service/__main__.py deleted file mode 100644 index 9b5fe70de..000000000 --- a/src/telemetry_frontend/service/__main__.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging, threading -from .TelemetryFrontendService import TelemetryFrontendService -from monitoring.service.NameMapping import NameMapping -from monitoring.service.EventTools import EventsDeviceCollector -from common.Settings import ( - get_log_level, wait_for_environment_variables, get_env_var_name, - get_metrics_port ) - -terminate = threading.Event() -LOGGER = None - -def signal_handler(signal, frame): # pylint: disable=redefined-outer-name - LOGGER.warning('Terminate signal received') - terminate.set() - -def main(): - global LOGGER - - log_level = get_log_level() - logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") - LOGGER = logging.getLogger(__name__) - -# ------- will be added later -------------- - # wait_for_environment_variables([ - # get_env_var_name - - - # ]) -# ------- will be added later -------------- - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - LOGGER.info('Starting...') - - # Start metrics server - metrics_port = get_metrics_port() - start_http_server(metrics_port) - - name_mapping = NameMapping() - - grpc_service = TelemetryFrontendService(name_mapping) - grpc_service.start() - - # Wait for Ctrl+C or termination signal - while not terminate.wait(timeout=1.0): pass - - LOGGER.info('Terminating...') - grpc_service.stop() - - LOGGER.info('Bye') - return 0 - -if __name__ == '__main__': - sys.exit(main()) \ No newline at end of file -- GitLab From 10a7540ceab14adb8abad0c751bb3207dcdb90bf Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 8 May 2024 14:20:48 +0000 Subject: [PATCH 106/205] "KafkaProducerService" and "KafkaProducerServiceImpl" class name changed to "TelemetryBackendService" and "TelemetryBackendServiceImpl" respectivily --- ...rService.py => TelemetryBackendService.py} | 18 +---- ...Impl.py => TelemetryBackendServiceImpl.py} | 9 +-- src/telemetry/backend/service/__main__.py | 72 +++++++++++++++++++ .../backend/tests/test_kafka_backend.py | 12 ++-- src/telemetry/frontend/service/__main__.py | 3 +- 5 files changed, 82 insertions(+), 32 deletions(-) rename src/telemetry/backend/service/{KafkaProducerService.py => TelemetryBackendService.py} (64%) rename src/telemetry/backend/service/{KafkaProducerServiceImpl.py => TelemetryBackendServiceImpl.py} (96%) create mode 100644 src/telemetry/backend/service/__main__.py diff --git a/src/telemetry/backend/service/KafkaProducerService.py b/src/telemetry/backend/service/TelemetryBackendService.py similarity index 64% rename from src/telemetry/backend/service/KafkaProducerService.py rename to src/telemetry/backend/service/TelemetryBackendService.py index 0a76b2d99..8e6fb243e 100755 --- a/src/telemetry/backend/service/KafkaProducerService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -12,9 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .KafkaProducerServiceImpl import KafkaProducerServiceImpl - -class KafkaProducerService: +class TelemetryBackendService: """ Class to control Kafka producer functionality. """ @@ -35,19 +33,5 @@ class KafkaProducerService: } return create_kafka_configs - def run_producer(self): - """ - Method to create KafkaProducerServiceImpl object and start producer. 
- """ - # Create NodeExporterProducer object and run start_producer_thread - producer = KafkaProducerServiceImpl(self.bootstrap_servers, self.node_exporter_endpoint, - self.kafka_topic, self.run_duration, self.fetch_interval - ) - # producer.start_producer_thread() # if threading is required - producer.produce_metrics() # if threading is not required -if __name__ == "__main__": - # Create Kafka producer service object and run producer - kafka_controller = KafkaProducerService() - kafka_controller.run_producer() diff --git a/src/telemetry/backend/service/KafkaProducerServiceImpl.py b/src/telemetry/backend/service/TelemetryBackendServiceImpl.py similarity index 96% rename from src/telemetry/backend/service/KafkaProducerServiceImpl.py rename to src/telemetry/backend/service/TelemetryBackendServiceImpl.py index da5513170..ea57f6167 100755 --- a/src/telemetry/backend/service/KafkaProducerServiceImpl.py +++ b/src/telemetry/backend/service/TelemetryBackendServiceImpl.py @@ -27,7 +27,7 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') -class KafkaProducerServiceImpl: +class TelemetryBackendServiceImpl: """ Class to fetch metrics from Exporter and produce them to Kafka. """ @@ -171,12 +171,5 @@ class KafkaProducerServiceImpl: finally: kafka_producer.flush() # kafka_producer.close() # this command generates ERROR - # --- - def start_producer_thread(self): - """ - Method to start the producer thread. - """ - producer_thread = threading.Thread(target=self.produce_metrics) - producer_thread.start() # ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter ----------- \ No newline at end of file diff --git a/src/telemetry/backend/service/__main__.py b/src/telemetry/backend/service/__main__.py new file mode 100644 index 000000000..10c3f76d1 --- /dev/null +++ b/src/telemetry/backend/service/__main__.py @@ -0,0 +1,72 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +import signal +import logging +import threading +from prometheus_client import start_http_server +from monitoring.service.NameMapping import NameMapping +from .KafkaProducerService import KafkaProducerService +from common.Settings import ( + get_log_level, + get_metrics_port) + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def main(): + global LOGGER + + log_level = get_log_level() + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + +# ------- will be added later -------------- + # wait_for_environment_variables([ + # get_env_var_name + + + # ]) +# ------- will be added later -------------- + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting Telemetry Backend...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + name_mapping = NameMapping() + + grpc_service = KafkaProducerService(name_mapping) + grpc_service.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) \ No newline at end of file diff --git a/src/telemetry/backend/tests/test_kafka_backend.py b/src/telemetry/backend/tests/test_kafka_backend.py index 05174da2b..ac49bc30f 100644 --- a/src/telemetry/backend/tests/test_kafka_backend.py +++ b/src/telemetry/backend/tests/test_kafka_backend.py @@ -18,8 +18,8 @@ import logging from typing import Tuple from common.proto.context_pb2 import Empty from src.telemetry.frontend.tests.Messages import create_collector_request, create_collector_id -from src.telemetry.backend.service.KafkaProducerService import KafkaProducerService -from src.telemetry.backend.service.KafkaProducerServiceImpl import KafkaProducerServiceImpl +from src.telemetry.backend.service.TelemetryBackendService import TelemetryBackendService +from src.telemetry.backend.service.TelemetryBackendServiceImpl import TelemetryBackendServiceImpl LOGGER = logging.getLogger(__name__) @@ -29,14 +29,14 @@ LOGGER = logging.getLogger(__name__) ########################### def test_get_kafka_configs(): LOGGER.warning('test_get_kafka_configs requesting') - KafkaProducerServiceObj = KafkaProducerService() + KafkaProducerServiceObj = TelemetryBackendService() response = KafkaProducerServiceObj.generate_kafka_configs() LOGGER.debug(str(response)) assert isinstance(response, dict) def test_export_collector_value(): LOGGER.warning('test_export_collector_value requesting') - response = KafkaProducerServiceImpl.export_collector_value( + response = TelemetryBackendServiceImpl.export_collector_value( create_collector_request('1') ) LOGGER.debug(str(response)) @@ -44,7 +44,7 @@ def test_export_collector_value(): def test_write_to_kafka(): LOGGER.warning('test_write_to_kafka requesting') - collector_value = KafkaProducerServiceImpl.export_collector_value(create_collector_request('1')) - response = KafkaProducerServiceImpl.write_to_kafka(collector_value) # type: ignore (don't know why warning here) + collector_value = TelemetryBackendServiceImpl.export_collector_value(create_collector_request('1')) + response = TelemetryBackendServiceImpl.write_to_kafka(collector_value) # type: ignore (don't know why warning here) 
LOGGER.debug(str(response)) assert isinstance(response, Empty) diff --git a/src/telemetry/frontend/service/__main__.py b/src/telemetry/frontend/service/__main__.py index afc381e09..0f48a4de1 100644 --- a/src/telemetry/frontend/service/__main__.py +++ b/src/telemetry/frontend/service/__main__.py @@ -15,8 +15,9 @@ import signal import sys import logging, threading -from .TelemetryFrontendService import TelemetryFrontendService +from prometheus_client import start_http_server from monitoring.service.NameMapping import NameMapping +from .TelemetryFrontendService import TelemetryFrontendService from monitoring.service.EventTools import EventsDeviceCollector from common.Settings import ( get_log_level, wait_for_environment_variables, get_env_var_name, -- GitLab From f52e3126a4de88469d949245e8ca5fe91166dbf0 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 9 May 2024 11:03:38 +0000 Subject: [PATCH 107/205] Telemetry Backend test path updated --- scripts/run_tests_locally-telemetry-backend.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh index cbebd6807..34e9e0542 100755 --- a/scripts/run_tests_locally-telemetry-backend.sh +++ b/scripts/run_tests_locally-telemetry-backend.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --verbose \ - telemetry_frontend/backend/tests/test_kafka_backend.py \ No newline at end of file + telemetry/backend/tests/testTelemetryBackend.py \ No newline at end of file -- GitLab From d2df9b75f9ad52dc40a6e139ebadfd117b2efc16 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 9 May 2024 11:05:52 +0000 Subject: [PATCH 108/205] Not required in service --- src/telemetry/backend/service/__main__.py | 72 ----------------------- 1 file changed, 72 deletions(-) delete mode 100644 src/telemetry/backend/service/__main__.py diff --git a/src/telemetry/backend/service/__main__.py b/src/telemetry/backend/service/__main__.py deleted file mode 100644 index 10c3f76d1..000000000 --- a/src/telemetry/backend/service/__main__.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import signal -import logging -import threading -from prometheus_client import start_http_server -from monitoring.service.NameMapping import NameMapping -from .KafkaProducerService import KafkaProducerService -from common.Settings import ( - get_log_level, - get_metrics_port) - -terminate = threading.Event() -LOGGER = None - -def signal_handler(signal, frame): # pylint: disable=redefined-outer-name - LOGGER.warning('Terminate signal received') - terminate.set() - -def main(): - global LOGGER - - log_level = get_log_level() - logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") - LOGGER = logging.getLogger(__name__) - -# ------- will be added later -------------- - # wait_for_environment_variables([ - # get_env_var_name - - - # ]) -# ------- will be added later -------------- - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - LOGGER.info('Starting Telemetry Backend...') - - # Start metrics server - metrics_port = get_metrics_port() - start_http_server(metrics_port) - - name_mapping = NameMapping() - - grpc_service = KafkaProducerService(name_mapping) - grpc_service.start() - - # Wait for Ctrl+C or termination signal - while not terminate.wait(timeout=1.0): pass - - LOGGER.info('Terminating...') - grpc_service.stop() - - LOGGER.info('Bye') - return 0 - -if __name__ == '__main__': - sys.exit(main()) \ No newline at end of file -- GitLab From 45f62a560db3115e343fbd04cae7ac8617a8d184 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 9 May 2024 11:07:55 +0000 Subject: [PATCH 109/205] basic defination is added into both "export_collector_value" and "write_to_kafka" function --- .../service/TelemetryBackendServiceImpl.py | 35 +++++++++++++------ 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendServiceImpl.py b/src/telemetry/backend/service/TelemetryBackendServiceImpl.py index ea57f6167..abcc30baf 100755 --- a/src/telemetry/backend/service/TelemetryBackendServiceImpl.py +++ b/src/telemetry/backend/service/TelemetryBackendServiceImpl.py @@ -16,16 +16,17 @@ import time import logging import requests -import threading from typing import Tuple from common.proto.context_pb2 import Empty -from confluent_kafka import Producer, KafkaException +from confluent_kafka import Producer as KafkaProducer +from confluent_kafka import KafkaException from confluent_kafka.admin import AdminClient, NewTopic from common.proto.telemetry_frontend_pb2 import Collector, CollectorId from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') +ACTIVE_KAFKA_PRODUCERS = [] # list of active kafka producers class TelemetryBackendServiceImpl: """ @@ -51,17 +52,29 @@ class TelemetryBackendServiceImpl: self.fetch_interval = fetch_interval # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def export_collector_value(request : Collector) -> Tuple[str, str]: # type: ignore - response = Tuple[str, str] - response = ('test collector Id', 'test collected value') # Metric to be fetched from endpoint based on Collector message + def export_collector_value(self, request : Collector) -> Tuple[str, str]: # type: ignore + response = Tuple[str, str] + collector_id = str('test collector Id') + collected_Value = str('test collected value') # Metric to be fetched from endpoint based on Collector message + response = (collector_id, collected_Value) return 
response # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def write_to_kafka(request: Tuple[str, str]) -> Empty: # type: ignore + def write_to_kafka(self, request: Tuple[str, str]) -> KafkaProducer: + (collector_id, collector_value) = request + response = KafkaProducer({'bootstrap.servers': self.bootstrap_servers}) # _collector_id, _collector_id_value = request # write collector_id and collector_id value on the Kafka topic - return Empty() + + # get kafka bootstrap server and topic name + # write to kafka topic + return response + + def stop_producer(self, request: KafkaProducer) -> Empty: # type: ignore + # flush and close kafka producer object + return Empty() + # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- def fetch_node_exporter_metrics(self): @@ -72,7 +85,7 @@ class TelemetryBackendServiceImpl: """ KPI = "node_network_receive_packets_total" try: - response = requests.get(self.exporter_endpoint) + response = requests.get(self.exporter_endpoint) # type: ignore if response.status_code == 200: # print(f"Metrics fetched sucessfully...") metrics = response.text @@ -148,7 +161,7 @@ class TelemetryBackendServiceImpl: admin_client = AdminClient(conf) self.create_topic_if_not_exists(admin_client) - kafka_producer = Producer(conf) + kafka_producer = KafkaProducer(conf) try: start_time = time.time() @@ -161,11 +174,11 @@ class TelemetryBackendServiceImpl: # print("Metrics produced to Kafka topic") # Check if the specified run duration has elapsed - if time.time() - start_time >= self.run_duration: + if time.time() - start_time >= self.run_duration: # type: ignore break # waiting time until next fetch - time.sleep(self.fetch_interval) + time.sleep(self.fetch_interval) # type: ignore except KeyboardInterrupt: print("Keyboard interrupt detected. 
Exiting...") finally: -- GitLab From 6bb6d617ce7fea549f5506beb57989d9d6033f6c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 9 May 2024 11:08:43 +0000 Subject: [PATCH 110/205] file nae change to "testTelemetryBackend" --- ...fka_backend.py => testTelemetryBackend.py} | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) rename src/telemetry/backend/tests/{test_kafka_backend.py => testTelemetryBackend.py} (63%) diff --git a/src/telemetry/backend/tests/test_kafka_backend.py b/src/telemetry/backend/tests/testTelemetryBackend.py similarity index 63% rename from src/telemetry/backend/tests/test_kafka_backend.py rename to src/telemetry/backend/tests/testTelemetryBackend.py index ac49bc30f..8c3fbd247 100644 --- a/src/telemetry/backend/tests/test_kafka_backend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -16,6 +16,7 @@ # print (sys.path) import logging from typing import Tuple +from confluent_kafka import Producer as KafkaProducer from common.proto.context_pb2 import Empty from src.telemetry.frontend.tests.Messages import create_collector_request, create_collector_id from src.telemetry.backend.service.TelemetryBackendService import TelemetryBackendService @@ -29,22 +30,30 @@ LOGGER = logging.getLogger(__name__) ########################### def test_get_kafka_configs(): LOGGER.warning('test_get_kafka_configs requesting') - KafkaProducerServiceObj = TelemetryBackendService() - response = KafkaProducerServiceObj.generate_kafka_configs() + TelemetryBackendServiceObj = TelemetryBackendService() + response = TelemetryBackendServiceObj.generate_kafka_configs() LOGGER.debug(str(response)) assert isinstance(response, dict) def test_export_collector_value(): LOGGER.warning('test_export_collector_value requesting') - response = TelemetryBackendServiceImpl.export_collector_value( - create_collector_request('1') - ) + TelemetryBackendServiceObj = TelemetryBackendServiceImpl() + response = TelemetryBackendServiceObj.export_collector_value(create_collector_request('1')) LOGGER.debug(str(response)) assert isinstance(response, Tuple) def test_write_to_kafka(): LOGGER.warning('test_write_to_kafka requesting') - collector_value = TelemetryBackendServiceImpl.export_collector_value(create_collector_request('1')) - response = TelemetryBackendServiceImpl.write_to_kafka(collector_value) # type: ignore (don't know why warning here) + TelemetryBackendServiceObj = TelemetryBackendServiceImpl() + _collector_value = TelemetryBackendServiceObj.export_collector_value(create_collector_request('1')) + response = TelemetryBackendServiceObj.write_to_kafka(_collector_value) LOGGER.debug(str(response)) - assert isinstance(response, Empty) + assert isinstance(response, KafkaProducer) + +def test_stop_producer(): + LOGGER.warning('test_write_to_kafka requesting') + _kafka_configs = {'bootstrap.servers': '127.0.0.1:9092'} + TelemetryBackendServiceObj = TelemetryBackendServiceImpl() + response = TelemetryBackendServiceObj.stop_producer(KafkaProducer(_kafka_configs)) + LOGGER.debug(str(response)) + assert isinstance(response, Empty) \ No newline at end of file -- GitLab From 388708ff213efdebd2e579aebd07908e002279c8 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 11:24:36 +0000 Subject: [PATCH 111/205] New code added to Telemetry Backend to read from Kafka --- .../service/TelemetryBackendService.py | 215 +++++++++++++++++- 1 file changed, 203 insertions(+), 12 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py 
b/src/telemetry/backend/service/TelemetryBackendService.py index 8e6fb243e..4c76917c8 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -1,3 +1,4 @@ + # Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,26 +13,216 @@ # See the License for the specific language governing permissions and # limitations under the License. +import time +import logging +import requests +import threading +from typing import Tuple +from common.proto.context_pb2 import Empty +from confluent_kafka import Producer as KafkaProducer +from confluent_kafka import Consumer as KafkaConsumer +from confluent_kafka import KafkaException +from confluent_kafka import KafkaError +from confluent_kafka.admin import AdminClient, NewTopic +from common.proto.telemetry_frontend_pb2 import Collector, CollectorId +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method + +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') +KAFKA_SERVER_IP = '127.0.0.1:9092' + class TelemetryBackendService: """ - Class to control Kafka producer functionality. + Class to fetch metrics from Exporter and produce them to Kafka. """ - def __init__(self): - pass + def __init__(self, bootstrap_servers='127.0.0.1:9092', exporter_endpoint=None, + kafka_topic=None, run_duration=None, fetch_interval=None): + """ + Constructor to initialize Kafka producer parameters. + Args: + bootstrap_servers (str): Kafka broker address. + exporter_endpoint (str): Node Exporter metrics endpoint. + kafka_topic (str): Kafka topic to produce metrics to. + run_interval (int): Time interval in seconds to run the producer. + """ + LOGGER.info('Init TelemetryBackendService') - def generate_kafka_configs(self): + self.bootstrap_servers = bootstrap_servers + self.exporter_endpoint = exporter_endpoint + self.kafka_topic = kafka_topic + self.run_duration = run_duration + self.fetch_interval = fetch_interval + + def receive_kafka_request(self, + ): # type: ignore """ - Method to generate Kafka configurations + Method to receive collector request on Kafka topic. 
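+        (Sketch: subscribes a confluent-kafka Consumer to the hard-coded
+        'topic_request' topic and keeps polling; empty polls stop the loop
+        once roughly 10 seconds have elapsed since the start.)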
""" - create_kafka_configs = { - 'bootstrap_servers' : "test_server", # Kafka broker address - Replace with your Kafka broker address - 'exporter_endpoint' : "test_exporter", # Node Exporter metrics endpoint - Replace with your Node Exporter endpoint - 'kafka_topic' : "test_kafka_topic", # Kafka topic to produce to - 'run_duration' : 10, # Total duration to execute the producer - 'fetch_interval' : 2 # Time between two fetch requests + conusmer_configs = { + 'bootstrap.servers' : KAFKA_SERVER_IP, + 'group.id' : 'consumer', + 'auto.offset.reset' : 'earliest' } - return create_kafka_configs + topic_request = "topic_request" + + consumerObj = KafkaConsumer(conusmer_configs) + consumerObj.subscribe([topic_request]) + + start_time = time.time() + while True: + receive_msg = consumerObj.poll(1.0) + if receive_msg is None: + print ("nothing to read ...", time.time() - start_time) + if time.time() - start_time >= 10: # type: ignore + print("Timeout: consumer terminated") + break + continue + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + break + print ("Received Message: ", receive_msg.value().decode('utf-8')) + + def execute_receive_kafka_request(self + )->Empty: # type: ignore + threading.Thread(target=self.receive_kafka_request).start() + return True + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def export_collector_value(self, request : Collector) -> Tuple[str, str]: # type: ignore + response = Tuple[str, str] + collector_id = str('test collector Id') + collected_Value = str('test collected value') # Metric to be fetched from endpoint based on Collector message + response = (collector_id, collected_Value) + return response + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def write_to_kafka(self, request: Tuple[str, str]) -> KafkaProducer: + (collector_id, collector_value) = request + response = KafkaProducer({'bootstrap.servers': self.bootstrap_servers}) + # _collector_id, _collector_id_value = request + # write collector_id and collector_id value on the Kafka topic + + # get kafka bootstrap server and topic name + # write to kafka topic + + return response + + def stop_producer(self, request: KafkaProducer) -> Empty: # type: ignore + # flush and close kafka producer object + return Empty() + +# ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- + + def fetch_node_exporter_metrics(self): + """ + Method to fetch metrics from Node Exporter. + Returns: + str: Metrics fetched from Node Exporter. + """ + KPI = "node_network_receive_packets_total" + try: + response = requests.get(self.exporter_endpoint) # type: ignore + if response.status_code == 200: + # print(f"Metrics fetched sucessfully...") + metrics = response.text + # Check if the desired metric is available in the response + if KPI in metrics: + KPI_VALUE = self.extract_metric_value(metrics, KPI) + # Extract the metric value + if KPI_VALUE is not None: + print(f"KPI value: {KPI_VALUE}") + return KPI_VALUE + else: + print(f"Failed to fetch metrics. Status code: {response.status_code}") + return None + except Exception as e: + print(f"Failed to fetch metrics: {str(e)}") + return None + + def extract_metric_value(self, metrics, metric_name): + """ + Method to extract the value of a metric from the metrics string. + Args: + metrics (str): Metrics string fetched from Node Exporter. + metric_name (str): Name of the metric to extract. 
+ Returns: + float: Value of the extracted metric, or None if not found. + """ + try: + # Find the metric line containing the desired metric name + metric_line = next(line for line in metrics.split('\n') if line.startswith(metric_name)) + # Split the line to extract the metric value + metric_value = float(metric_line.split()[1]) + return metric_value + except StopIteration: + print(f"Metric '{metric_name}' not found in the metrics.") + return None + + def delivery_callback(self, err, msg): + """ + Callback function to handle message delivery status. + Args: + err (KafkaError): Kafka error object. + msg (Message): Kafka message object. + """ + if err: + print(f'Message delivery failed: {err}') + else: + print(f'Message delivered to topic {msg.topic()}') + + def create_topic_if_not_exists(self, admin_client): + """ + Method to create Kafka topic if it does not exist. + Args: + admin_client (AdminClient): Kafka admin client. + """ + try: + topic_metadata = admin_client.list_topics(timeout=5) + if self.kafka_topic not in topic_metadata.topics: + # If the topic does not exist, create a new topic + print(f"Topic '{self.kafka_topic}' does not exist. Creating...") + new_topic = NewTopic(self.kafka_topic, num_partitions=1, replication_factor=1) + admin_client.create_topics([new_topic]) + except KafkaException as e: + print(f"Failed to create topic: {e}") + + def produce_metrics(self): + """ + Method to produce metrics to Kafka topic as per Kafka configs. + """ + conf = { + 'bootstrap.servers': self.bootstrap_servers, + } + + admin_client = AdminClient(conf) + self.create_topic_if_not_exists(admin_client) + + kafka_producer = KafkaProducer(conf) + + try: + start_time = time.time() + while True: + metrics = self.fetch_node_exporter_metrics() # select the function name based on the provided requirements + + if metrics: + kafka_producer.produce(self.kafka_topic, str(metrics), callback=self.delivery_callback) + kafka_producer.flush() + # print("Metrics produced to Kafka topic") + # Check if the specified run duration has elapsed + if time.time() - start_time >= self.run_duration: # type: ignore + break + # waiting time until next fetch + time.sleep(self.fetch_interval) # type: ignore + except KeyboardInterrupt: + print("Keyboard interrupt detected. Exiting...") + finally: + kafka_producer.flush() + # kafka_producer.close() # this command generates ERROR +# ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter ----------- \ No newline at end of file -- GitLab From ba36d2f90aa16b587eb03cdaf23942b318387ae6 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 11:25:15 +0000 Subject: [PATCH 112/205] Add to generate request on Kafka topic --- .../TelemetryFrontendServiceServicerImpl.py | 74 +++++++++++++++---- 1 file changed, 58 insertions(+), 16 deletions(-) diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 498d07a91..518dd471d 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -12,42 +12,84 @@ # See the License for the specific language governing permissions and # limitations under the License. 
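The frontend code below publishes each collector request as a stringified Python tuple, and a later patch decodes it on the backend with ast.literal_eval. A minimal round-trip sketch of that wire format, assuming the tuple layout used in these patches (the helper names are illustrative, not part of the codebase):

import ast

def encode_request(kpi_id: str, duration_s: int, interval_s: int) -> bytes:
    # The patches serialize a request as str() of a plain tuple.
    return str((kpi_id, duration_s, interval_s)).encode('utf-8')

def decode_request(raw: bytes) -> tuple:
    # ast.literal_eval parses only Python literals, so it is safe for
    # untrusted message payloads (unlike eval).
    return ast.literal_eval(raw.decode('utf-8'))

assert decode_request(encode_request('kpi-1', 10, 2)) == ('kpi-1', 10, 2)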
+from typing import Tuple import grpc import logging from common.proto.context_pb2 import Empty from monitoring.service.NameMapping import NameMapping +from confluent_kafka import Producer as KafkaProducer +from confluent_kafka import KafkaException from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer -LOGGER = logging.getLogger(__name__) -METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') +KAFKA_SERVER_IP = '127.0.0.1:9092' class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self, name_mapping : NameMapping): LOGGER.info('Init TelemetryFrontendService') - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def StartCollector(self, request : Collector, grpc_context: grpc.ServicerContext # type: ignore - ) -> CollectorId: # type: ignore + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def StartCollector(self, + request : Collector, grpc_context: grpc.ServicerContext # type: ignore + ) -> CollectorId: # type: ignore + # push info to frontend db response = CollectorId() - _collector_id = request.collector_id - # collector_kpi_id = request.kpi_id - # collector_duration = request.duration_s - # collector_interval = request.interval_s + _collector_id = request.collector_id + _collector_kpi_id = str(request.kpi_id.kpi_id.uuid) + _collector_duration = int(request.duration_s) + _collector_interval = int(request.interval_s) + activeCollObj = self.generate_kafka_request(str(_collector_id), _collector_kpi_id, _collector_duration, _collector_interval) response.collector_id.uuid = _collector_id.collector_id.uuid return response @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def StopCollector(self, request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore - ) -> Empty: # type: ignore + def StopCollector(self, + request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore + ) -> Empty: # type: ignore request.collector_id.uuid = "" return Empty() - - def SelectCollectors(self, request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore - ) -> CollectorList: # type: ignore + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectCollectors(self, + request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore + ) -> CollectorList: # type: ignore response = CollectorList() - - return response \ No newline at end of file + return response + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def generate_kafka_request(self, + msg_key, kpi: str, duration : int, interval: int + ) -> KafkaProducer: + """ + Method to generate collector request to Kafka topic. + """ + producer_configs = { + 'bootstrap.servers': KAFKA_SERVER_IP, + 'group.id' : 'requester', + } + topic_request = "topic_request" + msg_value = Tuple [str, int, int] + msg_value = (kpi, duration, interval) + + producerObj = KafkaProducer(producer_configs) + producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + producerObj.flush() + return producerObj + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def delivery_callback(self, err, msg): + """ + Callback function to handle message delivery status. 
+ Args: + err (KafkaError): Kafka error object. + msg (Message): Kafka message object. + """ + if err: + print(f'Message delivery failed: {err}') + else: + print(f'Message delivered to topic {msg.topic()}') \ No newline at end of file -- GitLab From 2ee9d310ac63356c0ba928bb88ae1df9dc4b1912 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 20:27:45 +0000 Subject: [PATCH 113/205] Test file path updated --- scripts/run_tests_locally-telemetry-frontend.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_tests_locally-telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh index ac59f6dde..cccbcbc5b 100755 --- a/scripts/run_tests_locally-telemetry-frontend.sh +++ b/scripts/run_tests_locally-telemetry-frontend.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --verbose \ - telemetry_frontend/tests/test_unitary.py \ No newline at end of file + telemetry/frontend/tests/test_unitary.py \ No newline at end of file -- GitLab From 0e7ad11c32602efb5ca57fcc68d7b7dba7c8cb5f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:44:02 +0000 Subject: [PATCH 114/205] "Kafka_listener" is added --- .../service/TelemetryBackendService.py | 85 +++++++------------ 1 file changed, 30 insertions(+), 55 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 4c76917c8..2e8478db1 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import ast import time import logging import requests @@ -33,31 +34,15 @@ KAFKA_SERVER_IP = '127.0.0.1:9092' class TelemetryBackendService: """ - Class to fetch metrics from Exporter and produce them to Kafka. + Class to listens for request on Kafka topic, fetches metrics and produces measured values to another Kafka topic. """ - def __init__(self, bootstrap_servers='127.0.0.1:9092', exporter_endpoint=None, - kafka_topic=None, run_duration=None, fetch_interval=None): - """ - Constructor to initialize Kafka producer parameters. - Args: - bootstrap_servers (str): Kafka broker address. - exporter_endpoint (str): Node Exporter metrics endpoint. - kafka_topic (str): Kafka topic to produce metrics to. - run_interval (int): Time interval in seconds to run the producer. - """ + def __init__(self): LOGGER.info('Init TelemetryBackendService') - - self.bootstrap_servers = bootstrap_servers - self.exporter_endpoint = exporter_endpoint - self.kafka_topic = kafka_topic - self.run_duration = run_duration - self.fetch_interval = fetch_interval - def receive_kafka_request(self, - ): # type: ignore + def kafka_listener(self): """ - Method to receive collector request on Kafka topic. + listener for requests on Kafka topic. 
""" conusmer_configs = { 'bootstrap.servers' : KAFKA_SERVER_IP, @@ -69,14 +54,10 @@ class TelemetryBackendService: consumerObj = KafkaConsumer(conusmer_configs) consumerObj.subscribe([topic_request]) - start_time = time.time() while True: - receive_msg = consumerObj.poll(1.0) + receive_msg = consumerObj.poll(2.0) if receive_msg is None: - print ("nothing to read ...", time.time() - start_time) - if time.time() - start_time >= 10: # type: ignore - print("Timeout: consumer terminated") - break + print ("Telemetry backend listener is active: Kafka Topic: ", topic_request) # added for debugging purposes continue elif receive_msg.error(): if receive_msg.error().code() == KafkaError._PARTITION_EOF: @@ -84,36 +65,30 @@ class TelemetryBackendService: else: print("Consumer error: {}".format(receive_msg.error())) break - print ("Received Message: ", receive_msg.value().decode('utf-8')) - - def execute_receive_kafka_request(self - )->Empty: # type: ignore - threading.Thread(target=self.receive_kafka_request).start() - return True - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def export_collector_value(self, request : Collector) -> Tuple[str, str]: # type: ignore - response = Tuple[str, str] - collector_id = str('test collector Id') - collected_Value = str('test collected value') # Metric to be fetched from endpoint based on Collector message - response = (collector_id, collected_Value) - return response - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def write_to_kafka(self, request: Tuple[str, str]) -> KafkaProducer: - (collector_id, collector_value) = request - response = KafkaProducer({'bootstrap.servers': self.bootstrap_servers}) - # _collector_id, _collector_id_value = request - # write collector_id and collector_id value on the Kafka topic - - # get kafka bootstrap server and topic name - # write to kafka topic - - return response + (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) + self.execute_process_kafka_request(kpi_id, duration, interval) - def stop_producer(self, request: KafkaProducer) -> Empty: # type: ignore - # flush and close kafka producer object - return Empty() + def run_kafka_listener(self)->Empty: # type: ignore + threading.Thread(target=self.kafka_listener).start() + return True + + def process_kafka_request(self, kpi_id, duration, interval + ): # type: ignore + """ + Method to receive collector request attribues and initiates collecter backend. 
+ """ + start_time = time.time() + while True: + if time.time() - start_time >= duration: # type: ignore + print("Timeout: consumer terminated", time.time() - start_time) + break + # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) + print ("Telemetry Backend running for KPI: ", kpi_id, "after FETCH INTERVAL: ", interval) + time.sleep(interval) + + def execute_process_kafka_request(self, kpi_id: str, duration: int, interval: int): + threading.Thread(target=self.process_kafka_request, args=(kpi_id, duration, interval)).start() + # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- -- GitLab From 1e56397e8cf1209b0e4a1058e1bb146052a85bd6 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:44:33 +0000 Subject: [PATCH 115/205] name changed --- .../service/TelemetryBackendServiceImpl.py | 188 ------------------ 1 file changed, 188 deletions(-) delete mode 100755 src/telemetry/backend/service/TelemetryBackendServiceImpl.py diff --git a/src/telemetry/backend/service/TelemetryBackendServiceImpl.py b/src/telemetry/backend/service/TelemetryBackendServiceImpl.py deleted file mode 100755 index abcc30baf..000000000 --- a/src/telemetry/backend/service/TelemetryBackendServiceImpl.py +++ /dev/null @@ -1,188 +0,0 @@ - -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import logging -import requests -from typing import Tuple -from common.proto.context_pb2 import Empty -from confluent_kafka import Producer as KafkaProducer -from confluent_kafka import KafkaException -from confluent_kafka.admin import AdminClient, NewTopic -from common.proto.telemetry_frontend_pb2 import Collector, CollectorId -from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method - -LOGGER = logging.getLogger(__name__) -METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') -ACTIVE_KAFKA_PRODUCERS = [] # list of active kafka producers - -class TelemetryBackendServiceImpl: - """ - Class to fetch metrics from Exporter and produce them to Kafka. - """ - - def __init__(self, bootstrap_servers=None, exporter_endpoint=None, - kafka_topic=None, run_duration=None, fetch_interval=None): - """ - Constructor to initialize Kafka producer parameters. - Args: - bootstrap_servers (str): Kafka broker address. - exporter_endpoint (str): Node Exporter metrics endpoint. - kafka_topic (str): Kafka topic to produce metrics to. - run_interval (int): Time interval in seconds to run the producer. 
- """ - LOGGER.info('Init TelemetryBackendService') - - self.bootstrap_servers = bootstrap_servers - self.exporter_endpoint = exporter_endpoint - self.kafka_topic = kafka_topic - self.run_duration = run_duration - self.fetch_interval = fetch_interval - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def export_collector_value(self, request : Collector) -> Tuple[str, str]: # type: ignore - response = Tuple[str, str] - collector_id = str('test collector Id') - collected_Value = str('test collected value') # Metric to be fetched from endpoint based on Collector message - response = (collector_id, collected_Value) - return response - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def write_to_kafka(self, request: Tuple[str, str]) -> KafkaProducer: - (collector_id, collector_value) = request - response = KafkaProducer({'bootstrap.servers': self.bootstrap_servers}) - # _collector_id, _collector_id_value = request - # write collector_id and collector_id value on the Kafka topic - - # get kafka bootstrap server and topic name - # write to kafka topic - - return response - - def stop_producer(self, request: KafkaProducer) -> Empty: # type: ignore - # flush and close kafka producer object - return Empty() - -# ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- - - def fetch_node_exporter_metrics(self): - """ - Method to fetch metrics from Node Exporter. - Returns: - str: Metrics fetched from Node Exporter. - """ - KPI = "node_network_receive_packets_total" - try: - response = requests.get(self.exporter_endpoint) # type: ignore - if response.status_code == 200: - # print(f"Metrics fetched sucessfully...") - metrics = response.text - # Check if the desired metric is available in the response - if KPI in metrics: - KPI_VALUE = self.extract_metric_value(metrics, KPI) - # Extract the metric value - if KPI_VALUE is not None: - print(f"KPI value: {KPI_VALUE}") - return KPI_VALUE - else: - print(f"Failed to fetch metrics. Status code: {response.status_code}") - return None - except Exception as e: - print(f"Failed to fetch metrics: {str(e)}") - return None - - def extract_metric_value(self, metrics, metric_name): - """ - Method to extract the value of a metric from the metrics string. - Args: - metrics (str): Metrics string fetched from Node Exporter. - metric_name (str): Name of the metric to extract. - Returns: - float: Value of the extracted metric, or None if not found. - """ - try: - # Find the metric line containing the desired metric name - metric_line = next(line for line in metrics.split('\n') if line.startswith(metric_name)) - # Split the line to extract the metric value - metric_value = float(metric_line.split()[1]) - return metric_value - except StopIteration: - print(f"Metric '{metric_name}' not found in the metrics.") - return None - - def delivery_callback(self, err, msg): - """ - Callback function to handle message delivery status. - Args: - err (KafkaError): Kafka error object. - msg (Message): Kafka message object. - """ - if err: - print(f'Message delivery failed: {err}') - else: - print(f'Message delivered to topic {msg.topic()}') - - def create_topic_if_not_exists(self, admin_client): - """ - Method to create Kafka topic if it does not exist. - Args: - admin_client (AdminClient): Kafka admin client. - """ - try: - topic_metadata = admin_client.list_topics(timeout=5) - if self.kafka_topic not in topic_metadata.topics: - # If the topic does not exist, create a new topic - print(f"Topic '{self.kafka_topic}' does not exist. 
Creating...") - new_topic = NewTopic(self.kafka_topic, num_partitions=1, replication_factor=1) - admin_client.create_topics([new_topic]) - except KafkaException as e: - print(f"Failed to create topic: {e}") - - def produce_metrics(self): - """ - Method to produce metrics to Kafka topic as per Kafka configs. - """ - conf = { - 'bootstrap.servers': self.bootstrap_servers, - } - - admin_client = AdminClient(conf) - self.create_topic_if_not_exists(admin_client) - - kafka_producer = KafkaProducer(conf) - - try: - start_time = time.time() - while True: - metrics = self.fetch_node_exporter_metrics() # select the function name based on the provided requirements - - if metrics: - kafka_producer.produce(self.kafka_topic, str(metrics), callback=self.delivery_callback) - kafka_producer.flush() - # print("Metrics produced to Kafka topic") - - # Check if the specified run duration has elapsed - if time.time() - start_time >= self.run_duration: # type: ignore - break - - # waiting time until next fetch - time.sleep(self.fetch_interval) # type: ignore - except KeyboardInterrupt: - print("Keyboard interrupt detected. Exiting...") - finally: - kafka_producer.flush() - # kafka_producer.close() # this command generates ERROR - -# ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter ----------- \ No newline at end of file -- GitLab From ff79702a2685f6d20fba8f0fe4be1bb8c11523dd Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:44:52 +0000 Subject: [PATCH 116/205] No need messages --- src/telemetry/backend/tests/messagesBackend.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/telemetry/backend/tests/messagesBackend.py b/src/telemetry/backend/tests/messagesBackend.py index ef1235383..10f5e099a 100644 --- a/src/telemetry/backend/tests/messagesBackend.py +++ b/src/telemetry/backend/tests/messagesBackend.py @@ -13,20 +13,3 @@ # limitations under the License. -def create_kafka_config_a(bootstrap_server: str, exporter_endpoint: str, kafka_topic: str, - run_duration: int, fetch_interval: int): - """ - Provide ... - Bootstrap_server IP address as String. - Exporter endpoint with port address as String. - Kafka topic name as String. - Total duration of the test as Int. - Fetch_interval as Int. - """ - _bootstrap_servers = bootstrap_server - _exporter_endpoint = exporter_endpoint - _kafka_topic = kafka_topic - _run_duration = run_duration - _fetch_interval = fetch_interval - - return _bootstrap_servers, _exporter_endpoint, _kafka_topic, _run_duration, _fetch_interval -- GitLab From 80baa7bb89f60cbfeec401b5dfb485a880106fb2 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:45:35 +0000 Subject: [PATCH 117/205] No need to old tests --- .../backend/tests/testTelemetryBackend.py | 42 +++++-------------- 1 file changed, 10 insertions(+), 32 deletions(-) diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index 8c3fbd247..bdbb8526a 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# import sys -# print (sys.path) +import sys +print (sys.path) +sys.path.append('/home/tfs/tfs-ctrl') import logging from typing import Tuple -from confluent_kafka import Producer as KafkaProducer from common.proto.context_pb2 import Empty -from src.telemetry.frontend.tests.Messages import create_collector_request, create_collector_id from src.telemetry.backend.service.TelemetryBackendService import TelemetryBackendService -from src.telemetry.backend.service.TelemetryBackendServiceImpl import TelemetryBackendServiceImpl + + LOGGER = logging.getLogger(__name__) @@ -28,32 +28,10 @@ LOGGER = logging.getLogger(__name__) ########################### # Tests Implementation of Telemetry Backend ########################### -def test_get_kafka_configs(): - LOGGER.warning('test_get_kafka_configs requesting') - TelemetryBackendServiceObj = TelemetryBackendService() - response = TelemetryBackendServiceObj.generate_kafka_configs() - LOGGER.debug(str(response)) - assert isinstance(response, dict) -def test_export_collector_value(): - LOGGER.warning('test_export_collector_value requesting') - TelemetryBackendServiceObj = TelemetryBackendServiceImpl() - response = TelemetryBackendServiceObj.export_collector_value(create_collector_request('1')) - LOGGER.debug(str(response)) - assert isinstance(response, Tuple) - -def test_write_to_kafka(): - LOGGER.warning('test_write_to_kafka requesting') - TelemetryBackendServiceObj = TelemetryBackendServiceImpl() - _collector_value = TelemetryBackendServiceObj.export_collector_value(create_collector_request('1')) - response = TelemetryBackendServiceObj.write_to_kafka(_collector_value) - LOGGER.debug(str(response)) - assert isinstance(response, KafkaProducer) - -def test_stop_producer(): - LOGGER.warning('test_write_to_kafka requesting') - _kafka_configs = {'bootstrap.servers': '127.0.0.1:9092'} - TelemetryBackendServiceObj = TelemetryBackendServiceImpl() - response = TelemetryBackendServiceObj.stop_producer(KafkaProducer(_kafka_configs)) +def test_run_kafka_listener(): + LOGGER.warning('test_receive_kafka_request requesting') + TelemetryBackendServiceObj = TelemetryBackendService() + response = TelemetryBackendServiceObj.run_kafka_listener() LOGGER.debug(str(response)) - assert isinstance(response, Empty) \ No newline at end of file + assert isinstance(response, bool) -- GitLab From e07c5445b117a28066c0be7cafffa2ed6ff313e2 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:46:36 +0000 Subject: [PATCH 118/205] "create_collector_request_b" return updated to "telemetry_frontend_pb2.Collector" --- src/telemetry/frontend/tests/Messages.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index d323aa7fd..9e7b6c049 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -33,9 +33,10 @@ def create_collector_request_a(): _create_collector_request_a.collector_id.collector_id.uuid = "-1" return _create_collector_request_a -def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s): +def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s + ) -> telemetry_frontend_pb2.Collector: _create_collector_request_b = telemetry_frontend_pb2.Collector() - _create_collector_request_b.collector_id.collector_id.uuid = '-1' + _create_collector_request_b.collector_id.collector_id.uuid = '1' _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id _create_collector_request_b.duration_s = 
coll_duration_s _create_collector_request_b.interval_s = coll_interval_s -- GitLab From 8f4e98a99220fc991a73d25632e7befbaa816be0 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 10 May 2024 22:47:54 +0000 Subject: [PATCH 119/205] Remove old tests that are no longer needed --- src/telemetry/frontend/tests/test_unitary.py | 37 +++++++------------- 1 file changed, 13 insertions(+), 24 deletions(-) diff --git a/src/telemetry/frontend/tests/test_unitary.py b/src/telemetry/frontend/tests/test_unitary.py index 312695659..77a94a07a 100644 --- a/src/telemetry/frontend/tests/test_unitary.py +++ b/src/telemetry/frontend/tests/test_unitary.py @@ -164,32 +164,21 @@ ########################### # Tests Implementation of Telemetry Frontend ########################### -def test_start_collector(telemetryFrontend_client): - LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request('1')) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorId) - -def test_start_collector_a(telemetryFrontend_client): - LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request_a()) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorId) -def test_start_collector_b(telemetryFrontend_client): +def test_start_collector(telemetryFrontend_client): LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request_b('1',10,2)) + response = telemetryFrontend_client.StartCollector(create_collector_request_b('11',10,2)) LOGGER.debug(str(response)) assert isinstance(response, CollectorId) -def test_stop_collector(telemetryFrontend_client): - LOGGER.warning('test_stop_collector requesting') - response = telemetryFrontend_client.StopCollector(create_collector_id("1")) - LOGGER.debug(str(response)) - assert isinstance(response, Empty) - -def test_select_collectors(telemetryFrontend_client): - LOGGER.warning('test_select_collector requesting') - response = telemetryFrontend_client.SelectCollectors(create_collector_filter()) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorList) \ No newline at end of file +# def test_stop_collector(telemetryFrontend_client): +# LOGGER.warning('test_stop_collector requesting') +# response = telemetryFrontend_client.StopCollector(create_collector_id("1")) +# LOGGER.debug(str(response)) +# assert isinstance(response, Empty) + +# def test_select_collectors(telemetryFrontend_client): +# LOGGER.warning('test_select_collector requesting') +# response = telemetryFrontend_client.SelectCollectors(create_collector_filter()) +# LOGGER.debug(str(response)) +# assert isinstance(response, CollectorList) \ No newline at end of file -- GitLab From ce22504118b89dc4c069f5ef935915d6909dda42 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:53:13 +0000 Subject: [PATCH 120/205] Change the frontend tests file name in the run script --- scripts/run_tests_locally-telemetry-frontend.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_tests_locally-telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh index cccbcbc5b..c6ab54a34 100755 --- a/scripts/run_tests_locally-telemetry-frontend.sh +++ b/scripts/run_tests_locally-telemetry-frontend.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --verbose \ - telemetry/frontend/tests/test_unitary.py \ No newline
at end of file + telemetry/frontend/tests/test_frontend.py \ No newline at end of file -- GitLab From c213a7bc2c0fea25ab07394e58a8b295290ee4a3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:56:06 +0000 Subject: [PATCH 121/205] Added the implementation of "Kafka_listener", "Initiate_collector_backend" and "generate_kafka_reply" --- .../service/TelemetryBackendService.py | 138 +++++++++++------- 1 file changed, 85 insertions(+), 53 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 2e8478db1..d5ba6ced4 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -15,6 +15,7 @@ import ast import time +import random import logging import requests import threading @@ -46,33 +47,33 @@ class TelemetryBackendService: """ conusmer_configs = { 'bootstrap.servers' : KAFKA_SERVER_IP, - 'group.id' : 'consumer', - 'auto.offset.reset' : 'earliest' + 'group.id' : 'backend', + 'auto.offset.reset' : 'latest' } topic_request = "topic_request" + if (self.create_topic_if_not_exists(topic_request)): + consumerObj = KafkaConsumer(conusmer_configs) + consumerObj.subscribe([topic_request]) - consumerObj = KafkaConsumer(conusmer_configs) - consumerObj.subscribe([topic_request]) - - while True: - receive_msg = consumerObj.poll(2.0) - if receive_msg is None: - print ("Telemetry backend listener is active: Kafka Topic: ", topic_request) # added for debugging purposes - continue - elif receive_msg.error(): - if receive_msg.error().code() == KafkaError._PARTITION_EOF: + while True: + receive_msg = consumerObj.poll(2.0) + if receive_msg is None: + print ("Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes continue - else: - print("Consumer error: {}".format(receive_msg.error())) - break - (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) - self.execute_process_kafka_request(kpi_id, duration, interval) - - def run_kafka_listener(self)->Empty: # type: ignore + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + break + (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) + self.execute_initiate_collector_backend(kpi_id, duration, interval) + + def run_kafka_listener(self)->bool: # type: ignore threading.Thread(target=self.kafka_listener).start() return True - def process_kafka_request(self, kpi_id, duration, interval + def initiate_collector_backend(self, kpi_id, duration, interval ): # type: ignore """ Method to receive collector request attribues and initiates collecter backend. 
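The next hunk bounds each collector by its requested duration and spaces samples by its interval, spawning one thread per request. A condensed sketch of that pattern; daemon=True is a suggestion not present in the patch, so leftover collector threads cannot keep the process alive at shutdown:

import threading
import time

def run_collector(kpi_id: str, duration_s: int, interval_s: int, fetch_fn) -> threading.Thread:
    def _loop():
        deadline = time.time() + duration_s
        while time.time() < deadline:
            fetch_fn(kpi_id)          # stands in for extract_kpi_value()
            time.sleep(interval_s)
    thread = threading.Thread(target=_loop, daemon=True)
    thread.start()
    return thread

collector = run_collector('kpi-1', duration_s=6, interval_s=2,
                          fetch_fn=lambda kpi_id: print('collected', kpi_id))
collector.join()  # only for the demo; the service leaves its threads running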
@@ -80,15 +81,74 @@ class TelemetryBackendService: start_time = time.time() while True: if time.time() - start_time >= duration: # type: ignore - print("Timeout: consumer terminated", time.time() - start_time) + print("Requested Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) break # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) - print ("Telemetry Backend running for KPI: ", kpi_id, "after FETCH INTERVAL: ", interval) + self.extract_kpi_value(kpi_id) + # print ("Telemetry Backend running for KPI: ", kpi_id, "after FETCH INTERVAL: ", interval) time.sleep(interval) - def execute_process_kafka_request(self, kpi_id: str, duration: int, interval: int): - threading.Thread(target=self.process_kafka_request, args=(kpi_id, duration, interval)).start() - + def execute_initiate_collector_backend(self, kpi_id: str, duration: int, interval: int): + threading.Thread(target=self.initiate_collector_backend, args=(kpi_id, duration, interval)).start() + + + + def extract_kpi_value(self, kpi_id: str): + """ + Method to extract kpi value. + """ + measured_kpi_value = random.randint(1,100) + self.generate_kafka_reply(kpi_id , measured_kpi_value) + + def generate_kafka_reply(self, kpi_id: str, kpi_value: any): + """ + Method to write response on Kafka topic + """ + producer_configs = { + 'bootstrap.servers': KAFKA_SERVER_IP, + } + topic_response = "topic_response" + if (self.create_topic_if_not_exists(topic_response)): + msg_value = Tuple [str, any] + msg_value = (kpi_id, kpi_value) + msg_key = "111" # to be fetch from db??? + + producerObj = KafkaProducer(producer_configs) + producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + producerObj.flush() + + def create_topic_if_not_exists(self, new_topic_name: str): + """ + Method to create Kafka topic if it does not exist. + Args: + admin_client (AdminClient): Kafka admin client. + """ + admin_kafka_client = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) + try: + topic_metadata = admin_kafka_client.list_topics(timeout=5) + if new_topic_name not in topic_metadata.topics: + # If the topic does not exist, create a new topic + print(f"Topic '{new_topic_name}' does not exist. Creating...") + new_topic = NewTopic(new_topic_name, num_partitions=1, replication_factor=1) + admin_kafka_client.create_topics([new_topic]) + return True + except KafkaException as e: + print(f"Failed to create topic: {e}") + return False + + def delivery_callback(self, err, msg): + """ + Callback function to handle message delivery status. + Args: + err (KafkaError): Kafka error object. + msg (Message): Kafka message object. + """ + if err: + print(f'Message delivery failed: {err}') + else: + print(f'Message delivered to topic {msg.topic()}') + + # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- @@ -137,34 +197,6 @@ class TelemetryBackendService: print(f"Metric '{metric_name}' not found in the metrics.") return None - def delivery_callback(self, err, msg): - """ - Callback function to handle message delivery status. - Args: - err (KafkaError): Kafka error object. - msg (Message): Kafka message object. - """ - if err: - print(f'Message delivery failed: {err}') - else: - print(f'Message delivered to topic {msg.topic()}') - - def create_topic_if_not_exists(self, admin_client): - """ - Method to create Kafka topic if it does not exist. - Args: - admin_client (AdminClient): Kafka admin client. 
- """ - try: - topic_metadata = admin_client.list_topics(timeout=5) - if self.kafka_topic not in topic_metadata.topics: - # If the topic does not exist, create a new topic - print(f"Topic '{self.kafka_topic}' does not exist. Creating...") - new_topic = NewTopic(self.kafka_topic, num_partitions=1, replication_factor=1) - admin_client.create_topics([new_topic]) - except KafkaException as e: - print(f"Failed to create topic: {e}") - def produce_metrics(self): """ Method to produce metrics to Kafka topic as per Kafka configs. -- GitLab From 1c15a21abb8cd144f20704b9a8e9ccfccf446f46 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:57:10 +0000 Subject: [PATCH 122/205] Added "Kafka_listener" --- .../TelemetryFrontendServiceServicerImpl.py | 90 ++++++++++++++----- 1 file changed, 69 insertions(+), 21 deletions(-) diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 518dd471d..045c56d5b 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -12,13 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +import ast +import threading +import time from typing import Tuple import grpc import logging + +from confluent_kafka import Consumer as KafkaConsumer from common.proto.context_pb2 import Empty from monitoring.service.NameMapping import NameMapping from confluent_kafka import Producer as KafkaProducer from confluent_kafka import KafkaException +from confluent_kafka import KafkaError from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer @@ -38,49 +44,77 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): ) -> CollectorId: # type: ignore # push info to frontend db response = CollectorId() - _collector_id = request.collector_id + _collector_id = str(request.collector_id.collector_id.uuid) _collector_kpi_id = str(request.kpi_id.kpi_id.uuid) _collector_duration = int(request.duration_s) _collector_interval = int(request.interval_s) - activeCollObj = self.generate_kafka_request(str(_collector_id), _collector_kpi_id, _collector_duration, _collector_interval) - - response.collector_id.uuid = _collector_id.collector_id.uuid + self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) + # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) + response.collector_id.uuid = request.collector_id.collector_id.uuid return response - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def StopCollector(self, - request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore - ) -> Empty: # type: ignore - request.collector_id.uuid = "" - return Empty() + def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int): + threading.Thread(target=self.generate_kafka_request, args=(msg_key, kpi, duration, interval)).start() - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SelectCollectors(self, - request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore - ) -> CollectorList: # type: ignore - response = 
CollectorList() - return response - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def generate_kafka_request(self, - msg_key, kpi: str, duration : int, interval: int + msg_key: str, kpi: str, duration : int, interval: int ) -> KafkaProducer: """ Method to generate collector request to Kafka topic. """ + # time.sleep(5) producer_configs = { 'bootstrap.servers': KAFKA_SERVER_IP, - 'group.id' : 'requester', } topic_request = "topic_request" msg_value = Tuple [str, int, int] msg_value = (kpi, duration, interval) - + print ("Request generated: ", "Colletcor Id: ", msg_key, \ + ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) producerObj = KafkaProducer(producer_configs) producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) producerObj.flush() return producerObj + def run_kafka_listener(self): + # print ("--- STARTED: run_kafka_listener ---") + threading.Thread(target=self.kafka_listener).start() + return True + + def kafka_listener(self): + """ + listener for response on Kafka topic. + """ + # print ("--- STARTED: kafka_listener ---") + conusmer_configs = { + 'bootstrap.servers' : KAFKA_SERVER_IP, + 'group.id' : 'frontend', + 'auto.offset.reset' : 'latest' + } + topic_response = "topic_response" + + consumerObj = KafkaConsumer(conusmer_configs) + consumerObj.subscribe([topic_response]) + # print (time.time()) + while True: + receive_msg = consumerObj.poll(2.0) + if receive_msg is None: + print (" - Telemetry frontend listening on Kafka Topic: ", topic_response) # added for debugging purposes + continue + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + break + (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8')) + self.process_response(kpi_id, kpi_value) + # threading.Thread(target=self.process_response, args=(kpi_id, kpi_value)).start() + + def process_response(self, kpi_id: str, kpi_value: any): + print ("Frontend - KPI: ", kpi_id, ", VALUE: ", kpi_value) + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def delivery_callback(self, err, msg): """ @@ -92,4 +126,18 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): if err: print(f'Message delivery failed: {err}') else: - print(f'Message delivered to topic {msg.topic()}') \ No newline at end of file + print(f'Message delivered to topic {msg.topic()}') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def StopCollector(self, + request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore + ) -> Empty: # type: ignore + request.collector_id.uuid = "" + return Empty() + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectCollectors(self, + request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore + ) -> CollectorList: # type: ignore + response = CollectorList() + return response \ No newline at end of file -- GitLab From ed77dac801e0e6e859980c92b3c52ca67eb22092 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:57:49 +0000 Subject: [PATCH 123/205] Update the messages format --- src/telemetry/frontend/tests/Messages.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index 9e7b6c049..93a6066ee 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -12,20 +12,27 
@@ # See the License for the specific language governing permissions and # limitations under the License. +import uuid +import random from common.proto import telemetry_frontend_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType +def create_collector_id(): + _collector_id = telemetry_frontend_pb2.CollectorId() + _collector_id.collector_id.uuid = uuid.uuid4() + return _collector_id + def create_collector_id(coll_id_str : str): _collector_id = telemetry_frontend_pb2.CollectorId() _collector_id.collector_id.uuid = str(coll_id_str) return _collector_id -def create_collector_request(coll_id_str : str): +def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() - _create_collector_request.collector_id.collector_id.uuid = str(coll_id_str) - _create_collector_request.kpi_id.kpi_id.uuid = 'KPIid' + str(coll_id_str) - _create_collector_request.duration_s = float(-1) - _create_collector_request.interval_s = float(-1) + _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) + _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_collector_request.duration_s = float(random.randint(8, 16)) + _create_collector_request.interval_s = float(random.randint(2, 3)) return _create_collector_request def create_collector_request_a(): -- GitLab From afc8ee6a71ee86a10c2713e28b7deaec8fb95666 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:58:44 +0000 Subject: [PATCH 124/205] "test_unitary.py" to "test_frontend.py" --- src/telemetry/frontend/tests/test_frontend.py | 193 ++++++++++++++++++ 1 file changed, 193 insertions(+) create mode 100644 src/telemetry/frontend/tests/test_frontend.py diff --git a/src/telemetry/frontend/tests/test_frontend.py b/src/telemetry/frontend/tests/test_frontend.py new file mode 100644 index 000000000..4f59630d4 --- /dev/null +++ b/src/telemetry/frontend/tests/test_frontend.py @@ -0,0 +1,193 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
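With patch 123's randomized messages, each test run produces a fresh collector. A short sketch of the message a test now builds and of the key/value the frontend servicer derives from it for topic_request; runnable only inside the repository tree, since it imports the generated proto module:

import uuid
import random
from common.proto import telemetry_frontend_pb2

request = telemetry_frontend_pb2.Collector()
request.collector_id.collector_id.uuid = str(uuid.uuid4())  # fresh collector id
request.kpi_id.kpi_id.uuid = str(uuid.uuid4())              # fresh KPI id
request.duration_s = float(random.randint(8, 16))           # total collection time, seconds
request.interval_s = float(random.randint(2, 3))            # gap between samples, seconds

# What StartCollector() publishes for this request, per patch 122:
msg_key = str(request.collector_id.collector_id.uuid)
msg_value = str((str(request.kpi_id.kpi_id.uuid), int(request.duration_s), int(request.interval_s)))
print(msg_key, msg_value)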
+ +import os +import pytest +import logging +from typing import Union + +from common.proto.context_pb2 import Empty +from common.Constants import ServiceNameEnum +from common.proto.telemetry_frontend_pb2 import CollectorId, CollectorList +from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server +from context.client.ContextClient import ContextClient +from common.tools.service.GenericGrpcService import GenericGrpcService +from common.tests.MockServicerImpl_Context import MockServicerImpl_Context +from common.Settings import ( + get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) + +from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient +from telemetry.frontend.service.TelemetryFrontendService import TelemetryFrontendService +from telemetry.frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl +from telemetry.frontend.tests.Messages import ( create_collector_id, create_collector_request, + create_collector_filter, create_collector_request_a, create_collector_request_b) + +from device.client.DeviceClient import DeviceClient +from device.service.DeviceService import DeviceService +from device.service.driver_api.DriverFactory import DriverFactory +from device.service.driver_api.DriverInstanceCache import DriverInstanceCache + +from monitoring.service.NameMapping import NameMapping + +os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' +from device.service.drivers import DRIVERS + +########################### +# Tests Setup +########################### + +LOCAL_HOST = '127.0.0.1' +MOCKSERVICE_PORT = 10000 + +TELEMETRY_FRONTEND_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) +os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(TELEMETRY_FRONTEND_PORT) + +LOGGER = logging.getLogger(__name__) + +class MockContextService(GenericGrpcService): + # Mock Service implementing Context to simplify unitary tests of Monitoring + + def __init__(self, bind_port: Union[str, int]) -> None: + super().__init__(bind_port, LOCAL_HOST, enable_health_servicer=False, cls_name='MockService') + + # pylint: disable=attribute-defined-outside-init + def install_servicers(self): + self.context_servicer = MockServicerImpl_Context() + add_ContextServiceServicer_to_server(self.context_servicer, self.server) + +@pytest.fixture(scope='session') +def context_service(): + LOGGER.info('Initializing MockContextService...') + _service = MockContextService(MOCKSERVICE_PORT) + _service.start() + + LOGGER.info('Yielding MockContextService...') + yield _service + + LOGGER.info('Terminating MockContextService...') + _service.context_servicer.msg_broker.terminate() + _service.stop() + + LOGGER.info('Terminated MockContextService...') + +@pytest.fixture(scope='session') +def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing ContextClient...') + _client = ContextClient() + + LOGGER.info('Yielding ContextClient...') + yield _client + + LOGGER.info('Closing ContextClient...') + _client.close() + + LOGGER.info('Closed ContextClient...') + +@pytest.fixture(scope='session') +def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceService...') + 
driver_factory = DriverFactory(DRIVERS) + driver_instance_cache = DriverInstanceCache(driver_factory) + _service = DeviceService(driver_instance_cache) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding DeviceService...') + yield _service + + LOGGER.info('Terminating DeviceService...') + _service.stop() + + LOGGER.info('Terminated DeviceService...') + +@pytest.fixture(scope='session') +def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument + LOGGER.info('Initializing DeviceClient...') + _client = DeviceClient() + + LOGGER.info('Yielding DeviceClient...') + yield _client + + LOGGER.info('Closing DeviceClient...') + _client.close() + + LOGGER.info('Closed DeviceClient...') + +@pytest.fixture(scope='session') +def telemetryFrontend_service( + context_service : MockContextService, + device_service : DeviceService + ): + LOGGER.info('Initializing TelemetryFrontendService...') + name_mapping = NameMapping() + + _service = TelemetryFrontendService(name_mapping) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding TelemetryFrontendService...') + yield _service + + LOGGER.info('Terminating TelemetryFrontendService...') + _service.stop() + + LOGGER.info('Terminated TelemetryFrontendService...') + +@pytest.fixture(scope='session') +def telemetryFrontend_client( + telemetryFrontend_service : TelemetryFrontendService + ): + LOGGER.info('Initializing TelemetryFrontendClient...') + _client = TelemetryFrontendClient() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding TelemetryFrontendClient...') + yield _client + + LOGGER.info('Closing TelemetryFrontendClient...') + _client.close() + + LOGGER.info('Closed TelemetryFrontendClient...') + + +########################### +# Tests Implementation of Telemetry Frontend +########################### + +def test_start_collector(telemetryFrontend_client): + LOGGER.warning('test_start_collector requesting') + response = telemetryFrontend_client.StartCollector(create_collector_request()) + LOGGER.debug(str(response)) + assert isinstance(response, CollectorId) + +def test_run_kafka_listener(): + LOGGER.warning('test_receive_kafka_request requesting') + name_mapping = NameMapping() + TelemetryFrontendServiceObj = TelemetryFrontendServiceServicerImpl(name_mapping) + response = TelemetryFrontendServiceObj.run_kafka_listener() # Method "run_kafka_listener" is not define in frontend.proto + LOGGER.debug(str(response)) + assert isinstance(response, bool) + +def test_stop_collector(telemetryFrontend_client): + LOGGER.warning('test_stop_collector requesting') + response = telemetryFrontend_client.StopCollector(create_collector_id("1")) + LOGGER.debug(str(response)) + assert isinstance(response, Empty) + +# def test_select_collectors(telemetryFrontend_client): +# LOGGER.warning('test_select_collector requesting') +# response = telemetryFrontend_client.SelectCollectors(create_collector_filter()) +# LOGGER.debug(str(response)) +# assert isinstance(response, CollectorList) \ No newline at end of file -- GitLab From 0bcd283f978a91fc889873ad7e11c6a63092bda8 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 14 May 2024 13:59:05 +0000 Subject: [PATCH 125/205] file renamed --- src/telemetry/frontend/tests/test_unitary.py | 184 ------------------- 1 file changed, 184 deletions(-) delete mode 100644 src/telemetry/frontend/tests/test_unitary.py diff 
--git a/src/telemetry/frontend/tests/test_unitary.py b/src/telemetry/frontend/tests/test_unitary.py deleted file mode 100644 index 77a94a07a..000000000 --- a/src/telemetry/frontend/tests/test_unitary.py +++ /dev/null @@ -1,184 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import pytest -import logging -from typing import Union - -from common.proto.context_pb2 import Empty -from common.Constants import ServiceNameEnum -from common.proto.telemetry_frontend_pb2 import CollectorId, CollectorList -from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server -from context.client.ContextClient import ContextClient -from common.tools.service.GenericGrpcService import GenericGrpcService -from common.tests.MockServicerImpl_Context import MockServicerImpl_Context -from common.Settings import ( - get_service_port_grpc, get_env_var_name, ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC) - -from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient -from telemetry.frontend.service.TelemetryFrontendService import TelemetryFrontendService -from telemetry.frontend.tests.Messages import ( create_collector_id, create_collector_request, - create_collector_filter, create_collector_request_a, create_collector_request_b) - -from device.client.DeviceClient import DeviceClient -from device.service.DeviceService import DeviceService -from device.service.driver_api.DriverFactory import DriverFactory -from device.service.driver_api.DriverInstanceCache import DriverInstanceCache - -from monitoring.service.NameMapping import NameMapping - -os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' -from device.service.drivers import DRIVERS - -########################### -# Tests Setup -########################### - -LOCAL_HOST = '127.0.0.1' -MOCKSERVICE_PORT = 10000 - -TELEMETRY_FRONTEND_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) -os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) -os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(TELEMETRY_FRONTEND_PORT) - -LOGGER = logging.getLogger(__name__) - -class MockContextService(GenericGrpcService): - # Mock Service implementing Context to simplify unitary tests of Monitoring - - def __init__(self, bind_port: Union[str, int]) -> None: - super().__init__(bind_port, LOCAL_HOST, enable_health_servicer=False, cls_name='MockService') - - # pylint: disable=attribute-defined-outside-init - def install_servicers(self): - self.context_servicer = MockServicerImpl_Context() - add_ContextServiceServicer_to_server(self.context_servicer, self.server) - -@pytest.fixture(scope='session') -def context_service(): - LOGGER.info('Initializing MockContextService...') - _service = MockContextService(MOCKSERVICE_PORT) - _service.start() - - LOGGER.info('Yielding MockContextService...') - yield _service - - 
LOGGER.info('Terminating MockContextService...') - _service.context_servicer.msg_broker.terminate() - _service.stop() - - LOGGER.info('Terminated MockContextService...') - -@pytest.fixture(scope='session') -def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument - LOGGER.info('Initializing ContextClient...') - _client = ContextClient() - - LOGGER.info('Yielding ContextClient...') - yield _client - - LOGGER.info('Closing ContextClient...') - _client.close() - - LOGGER.info('Closed ContextClient...') - -@pytest.fixture(scope='session') -def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument - LOGGER.info('Initializing DeviceService...') - driver_factory = DriverFactory(DRIVERS) - driver_instance_cache = DriverInstanceCache(driver_factory) - _service = DeviceService(driver_instance_cache) - _service.start() - - # yield the server, when test finishes, execution will resume to stop it - LOGGER.info('Yielding DeviceService...') - yield _service - - LOGGER.info('Terminating DeviceService...') - _service.stop() - - LOGGER.info('Terminated DeviceService...') - -@pytest.fixture(scope='session') -def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument - LOGGER.info('Initializing DeviceClient...') - _client = DeviceClient() - - LOGGER.info('Yielding DeviceClient...') - yield _client - - LOGGER.info('Closing DeviceClient...') - _client.close() - - LOGGER.info('Closed DeviceClient...') - -@pytest.fixture(scope='session') -def telemetryFrontend_service( - context_service : MockContextService, - device_service : DeviceService - ): - LOGGER.info('Initializing TelemetryFrontendService...') - name_mapping = NameMapping() - - _service = TelemetryFrontendService(name_mapping) - _service.start() - - # yield the server, when test finishes, execution will resume to stop it - LOGGER.info('Yielding TelemetryFrontendService...') - yield _service - - LOGGER.info('Terminating TelemetryFrontendService...') - _service.stop() - - LOGGER.info('Terminated TelemetryFrontendService...') - -@pytest.fixture(scope='session') -def telemetryFrontend_client( - telemetryFrontend_service : TelemetryFrontendService - ): - LOGGER.info('Initializing TelemetryFrontendClient...') - _client = TelemetryFrontendClient() - - # yield the server, when test finishes, execution will resume to stop it - LOGGER.info('Yielding TelemetryFrontendClient...') - yield _client - - LOGGER.info('Closing TelemetryFrontendClient...') - _client.close() - - LOGGER.info('Closed TelemetryFrontendClient...') - - -########################### -# Tests Implementation of Telemetry Frontend -########################### - -def test_start_collector(telemetryFrontend_client): - LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request_b('11',10,2)) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorId) - -# def test_stop_collector(telemetryFrontend_client): -# LOGGER.warning('test_stop_collector requesting') -# response = telemetryFrontend_client.StopCollector(create_collector_id("1")) -# LOGGER.debug(str(response)) -# assert isinstance(response, Empty) - -# def test_select_collectors(telemetryFrontend_client): -# LOGGER.warning('test_select_collector requesting') -# response = telemetryFrontend_client.SelectCollectors(create_collector_filter()) -# LOGGER.debug(str(response)) -# assert isinstance(response, CollectorList) \ 
No newline at end of file -- GitLab From 178b72d05ae92cd290a13108742d86705e7367e3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 16 May 2024 13:53:00 +0000 Subject: [PATCH 126/205] StartCollector is working fine with unique communication identification logic. --- .../service/TelemetryBackendService.py | 100 +++++++++++------- .../TelemetryFrontendServiceServicerImpl.py | 28 +++-- src/telemetry/frontend/tests/Messages.py | 4 +- src/telemetry/frontend/tests/test_frontend.py | 25 +++-- 4 files changed, 99 insertions(+), 58 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index d5ba6ced4..6cc3aab5f 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -19,7 +19,7 @@ import random import logging import requests import threading -from typing import Tuple +from typing import Any, Tuple from common.proto.context_pb2 import Empty from confluent_kafka import Producer as KafkaProducer from confluent_kafka import Consumer as KafkaConsumer @@ -32,6 +32,8 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') KAFKA_SERVER_IP = '127.0.0.1:9092' +ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) +ACTIVE_COLLECTORS = [] class TelemetryBackendService: """ @@ -41,6 +43,10 @@ class TelemetryBackendService: def __init__(self): LOGGER.info('Init TelemetryBackendService') + def run_kafka_listener(self)->bool: # type: ignore + threading.Thread(target=self.kafka_listener).start() + return True + def kafka_listener(self): """ listener for requests on Kafka topic. @@ -51,14 +57,14 @@ class TelemetryBackendService: 'auto.offset.reset' : 'latest' } topic_request = "topic_request" - if (self.create_topic_if_not_exists(topic_request)): + if (self.create_topic_if_not_exists([topic_request])): consumerObj = KafkaConsumer(conusmer_configs) consumerObj.subscribe([topic_request]) while True: receive_msg = consumerObj.poll(2.0) if receive_msg is None: - print ("Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes + print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes continue elif receive_msg.error(): if receive_msg.error().code() == KafkaError._PARTITION_EOF: @@ -67,40 +73,38 @@ class TelemetryBackendService: print("Consumer error: {}".format(receive_msg.error())) break (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) - self.execute_initiate_collector_backend(kpi_id, duration, interval) + collector_id = receive_msg.key().decode('utf-8') + self.run_initiate_collector_backend(collector_id, kpi_id, duration, interval) - def run_kafka_listener(self)->bool: # type: ignore - threading.Thread(target=self.kafka_listener).start() - return True - def initiate_collector_backend(self, kpi_id, duration, interval + + def run_initiate_collector_backend(self, collector_id: str, kpi_id: str, duration: int, interval: int): + threading.Thread(target=self.initiate_collector_backend, args=(collector_id, kpi_id, duration, interval)).start() + + def initiate_collector_backend(self, collector_id, kpi_id, duration, interval ): # type: ignore """ Method to receive collector request attributes and initiate the collector backend.
""" start_time = time.time() while True: + ACTIVE_COLLECTORS.append(collector_id) if time.time() - start_time >= duration: # type: ignore print("Requested Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) break # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) - self.extract_kpi_value(kpi_id) + self.extract_kpi_value(collector_id, kpi_id) # print ("Telemetry Backend running for KPI: ", kpi_id, "after FETCH INTERVAL: ", interval) time.sleep(interval) - - def execute_initiate_collector_backend(self, kpi_id: str, duration: int, interval: int): - threading.Thread(target=self.initiate_collector_backend, args=(kpi_id, duration, interval)).start() - - - def extract_kpi_value(self, kpi_id: str): + def extract_kpi_value(self, collector_id: str, kpi_id: str): """ Method to extract kpi value. """ measured_kpi_value = random.randint(1,100) - self.generate_kafka_reply(kpi_id , measured_kpi_value) + self.generate_kafka_response(collector_id, kpi_id , measured_kpi_value) - def generate_kafka_reply(self, kpi_id: str, kpi_value: any): + def generate_kafka_response(self, collector_id: str, kpi_id: str, kpi_value: Any): """ Method to write response on Kafka topic """ @@ -108,33 +112,32 @@ class TelemetryBackendService: 'bootstrap.servers': KAFKA_SERVER_IP, } topic_response = "topic_response" - if (self.create_topic_if_not_exists(topic_response)): - msg_value = Tuple [str, any] - msg_value = (kpi_id, kpi_value) - msg_key = "111" # to be fetch from db??? + msg_value : Tuple [str, Any] = (kpi_id, kpi_value) + msg_key = collector_id + producerObj = KafkaProducer(producer_configs) + producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + producerObj.flush() - producerObj = KafkaProducer(producer_configs) - producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback) - producerObj.flush() - - def create_topic_if_not_exists(self, new_topic_name: str): + def create_topic_if_not_exists(self, new_topics: list): """ Method to create Kafka topic if it does not exist. Args: admin_client (AdminClient): Kafka admin client. """ - admin_kafka_client = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) - try: - topic_metadata = admin_kafka_client.list_topics(timeout=5) - if new_topic_name not in topic_metadata.topics: - # If the topic does not exist, create a new topic - print(f"Topic '{new_topic_name}' does not exist. Creating...") - new_topic = NewTopic(new_topic_name, num_partitions=1, replication_factor=1) - admin_kafka_client.create_topics([new_topic]) - return True - except KafkaException as e: - print(f"Failed to create topic: {e}") - return False + for topic in new_topics: + try: + topic_metadata = ADMIN_KAFKA_CLIENT.list_topics(timeout=5) + if topic not in topic_metadata.topics: + # If the topic does not exist, create a new topic + print(f"Topic '{topic}' does not exist. 
Creating...") + new_topic = NewTopic(topic, num_partitions=1, replication_factor=1) + ADMIN_KAFKA_CLIENT.create_topics([new_topic]) + return True + except KafkaException as e: + print(f"Failed to create topic: {e}") + return False + + self.verify_required_kafka_topics() def delivery_callback(self, err, msg): """ @@ -148,7 +151,21 @@ class TelemetryBackendService: else: print(f'Message delivered to topic {msg.topic()}') + # Function to create a list of topics + # Function to list all topics in the Kafka cluster + def verify_required_kafka_topics(self) -> list: + """List all topics in the Kafka cluster.""" + try: + # Fetch metadata from the broker + metadata = ADMIN_KAFKA_CLIENT.list_topics(timeout=10) + topics = list(metadata.topics.keys()) + print("Topics in the cluster:", topics) + return topics + except Exception as e: + print(f"Failed to list topics: {e}") + return [] + # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- @@ -159,8 +176,9 @@ class TelemetryBackendService: str: Metrics fetched from Node Exporter. """ KPI = "node_network_receive_packets_total" + EXPORTER_ENDPOINT = "http://node-exporter-7465c69b87-b6ks5.telebackend:9100/metrics" try: - response = requests.get(self.exporter_endpoint) # type: ignore + response = requests.get(EXPORTER_ENDPOINT) # type: ignore if response.status_code == 200: # print(f"Metrics fetched sucessfully...") metrics = response.text @@ -202,7 +220,7 @@ class TelemetryBackendService: Method to produce metrics to Kafka topic as per Kafka configs. """ conf = { - 'bootstrap.servers': self.bootstrap_servers, + 'bootstrap.servers': KAFKA_SERVER_IP, } admin_client = AdminClient(conf) @@ -216,7 +234,7 @@ class TelemetryBackendService: metrics = self.fetch_node_exporter_metrics() # select the function name based on the provided requirements if metrics: - kafka_producer.produce(self.kafka_topic, str(metrics), callback=self.delivery_callback) + kafka_producer.produce("topic_raw", str(metrics), callback=self.delivery_callback) kafka_producer.flush() # print("Metrics produced to Kafka topic") diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 045c56d5b..ebd0db4ac 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -15,7 +15,7 @@ import ast import threading import time -from typing import Tuple +from typing import Tuple, Any import grpc import logging @@ -30,9 +30,10 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer -LOGGER = logging.getLogger(__name__) -METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') -KAFKA_SERVER_IP = '127.0.0.1:9092' +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') +KAFKA_SERVER_IP = '127.0.0.1:9092' +ACTIVE_COLLECTORS = [] class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self, name_mapping : NameMapping): @@ -50,7 +51,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): _collector_interval = int(request.interval_s) self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) - 
response.collector_id.uuid = request.collector_id.collector_id.uuid + response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore return response def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int): @@ -74,6 +75,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) producerObj = KafkaProducer(producer_configs) producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + ACTIVE_COLLECTORS.append(msg_key) producerObj.flush() return producerObj @@ -108,11 +110,19 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): else: print("Consumer error: {}".format(receive_msg.error())) break - (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8')) - self.process_response(kpi_id, kpi_value) - # threading.Thread(target=self.process_response, args=(kpi_id, kpi_value)).start() + try: + collector_id = receive_msg.key().decode('utf-8') + if collector_id in ACTIVE_COLLECTORS: + (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8')) + self.process_response(kpi_id, kpi_value) + else: + print(f"collector id does not match.\nResponse ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}' ") + except Exception as e: + print(f"No message key found: {str(e)}") + continue + # return None - def process_response(self, kpi_id: str, kpi_value: any): + def process_response(self, kpi_id: str, kpi_value: Any): print ("Frontend - KPI: ", kpi_id, ", VALUE: ", kpi_value) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index 93a6066ee..2dea48c88 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -22,7 +22,7 @@ def create_collector_id(): _collector_id.collector_id.uuid = uuid.uuid4() return _collector_id -def create_collector_id(coll_id_str : str): +def create_collector_id_a(coll_id_str : str): _collector_id = telemetry_frontend_pb2.CollectorId() _collector_id.collector_id.uuid = str(coll_id_str) return _collector_id @@ -32,7 +32,7 @@ def create_collector_request(): _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_collector_request.duration_s = float(random.randint(8, 16)) - _create_collector_request.interval_s = float(random.randint(2, 3)) + _create_collector_request.interval_s = float(random.randint(2, 4)) return _create_collector_request def create_collector_request_a(): diff --git a/src/telemetry/frontend/tests/test_frontend.py b/src/telemetry/frontend/tests/test_frontend.py index 4f59630d4..a531ed617 100644 --- a/src/telemetry/frontend/tests/test_frontend.py +++ b/src/telemetry/frontend/tests/test_frontend.py @@ -13,6 +13,7 @@ # limitations under the License.
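Note that throughout the series the Kafka payloads are stringified Python tuples decoded with ast.literal_eval, as in the listener above. A minimal round-trip of that encoding (illustrative, not part of the patch):

import ast

payload = ('kpi-uuid-123', 42)         # (kpi_id, kpi_value)
encoded = str(payload)                 # "('kpi-uuid-123', 42)"
decoded = ast.literal_eval(encoded)    # parses literals only, unlike eval()
assert decoded == payload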
import os +import time import pytest import logging from typing import Union @@ -50,7 +51,7 @@ from device.service.drivers import DRIVERS LOCAL_HOST = '127.0.0.1' MOCKSERVICE_PORT = 10000 -TELEMETRY_FRONTEND_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND) +TELEMETRY_FRONTEND_PORT = str(MOCKSERVICE_PORT) + str(get_service_port_grpc(ServiceNameEnum.TELEMETRYFRONTEND)) os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) os.environ[get_env_var_name(ServiceNameEnum.TELEMETRYFRONTEND, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(TELEMETRY_FRONTEND_PORT) @@ -172,6 +173,18 @@ def test_start_collector(telemetryFrontend_client): LOGGER.debug(str(response)) assert isinstance(response, CollectorId) +def test_start_collector_a(telemetryFrontend_client): + LOGGER.warning('test_start_collector requesting') + response = telemetryFrontend_client.StartCollector(create_collector_request()) + LOGGER.debug(str(response)) + assert isinstance(response, CollectorId) + +def test_start_collector_b(telemetryFrontend_client): + LOGGER.warning('test_start_collector requesting') + response = telemetryFrontend_client.StartCollector(create_collector_request()) + LOGGER.debug(str(response)) + assert isinstance(response, CollectorId) + def test_run_kafka_listener(): LOGGER.warning('test_receive_kafka_request requesting') name_mapping = NameMapping() @@ -180,11 +193,11 @@ def test_run_kafka_listener(): LOGGER.debug(str(response)) assert isinstance(response, bool) -def test_stop_collector(telemetryFrontend_client): - LOGGER.warning('test_stop_collector requesting') - response = telemetryFrontend_client.StopCollector(create_collector_id("1")) - LOGGER.debug(str(response)) - assert isinstance(response, Empty) +# def test_stop_collector(telemetryFrontend_client): +# LOGGER.warning('test_stop_collector requesting') +# response = telemetryFrontend_client.StopCollector(create_collector_id("1")) +# LOGGER.debug(str(response)) +# assert isinstance(response, Empty) # def test_select_collectors(telemetryFrontend_client): # LOGGER.warning('test_select_collector requesting') -- GitLab From 8ce11c893f923cd86d12468e973bef8e0c4c6de4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 24 May 2024 09:05:20 +0000 Subject: [PATCH 127/205] improvements in Telemetry backend and Frontend service functionalities. 
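This commit consolidates the hard-coded topic names into a shared KAFKA_TOPICS dictionary and correlates every collector request/response pair by carrying the collector id as the Kafka message key; the backend also emits a final "NULL" response when a collector's duration expires, so the frontend can retire it. A condensed sketch of the key-based correlation (helper names are illustrative and not part of the codebase; broker address, topics, and payload encoding mirror the constants used in this series):

import ast
from confluent_kafka import Producer, Consumer

KAFKA_SERVER_IP = '127.0.0.1:9092'
KAFKA_TOPICS    = {'request': 'topic_request', 'response': 'topic_response'}

def send_request(collector_id: str, kpi_id: str, duration_s: int, interval_s: int) -> None:
    # The collector id rides in the message key; the payload is a stringified tuple.
    producer = Producer({'bootstrap.servers': KAFKA_SERVER_IP})
    producer.produce(KAFKA_TOPICS['request'], key=collector_id,
                     value=str((kpi_id, duration_s, interval_s)))
    producer.flush()

def wait_for_response(collector_id: str, max_polls: int = 10):
    # Responses for all collectors share one topic, so filter on the key.
    consumer = Consumer({'bootstrap.servers': KAFKA_SERVER_IP,
                         'group.id': 'frontend', 'auto.offset.reset': 'latest'})
    consumer.subscribe([KAFKA_TOPICS['response']])
    for _ in range(max_polls):
        msg = consumer.poll(2.0)
        if msg is None or msg.error():
            continue
        if msg.key() is not None and msg.key().decode('utf-8') == collector_id:
            return ast.literal_eval(msg.value().decode('utf-8'))  # (kpi_id, kpi_value)
    return None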
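On the exporter side, the commit hoists the Node Exporter URL into a module-level EXPORTER_ENDPOINT constant and drops the unused produce_metrics helper; extract_metric_value stays as the parser for the Prometheus text format. Its core logic, as a standalone sketch (illustrative only; the real method lives on TelemetryBackendService):

def parse_prometheus_value(metrics_text: str, metric_name: str):
    # Scan the exposition output line by line; samples look like 'name{labels} value'.
    for line in metrics_text.splitlines():
        if line.startswith('#'):        # skip HELP/TYPE comment lines
            continue
        if line.startswith(metric_name):
            return float(line.rsplit(' ', 1)[-1])  # assumes no trailing timestamp field
    return None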
--- .../service/TelemetryBackendService.py | 125 ++++++------------ .../backend/tests/testTelemetryBackend.py | 8 ++ .../TelemetryFrontendServiceServicerImpl.py | 16 ++- 3 files changed, 57 insertions(+), 92 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 6cc3aab5f..9d393b1ad 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -29,11 +29,14 @@ from confluent_kafka.admin import AdminClient, NewTopic from common.proto.telemetry_frontend_pb2 import Collector, CollectorId from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method -LOGGER = logging.getLogger(__name__) -METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') -KAFKA_SERVER_IP = '127.0.0.1:9092' +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') +KAFKA_SERVER_IP = '127.0.0.1:9092' ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) -ACTIVE_COLLECTORS = [] +ACTIVE_COLLECTORS = [] +KAFKA_TOPICS = {'request' : 'topic_request', + 'response': 'topic_response'} +EXPORTER_ENDPOINT = "http://node-exporter-7465c69b87-b6ks5.telebackend:9100/metrics" class TelemetryBackendService: """ @@ -43,7 +46,7 @@ class TelemetryBackendService: def __init__(self): LOGGER.info('Init TelemetryBackendService') - def run_kafka_listener(self)->bool: # type: ignore + def run_kafka_listener(self)->bool: threading.Thread(target=self.kafka_listener).start() return True @@ -56,26 +59,26 @@ class TelemetryBackendService: 'group.id' : 'backend', 'auto.offset.reset' : 'latest' } - topic_request = "topic_request" - if (self.create_topic_if_not_exists([topic_request])): - consumerObj = KafkaConsumer(conusmer_configs) - consumerObj.subscribe([topic_request]) - - while True: - receive_msg = consumerObj.poll(2.0) - if receive_msg is None: - print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes - continue - elif receive_msg.error(): - if receive_msg.error().code() == KafkaError._PARTITION_EOF: - continue - else: - print("Consumer error: {}".format(receive_msg.error())) - break - (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) - collector_id = receive_msg.key().decode('utf-8') - self.run_initiate_collector_backend(collector_id, kpi_id, duration, interval) + # topic_request = "topic_request" + consumerObj = KafkaConsumer(conusmer_configs) + # consumerObj.subscribe([topic_request]) + consumerObj.subscribe([KAFKA_TOPICS['request']]) + while True: + receive_msg = consumerObj.poll(2.0) + if receive_msg is None: + # print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes + print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", KAFKA_TOPICS['request']) # added for debugging purposes + continue + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + break + (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8')) + collector_id = receive_msg.key().decode('utf-8') + self.run_initiate_collector_backend(collector_id, kpi_id, duration, interval) def run_initiate_collector_backend(self, collector_id: str, kpi_id: str, duration: int, interval: int): @@ -89,8 +92,10 @@ class 
TelemetryBackendService: start_time = time.time() while True: ACTIVE_COLLECTORS.append(collector_id) - if time.time() - start_time >= duration: # type: ignore - print("Requested Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) + if time.time() - start_time >= duration: # condition to terminate backend + print("Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) + self.generate_kafka_response(collector_id, "NULL", False) + # write to Kafka break # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) self.extract_kpi_value(collector_id, kpi_id) @@ -101,7 +106,8 @@ class TelemetryBackendService: """ Method to extract kpi value. """ - measured_kpi_value = random.randint(1,100) + measured_kpi_value = random.randint(1,100) # Should be extracted from exporter/stream + # measured_kpi_value = self.fetch_node_exporter_metrics() # exporter extracted metric value against default KPI self.generate_kafka_response(collector_id, kpi_id , measured_kpi_value) def generate_kafka_response(self, collector_id: str, kpi_id: str, kpi_value: Any): @@ -111,14 +117,15 @@ class TelemetryBackendService: producer_configs = { 'bootstrap.servers': KAFKA_SERVER_IP, } - topic_response = "topic_response" + # topic_response = "topic_response" msg_value : Tuple [str, Any] = (kpi_id, kpi_value) msg_key = collector_id producerObj = KafkaProducer(producer_configs) - producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + # producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + producerObj.produce(KAFKA_TOPICS['response'], key=msg_key, value= str(msg_value), callback=self.delivery_callback) producerObj.flush() - def create_topic_if_not_exists(self, new_topics: list): + def create_topic_if_not_exists(self, new_topics: list) -> bool: """ Method to create Kafka topic if it does not exist. Args: @@ -132,12 +139,10 @@ class TelemetryBackendService: print(f"Topic '{topic}' does not exist. Creating...") new_topic = NewTopic(topic, num_partitions=1, replication_factor=1) ADMIN_KAFKA_CLIENT.create_topics([new_topic]) - return True except KafkaException as e: print(f"Failed to create topic: {e}") return False - - self.verify_required_kafka_topics() + return True def delivery_callback(self, err, msg): """ @@ -151,22 +156,6 @@ class TelemetryBackendService: else: print(f'Message delivered to topic {msg.topic()}') - # Function to create a list of topics - - # Function to list all topics in the Kafka cluster - def verify_required_kafka_topics(self) -> list: - """List all topics in the Kafka cluster.""" - try: - # Fetch metadata from the broker - metadata = ADMIN_KAFKA_CLIENT.list_topics(timeout=10) - topics = list(metadata.topics.keys()) - print("Topics in the cluster:", topics) - return topics - except Exception as e: - print(f"Failed to list topics: {e}") - return [] - - # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter ----------- def fetch_node_exporter_metrics(self): @@ -176,7 +165,6 @@ class TelemetryBackendService: str: Metrics fetched from Node Exporter. 
""" KPI = "node_network_receive_packets_total" - EXPORTER_ENDPOINT = "http://node-exporter-7465c69b87-b6ks5.telebackend:9100/metrics" try: response = requests.get(EXPORTER_ENDPOINT) # type: ignore if response.status_code == 200: @@ -200,7 +188,7 @@ class TelemetryBackendService: """ Method to extract the value of a metric from the metrics string. Args: - metrics (str): Metrics string fetched from Node Exporter. + metrics (str): Metrics string fetched from Exporter. metric_name (str): Name of the metric to extract. Returns: float: Value of the extracted metric, or None if not found. @@ -215,39 +203,4 @@ class TelemetryBackendService: print(f"Metric '{metric_name}' not found in the metrics.") return None - def produce_metrics(self): - """ - Method to produce metrics to Kafka topic as per Kafka configs. - """ - conf = { - 'bootstrap.servers': KAFKA_SERVER_IP, - } - - admin_client = AdminClient(conf) - self.create_topic_if_not_exists(admin_client) - - kafka_producer = KafkaProducer(conf) - - try: - start_time = time.time() - while True: - metrics = self.fetch_node_exporter_metrics() # select the function name based on the provided requirements - - if metrics: - kafka_producer.produce("topic_raw", str(metrics), callback=self.delivery_callback) - kafka_producer.flush() - # print("Metrics produced to Kafka topic") - - # Check if the specified run duration has elapsed - if time.time() - start_time >= self.run_duration: # type: ignore - break - - # waiting time until next fetch - time.sleep(self.fetch_interval) # type: ignore - except KeyboardInterrupt: - print("Keyboard interrupt detected. Exiting...") - finally: - kafka_producer.flush() - # kafka_producer.close() # this command generates ERROR - # ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter ----------- \ No newline at end of file diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index bdbb8526a..e3e8bbc4b 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -29,6 +29,14 @@ LOGGER = logging.getLogger(__name__) # Tests Implementation of Telemetry Backend ########################### +def test_verify_kafka_topics(): + LOGGER.warning('test_receive_kafka_request requesting') + TelemetryBackendServiceObj = TelemetryBackendService() + KafkaTopics = ['topic_request', 'topic_response'] + response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics) + LOGGER.debug(str(response)) + assert isinstance(response, bool) + def test_run_kafka_listener(): LOGGER.warning('test_receive_kafka_request requesting') TelemetryBackendServiceObj = TelemetryBackendService() diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index ebd0db4ac..f940ccd65 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -34,6 +34,9 @@ LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') KAFKA_SERVER_IP = '127.0.0.1:9092' ACTIVE_COLLECTORS = [] +KAFKA_TOPICS = {'request' : 'topic_request', + 'response': 'topic_response'} + class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self, name_mapping : NameMapping): @@ -51,7 +54,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): 
_collector_interval = int(request.interval_s) self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) - response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore + response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore return response def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int): @@ -68,13 +71,14 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): producer_configs = { 'bootstrap.servers': KAFKA_SERVER_IP, } - topic_request = "topic_request" + # topic_request = "topic_request" msg_value = Tuple [str, int, int] msg_value = (kpi, duration, interval) print ("Request generated: ", "Collector Id: ", msg_key, \ ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) producerObj = KafkaProducer(producer_configs) - producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) + producerObj.produce(KAFKA_TOPICS['request'], key=msg_key, value= str(msg_value), callback=self.delivery_callback) + # producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) ACTIVE_COLLECTORS.append(msg_key) producerObj.flush() return producerObj @@ -94,15 +98,15 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): 'group.id' : 'frontend', 'auto.offset.reset' : 'latest' } - topic_response = "topic_response" + # topic_response = "topic_response" consumerObj = KafkaConsumer(conusmer_configs) - consumerObj.subscribe([topic_response]) + consumerObj.subscribe([KAFKA_TOPICS['response']]) # print (time.time()) while True: receive_msg = consumerObj.poll(2.0) if receive_msg is None: - print (" - Telemetry frontend listening on Kafka Topic: ", topic_response) # added for debugging purposes + print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['response']) # added for debugging purposes continue elif receive_msg.error(): if receive_msg.error().code() == KafkaError._PARTITION_EOF: -- GitLab From f2c91300c1ebbe90299328d6106c52209581ebcc Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sat, 25 May 2024 02:51:06 +0000 Subject: [PATCH 128/205] Telemetry CRDB creation, basic operation and test --- scripts/run_tests_locally-telemetry-DB.sh | 26 ++++ scripts/show_logs_telemetry-DB.sh | 27 ++++ src/telemetry/database/TelemetryDB.py | 122 ++++++++++++++++ src/telemetry/database/TelemetryEngine.py | 57 ++++++++ src/telemetry/database/TelemetryModel.py | 59 +++++++++ src/telemetry/database/__init__.py | 14 ++ src/telemetry/database/__main__.py | 15 +++ src/telemetry/database/tests/__init__.py | 13 ++ src/telemetry/database/tests/messages.py | 25 ++++ .../database/tests/telemetryDBtests.py | 40 ++++++ src/telemetry/telemetry_virenv.txt | 5 + 11 files changed, 403 insertions(+) create mode 100755 scripts/run_tests_locally-telemetry-DB.sh create mode 100755 scripts/show_logs_telemetry-DB.sh create mode 100644 src/telemetry/database/TelemetryDB.py create mode 100644 src/telemetry/database/TelemetryEngine.py create mode 100644 src/telemetry/database/TelemetryModel.py create mode 100644 src/telemetry/database/__init__.py create mode 100644 src/telemetry/database/__main__.py create mode 100644 src/telemetry/database/tests/__init__.py create mode 100644 src/telemetry/database/tests/messages.py create mode 100644
src/telemetry/database/tests/telemetryDBtests.py diff --git a/scripts/run_tests_locally-telemetry-DB.sh b/scripts/run_tests_locally-telemetry-DB.sh new file mode 100755 index 000000000..0a896d92c --- /dev/null +++ b/scripts/run_tests_locally-telemetry-DB.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +# RCFILE=$PROJECTDIR/coverage/.coveragerc +# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# kpi_manager/tests/test_unitary.py + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-cli-level=INFO --verbose \ + telemetry/database/tests/telemetryDBtests.py \ No newline at end of file diff --git a/scripts/show_logs_telemetry-DB.sh b/scripts/show_logs_telemetry-DB.sh new file mode 100755 index 000000000..0f57a36af --- /dev/null +++ b/scripts/show_logs_telemetry-DB.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +######################################################################################################################## +# Define your deployment settings here +######################################################################################################################## + +# If not already set, set the name of the Kubernetes namespace to deploy to. +export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"crdb"} + +######################################################################################################################## +# Automated steps start here +######################################################################################################################## + +kubectl --namespace $TFS_K8S_NAMESPACE logs cockroachdb-0 diff --git a/src/telemetry/database/TelemetryDB.py b/src/telemetry/database/TelemetryDB.py new file mode 100644 index 000000000..5ce722af5 --- /dev/null +++ b/src/telemetry/database/TelemetryDB.py @@ -0,0 +1,122 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, time +from sqlalchemy import engine +from sqlalchemy.orm import sessionmaker +from telemetry.database.TelemetryModel import Collector as CollectorModel +from telemetry.database.TelemetryModel import Kpi as KpiModel +from sqlalchemy.ext.declarative import declarative_base +from telemetry.database.TelemetryEngine import TelemetryEngine + +LOGGER = logging.getLogger(__name__) + +# Create a base class for declarative models +Base = declarative_base() + +class TelemetryDB: + def __init__(self): + self.db_engine = TelemetryEngine.get_engine() + if self.db_engine is None: + LOGGER.error('Unable to get SQLAlchemy DB Engine...') + return False + LOGGER.info('test_telemetry_DB_connection -- Engine created sucessfully') + + def create_database(self): + try: + TelemetryEngine.create_database(self.db_engine) + LOGGER.info('test_telemetry_DB_connection -- DB created sucessfully') + return True + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) + return False + + # Function to create the collector and KPI tables in the database + def create_tables(self): + try: + Base.metadata.create_all(self.db_engine) # type: ignore + LOGGER.info("Collector and KPI tables created in the TelemetryFrontend database") + except Exception as e: + LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) + + # Function to insert a row into the Collector model + def insert_collector(self, kpi_id: int, collector: str, duration_s: float, interval_s: float): + # Create a session + Session = sessionmaker(bind=self.db_engine) + session = Session() + try: + # Create a new Collector instance + collectorObj = CollectorModel() + collectorObj.kpi_id = kpi_id + collectorObj.collector = collector + collectorObj.sampling_duration_s = duration_s + collectorObj.sampling_interval_s = interval_s + collectorObj.start_timestamp = time.time() + collectorObj.end_timestamp = time.time() + + # Add the instance to the session + session.add(collectorObj) + + # Commit the session + session.commit() + LOGGER.info("New collector inserted successfully") + except Exception as e: + session.rollback() + LOGGER.info("Failed to insert new collector. {:s}".format(str(e))) + finally: + # Close the session + session.close() + + def inser_kpi(self, kpi_id, kpi_descriptor): + # Create a session + Session = sessionmaker(bind=self.db_engine) + session = Session() + try: + # Create a new Collector instance + KpiObj = KpiModel() + KpiObj.kpi_id = kpi_id + KpiObj.kpi_description = kpi_descriptor + + # Add the instance to the session + session.add(KpiObj) + + # Commit the session + session.commit() + LOGGER.info("New collector inserted successfully") + except Exception as e: + session.rollback() + LOGGER.info("Failed to insert new collector. 
{:s}".format(str(e))) + finally: + # Close the session + session.close() + + def get_kpi(self, kpi_id): + # Create a session + Session = sessionmaker(bind=self.db_engine) + session = Session() + try: + kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id).first() + + if kpi: + LOGGER.info("kpi ID found: {:s}".format(str(kpi))) + return kpi + else: + LOGGER.info("Kpi ID not found") + return None + except Exception as e: + LOGGER.info("Failed to retrieve KPI ID. {:s}".format(str(e))) + raise + finally: + # Close the session + session.close() \ No newline at end of file diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py new file mode 100644 index 000000000..1884368bd --- /dev/null +++ b/src/telemetry/database/TelemetryEngine.py @@ -0,0 +1,57 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, sqlalchemy, sqlalchemy_utils +# from common.Settings import get_setting + +LOGGER = logging.getLogger(__name__) + +APP_NAME = 'tfs' +ECHO = False # False: No dump SQL commands and transactions executed +CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}' +# CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' +# CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' + +class TelemetryEngine: + # def __init__(self): + # self.engine = self.get_engine() + @staticmethod + def get_engine() -> sqlalchemy.engine.Engine: + CRDB_NAMESPACE = "crdb" + CRDB_SQL_PORT = "26257" + CRDB_DATABASE = "TelemetryFrontend" + CRDB_USERNAME = "tfs" + CRDB_PASSWORD = "tfs123" + CRDB_SSLMODE = "require" + crdb_uri = CRDB_URI_TEMPLATE.format( + CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + # crdb_uri = CRDB_URI_TEMPLATE.format( + # CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + try: + engine = sqlalchemy.create_engine( + crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) + return None # type: ignore + return engine # type: ignore + + @staticmethod + def create_database(engine : sqlalchemy.engine.Engine) -> None: + if not sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.create_database(engine.url) + + @staticmethod + def drop_database(engine : sqlalchemy.engine.Engine) -> None: + if sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.drop_database(engine.url) diff --git a/src/telemetry/database/TelemetryModel.py b/src/telemetry/database/TelemetryModel.py new file mode 100644 index 000000000..1f40bad56 --- /dev/null +++ b/src/telemetry/database/TelemetryModel.py @@ -0,0 +1,59 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, relationship + + +logging.basicConfig(level=logging.INFO) +LOGGER = logging.getLogger(__name__) + +# Create a base class for declarative models +Base = declarative_base() + +class Kpi(Base): + __tablename__ = 'KPI' + + kpi_id = Column(Integer, primary_key=True, autoincrement=True) + kpi_description = Column(Text) + kpi_sample_type = Column(Integer) + device_id = Column(String) + endpoint_id = Column(String) + service_id = Column(String) + slice_id = Column(String) + connection_id = Column(String) + link_id = Column(String) + monitor_flag = Column(String) + + # Relationship to Collector model: allows access to related Collector objects from a Kpi object + collectors = relationship('Collector', back_populates='kpi') + +class Collector(Base): + __tablename__ = 'collector' + + collector_id = Column(Integer, primary_key=True, autoincrement=True) + kpi_id = Column(Integer, ForeignKey('KPI.kpi_id')) + collector = Column(String) + sampling_duration_s = Column(Float) + sampling_interval_s = Column(Float) + start_timestamp = Column(Float) + end_timestamp = Column(Float) + + # Relationship to Kpi model: allows access to the related Kpi object from a Collector object + kpi = relationship('Kpi', back_populates='collectors') + + + diff --git a/src/telemetry/database/__init__.py b/src/telemetry/database/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/telemetry/database/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/telemetry/database/__main__.py b/src/telemetry/database/__main__.py new file mode 100644 index 000000000..10f5e099a --- /dev/null +++ b/src/telemetry/database/__main__.py @@ -0,0 +1,15 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
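The Kpi/Collector models above form a one-to-many pair: the ForeignKey on Collector.kpi_id plus the mirrored relationship(back_populates=...) declarations make the link navigable from both sides. A minimal usage sketch (assumes the two models above and an engine such as TelemetryEngine.get_engine() returns):

from sqlalchemy.orm import sessionmaker

session = sessionmaker(bind=engine)()  # 'engine' is assumed to be available

kpi = Kpi(kpi_id=1, kpi_description='received packets')
kpi.collectors.append(Collector(collector_id=1, collector='node-exporter',
                                sampling_interval_s=10.0))
session.add(kpi)      # the default save-update cascade stages the Collector too
session.commit()

for col in kpi.collectors:                         # parent -> children
    print(col.collector, col.kpi.kpi_description)  # child -> parent via back_populates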
+# See the License for the specific language governing permissions and +# limitations under the License. + + diff --git a/src/telemetry/database/tests/__init__.py b/src/telemetry/database/tests/__init__.py new file mode 100644 index 000000000..f80ccfd52 --- /dev/null +++ b/src/telemetry/database/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/src/telemetry/database/tests/messages.py b/src/telemetry/database/tests/messages.py new file mode 100644 index 000000000..911abcdc9 --- /dev/null +++ b/src/telemetry/database/tests/messages.py @@ -0,0 +1,25 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid +import random +from common.proto import telemetry_frontend_pb2 + +def create_collector_request(): + _create_collector_request = telemetry_frontend_pb2.Collector() + _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) + _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_collector_request.duration_s = float(random.randint(8, 16)) + _create_collector_request.interval_s = float(random.randint(2, 4)) + return _create_collector_request \ No newline at end of file diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py new file mode 100644 index 000000000..0d0977bce --- /dev/null +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -0,0 +1,40 @@ + +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
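For context on what these tests drive: TelemetryEngine assembles a cockroachdb:// URI for the sqlalchemy-cockroachdb dialect and bootstraps the database through sqlalchemy_utils. Condensed from the engine code above (credentials and ports mirror the hard-coded test values of this series):

import sqlalchemy, sqlalchemy_utils

crdb_uri = 'cockroachdb://tfs:tfs123@127.0.0.1:26257/TelemetryFrontend?sslmode=require'
engine   = sqlalchemy.create_engine(crdb_uri, connect_args={'application_name': 'tfs'},
                                    echo=False, future=True)
if not sqlalchemy_utils.database_exists(engine.url):  # create the DB on first run
    sqlalchemy_utils.create_database(engine.url)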
+ +import logging +from telemetry.database.TelemetryDB import TelemetryDB +from .messages import create_collector_request + +logging.basicConfig(level=logging.INFO) +LOGGER = logging.getLogger(__name__) + +def test_telemetry_DB_connection(): + LOGGER.info('test_telemetry_DB_connection begin') + TelemetryDBobj = TelemetryDB() + if(TelemetryDBobj.create_database()): + LOGGER.info('test_telemetry_DB_connection -----DB----') + TelemetryDBobj.create_tables() # type: ignore + LOGGER.info('test_telemetry_DB_connection -----Table----') + TelemetryDBobj.inser_kpi(4, 'this is test kpi') + LOGGER.info('test_telemetry_DB_connection -----INSERT KPI----') + TelemetryDBobj.insert_collector(4, "this is test collector", 3.0, 12.0) + LOGGER.info('test_telemetry_DB_connection -----INSERT COL----') + TelemetryDBobj.get_kpi(1) + LOGGER.info('test_telemetry_DB_connection -----GET KPI----') + + + + + \ No newline at end of file diff --git a/src/telemetry/telemetry_virenv.txt b/src/telemetry/telemetry_virenv.txt index 0ce9b803a..e39f80b65 100644 --- a/src/telemetry/telemetry_virenv.txt +++ b/src/telemetry/telemetry_virenv.txt @@ -7,6 +7,7 @@ colorama==0.4.6 confluent-kafka==2.3.0 coverage==6.3 future-fstrings==1.2.0 +greenlet==3.0.3 grpcio==1.47.5 grpcio-health-checking==1.47.5 grpcio-tools==1.47.5 @@ -37,7 +38,11 @@ pytz==2024.1 questdb==1.0.1 requests==2.27.1 six==1.16.0 +SQLAlchemy==1.4.52 +sqlalchemy-cockroachdb==1.4.4 +SQLAlchemy-Utils==0.38.3 toml==0.10.2 +typing_extensions==4.12.0 tzlocal==5.2 urllib3==1.26.18 wcwidth==0.2.13 -- GitLab From d0ee5beac3eb0eccfbb921588178dc7980ea24a4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 27 May 2024 12:21:42 +0000 Subject: [PATCH 129/205] CRDB working example of all operations --- src/telemetry/database/tests/temp_DB.py | 284 ++++++++++++++++++++++++ 1 file changed, 284 insertions(+) create mode 100644 src/telemetry/database/tests/temp_DB.py diff --git a/src/telemetry/database/tests/temp_DB.py b/src/telemetry/database/tests/temp_DB.py new file mode 100644 index 000000000..5d3c3b1bd --- /dev/null +++ b/src/telemetry/database/tests/temp_DB.py @@ -0,0 +1,284 @@ +from sqlalchemy import create_engine, Column, String, Integer, Text, Float, ForeignKey +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, relationship +from sqlalchemy.dialects.postgresql import UUID +import logging + +LOGGER = logging.getLogger(__name__) +Base = declarative_base() + +class Kpi(Base): + __tablename__ = 'kpi' + + kpi_id = Column(UUID(as_uuid=False), primary_key=True) + kpi_description = Column(Text) + kpi_sample_type = Column(Integer) + device_id = Column(String) + endpoint_id = Column(String) + service_id = Column(String) + slice_id = Column(String) + connection_id = Column(String) + link_id = Column(String) + + collectors = relationship('Collector', back_populates='kpi') + + def __repr__(self): + return (f"") + +class Collector(Base): + __tablename__ = 'collector' + + collector_id = Column(UUID(as_uuid=False), primary_key=True) + kpi_id = Column(UUID(as_uuid=False), ForeignKey('kpi.kpi_id')) + collector = Column(String) + sampling_duration_s = Column(Float) + sampling_interval_s = Column(Float) + start_timestamp = Column(Float) + end_timestamp = Column(Float) + + kpi = relationship('Kpi', back_populates='collectors') + + def __repr__(self): + return (f"") + +class DatabaseManager: + def __init__(self, db_url, db_name): + self.engine = create_engine(db_url) + self.db_name = db_name + self.Session = sessionmaker(bind=self.engine) + 
LOGGER.info("DatabaseManager initialized with DB URL: %s and DB Name: %s", db_url, db_name) + + def create_database(self): + try: + with self.engine.connect() as connection: + connection.execute(f"CREATE DATABASE {self.db_name};") + LOGGER.info("Database '%s' created successfully.", self.db_name) + except Exception as e: + LOGGER.error("Error creating database '%s': %s", self.db_name, e) + finally: + LOGGER.info("create_database method execution finished.") + + def create_tables(self): + try: + Base.metadata.create_all(self.engine) + LOGGER.info("Tables created successfully.") + except Exception as e: + LOGGER.error("Error creating tables: %s", e) + finally: + LOGGER.info("create_tables method execution finished.") + + def verify_table_creation(self): + try: + with self.engine.connect() as connection: + result = connection.execute("SHOW TABLES;") + tables = result.fetchall() + LOGGER.info("Tables verified: %s", tables) + return tables + except Exception as e: + LOGGER.error("Error verifying table creation: %s", e) + return [] + finally: + LOGGER.info("verify_table_creation method execution finished.") + + def insert_row_kpi(self, kpi_data): + session = self.Session() + try: + new_kpi = Kpi(**kpi_data) + session.add(new_kpi) + session.commit() + LOGGER.info("Inserted row into KPI table: %s", kpi_data) + except Exception as e: + session.rollback() + LOGGER.error("Error inserting row into KPI table: %s", e) + finally: + session.close() + LOGGER.info("insert_row_kpi method execution finished.") + + def insert_row_collector(self, collector_data): + session = self.Session() + try: + new_collector = Collector(**collector_data) + session.add(new_collector) + session.commit() + LOGGER.info("Inserted row into Collector table: %s", collector_data) + except Exception as e: + session.rollback() + LOGGER.error("Error inserting row into Collector table: %s", e) + finally: + session.close() + LOGGER.info("insert_row_collector method execution finished.") + + def verify_insertion_kpi(self, kpi_id): + session = self.Session() + try: + kpi = session.query(Kpi).filter_by(kpi_id=kpi_id).first() + LOGGER.info("Verified insertion in KPI table for kpi_id: %s, Result: %s", kpi_id, kpi) + return kpi + except Exception as e: + LOGGER.error("Error verifying insertion in KPI table for kpi_id %s: %s", kpi_id, e) + return None + finally: + session.close() + LOGGER.info("verify_insertion_kpi method execution finished.") + + def verify_insertion_collector(self, collector_id): + session = self.Session() + try: + collector = session.query(Collector).filter_by(collector_id=collector_id).first() + LOGGER.info("Verified insertion in Collector table for collector_id: %s, Result: %s", collector_id, collector) + return collector + except Exception as e: + LOGGER.error("Error verifying insertion in Collector table for collector_id %s: %s", collector_id, e) + return None + finally: + session.close() + LOGGER.info("verify_insertion_collector method execution finished.") + + def get_all_kpi_rows(self): + session = self.Session() + try: + kpi_rows = session.query(Kpi).all() + LOGGER.info("Fetched all rows from KPI table: %s", kpi_rows) + return kpi_rows + except Exception as e: + LOGGER.error("Error fetching all rows from KPI table: %s", e) + return [] + finally: + session.close() + LOGGER.info("get_all_kpi_rows method execution finished.") + + def get_all_collector_rows(self): + session = self.Session() + try: + collector_rows = session.query(Collector).all() + LOGGER.info("Fetched all rows from Collector table: %s", collector_rows) 
+ return collector_rows + except Exception as e: + LOGGER.error("Error fetching all rows from Collector table: %s", e) + return [] + finally: + session.close() + LOGGER.info("get_all_collector_rows method execution finished.") + + def get_filtered_kpi_rows(self, **filters): + session = self.Session() + try: + query = session.query(Kpi) + for column, value in filters.items(): + query = query.filter(getattr(Kpi, column) == value) + result = query.all() + LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result))) + return result + except NoResultFound: + LOGGER.warning("No results found in KPI table with filters %s", filters) + return [] + except Exception as e: + LOGGER.error("Error fetching filtered rows from KPI table with filters %s: %s", filters, e) + return [] + finally: + session.close() + LOGGER.info("get_filtered_kpi_rows method execution finished.") + + def get_filtered_collector_rows(self, **filters): + session = self.Session() + try: + query = session.query(Collector) + for column, value in filters.items(): + query = query.filter(getattr(Collector, column) == value) + result = query.all() + LOGGER.info("Fetched filtered rows from Collector table with filters %s: %s", filters, result) + return result + except NoResultFound: + LOGGER.warning("No results found in Collector table with filters %s", filters) + return [] + except Exception as e: + LOGGER.error("Error fetching filtered rows from Collector table with filters %s: %s", filters, e) + return [] + finally: + session.close() + LOGGER.info("get_filtered_collector_rows method execution finished.") + +# Example Usage +def main(): + CRDB_SQL_PORT = "26257" + CRDB_DATABASE = "TelemetryFrontend" + CRDB_USERNAME = "tfs" + CRDB_PASSWORD = "tfs123" + CRDB_SSLMODE = "require" + CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}' + crdb_uri = CRDB_URI_TEMPLATE.format( + CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + # db_url = "cockroachdb://username:password@localhost:26257/" + # db_name = "yourdatabase" + db_manager = DatabaseManager(crdb_uri, CRDB_DATABASE) + + # Create database + # db_manager.create_database() + + # Update db_url to include the new database name + db_manager.engine = create_engine(f"{crdb_uri}") + db_manager.Session = sessionmaker(bind=db_manager.engine) + + # Create tables + db_manager.create_tables() + + # Verify table creation + tables = db_manager.verify_table_creation() + LOGGER.info('Tables in the database: {:s}'.format(str(tables))) + + # Insert a row into the KPI table + kpi_data = { + 'kpi_id': '123e4567-e89b-12d3-a456-426614174100', + 'kpi_description': 'Sample KPI', + 'kpi_sample_type': 1, + 'device_id': 'device_1', + 'endpoint_id': 'endpoint_1', + 'service_id': 'service_1', + 'slice_id': 'slice_1', + 'connection_id': 'conn_1', + 'link_id': 'link_1' + } + db_manager.insert_row_kpi(kpi_data) + + # Insert a row into the Collector table + collector_data = { + 'collector_id': '123e4567-e89b-12d3-a456-426614174101', + 'kpi_id': '123e4567-e89b-12d3-a456-426614174000', + 'collector': 'Collector 1', + 'sampling_duration_s': 60.0, + 'sampling_interval_s': 10.0, + 'start_timestamp': 1625247600.0, + 'end_timestamp': 1625247660.0 + } + db_manager.insert_row_collector(collector_data) + + # Verify insertion into KPI table + kpi = db_manager.verify_insertion_kpi('123e4567-e89b-12d3-a456-426614174000') + print("Inserted KPI:", kpi) + + # Verify insertion into Collector table + collector = 
db_manager.verify_insertion_collector('123e4567-e89b-12d3-a456-426614174001')
+    print("Inserted Collector:", collector)
+
+    # Get all rows from KPI table
+    all_kpi_rows = db_manager.get_all_kpi_rows()
+    LOGGER.info("All KPI Rows: %s", all_kpi_rows)
+
+    # Get all rows from Collector table
+    all_collector_rows = db_manager.get_all_collector_rows()
+    LOGGER.info("All Collector Rows: %s", all_collector_rows)
+
+    # Get filtered rows from KPI table
+    filtered_kpi_rows = db_manager.get_filtered_kpi_rows(kpi_description='Sample KPI')
+    LOGGER.info("Filtered KPI Rows: %s", filtered_kpi_rows)
+
+    # Get filtered rows from Collector table
+    filtered_collector_rows = db_manager.get_filtered_collector_rows(collector='Collector 1')
+    LOGGER.info("Filtered Collector Rows: %s", filtered_collector_rows)
\ No newline at end of file
-- 
GitLab


From fdeccb2b759b742064b8e40e6e5cfc3dff6de3d3 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 27 May 2024 17:07:37 +0000
Subject: [PATCH 130/205] Kpi and Telemetry basic operations (add, filter and
 select)

---
 src/telemetry/database/TelemetryDB.py        | 122 ---------------
 src/telemetry/database/TelemetryDBmanager.py | 148 ++++++++++++++++++
 src/telemetry/database/TelemetryEngine.py    |   6 +-
 src/telemetry/database/TelemetryModel.py     |  44 ++++--
 src/telemetry/database/tests/messages.py     |  33 +++-
 .../database/tests/telemetryDBtests.py       |  46 +++---
 6 files changed, 239 insertions(+), 160 deletions(-)
 delete mode 100644 src/telemetry/database/TelemetryDB.py
 create mode 100644 src/telemetry/database/TelemetryDBmanager.py

diff --git a/src/telemetry/database/TelemetryDB.py b/src/telemetry/database/TelemetryDB.py
deleted file mode 100644
index 5ce722af5..000000000
--- a/src/telemetry/database/TelemetryDB.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -import logging, time -from sqlalchemy import engine -from sqlalchemy.orm import sessionmaker -from telemetry.database.TelemetryModel import Collector as CollectorModel -from telemetry.database.TelemetryModel import Kpi as KpiModel -from sqlalchemy.ext.declarative import declarative_base -from telemetry.database.TelemetryEngine import TelemetryEngine - -LOGGER = logging.getLogger(__name__) - -# Create a base class for declarative models -Base = declarative_base() - -class TelemetryDB: - def __init__(self): - self.db_engine = TelemetryEngine.get_engine() - if self.db_engine is None: - LOGGER.error('Unable to get SQLAlchemy DB Engine...') - return False - LOGGER.info('test_telemetry_DB_connection -- Engine created sucessfully') - - def create_database(self): - try: - TelemetryEngine.create_database(self.db_engine) - LOGGER.info('test_telemetry_DB_connection -- DB created sucessfully') - return True - except: # pylint: disable=bare-except # pragma: no cover - LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) - return False - - # Function to create the collector and KPI tables in the database - def create_tables(self): - try: - Base.metadata.create_all(self.db_engine) # type: ignore - LOGGER.info("Collector and KPI tables created in the TelemetryFrontend database") - except Exception as e: - LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) - - # Function to insert a row into the Collector model - def insert_collector(self, kpi_id: int, collector: str, duration_s: float, interval_s: float): - # Create a session - Session = sessionmaker(bind=self.db_engine) - session = Session() - try: - # Create a new Collector instance - collectorObj = CollectorModel() - collectorObj.kpi_id = kpi_id - collectorObj.collector = collector - collectorObj.sampling_duration_s = duration_s - collectorObj.sampling_interval_s = interval_s - collectorObj.start_timestamp = time.time() - collectorObj.end_timestamp = time.time() - - # Add the instance to the session - session.add(collectorObj) - - # Commit the session - session.commit() - LOGGER.info("New collector inserted successfully") - except Exception as e: - session.rollback() - LOGGER.info("Failed to insert new collector. {:s}".format(str(e))) - finally: - # Close the session - session.close() - - def inser_kpi(self, kpi_id, kpi_descriptor): - # Create a session - Session = sessionmaker(bind=self.db_engine) - session = Session() - try: - # Create a new Collector instance - KpiObj = KpiModel() - KpiObj.kpi_id = kpi_id - KpiObj.kpi_description = kpi_descriptor - - # Add the instance to the session - session.add(KpiObj) - - # Commit the session - session.commit() - LOGGER.info("New collector inserted successfully") - except Exception as e: - session.rollback() - LOGGER.info("Failed to insert new collector. {:s}".format(str(e))) - finally: - # Close the session - session.close() - - def get_kpi(self, kpi_id): - # Create a session - Session = sessionmaker(bind=self.db_engine) - session = Session() - try: - kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id).first() - - if kpi: - LOGGER.info("kpi ID found: {:s}".format(str(kpi))) - return kpi - else: - LOGGER.info("Kpi ID not found") - return None - except Exception as e: - LOGGER.info("Failed to retrieve KPI ID. 
{:s}".format(str(e))) - raise - finally: - # Close the session - session.close() \ No newline at end of file diff --git a/src/telemetry/database/TelemetryDBmanager.py b/src/telemetry/database/TelemetryDBmanager.py new file mode 100644 index 000000000..42d647e0d --- /dev/null +++ b/src/telemetry/database/TelemetryDBmanager.py @@ -0,0 +1,148 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, time +from sqlalchemy import inspect +from sqlalchemy.orm import sessionmaker +from telemetry.database.TelemetryModel import Collector as CollectorModel +from telemetry.database.TelemetryModel import Kpi as KpiModel +from sqlalchemy.ext.declarative import declarative_base +from telemetry.database.TelemetryEngine import TelemetryEngine +from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId +from common.proto.telemetry_frontend_pb2 import Collector +from sqlalchemy.exc import SQLAlchemyError + + +LOGGER = logging.getLogger(__name__) +DB_NAME = "TelemetryFrontend" + +# Create a base class for declarative models +Base = declarative_base() + +class TelemetryDBmanager: + def __init__(self): + self.db_engine = TelemetryEngine.get_engine() + if self.db_engine is None: + LOGGER.error('Unable to get SQLAlchemy DB Engine...') + return False + self.db_name = DB_NAME + self.Session = sessionmaker(bind=self.db_engine) + + def create_database(self): + try: + with self.db_engine.connect() as connection: + connection.execute(f"CREATE DATABASE {self.db_name};") + LOGGER.info('TelemetryDBmanager initalized DB Name: {self.db_name}') + return True + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) + return False + + def create_tables(self): + try: + Base.metadata.create_all(self.db_engine) # type: ignore + LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) + except Exception as e: + LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) + + def verify_tables(self): + try: + with self.db_engine.connect() as connection: + result = connection.execute("SHOW TABLES;") + tables = result.fetchall() + LOGGER.info("Tables verified: {:}".format(tables)) + except Exception as e: + LOGGER.info("Unable to fetch Table names. 
{:s}".format(str(e))) + + def inser_kpi(self, request: KpiDescriptor): + session = self.Session() + try: + # Create a new Collector instance + kpi_to_insert = KpiModel() + kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid + kpi_to_insert.kpi_description = request.kpi_description + kpi_to_insert.kpi_sample_type = request.kpi_sample_type + kpi_to_insert.device_id = request.service_id.service_uuid.uuid + kpi_to_insert.endpoint_id = request.device_id.device_uuid.uuid + kpi_to_insert.service_id = request.slice_id.slice_uuid.uuid + kpi_to_insert.slice_id = request.endpoint_id.endpoint_uuid.uuid + kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid + # kpi_to_insert.link_id = request.link_id.link_id.uuid + # Add the instance to the session + session.add(kpi_to_insert) + session.commit() + LOGGER.info("Row inserted into kpi table: {:}".format(kpi_to_insert)) + except Exception as e: + session.rollback() + LOGGER.info("Failed to insert new kpi. {:s}".format(str(e))) + finally: + # Close the session + session.close() + + # Function to insert a row into the Collector model + def insert_collector(self, request: Collector): + session = self.Session() + try: + # Create a new Collector instance + collector_to_insert = CollectorModel() + collector_to_insert.collector_id = request.collector_id.collector_id.uuid + collector_to_insert.kpi_id = request.kpi_id.kpi_id.uuid + collector_to_insert.collector = "Test collector description" + collector_to_insert.sampling_duration_s = request.duration_s + collector_to_insert.sampling_interval_s = request.interval_s + collector_to_insert.start_timestamp = time.time() + collector_to_insert.end_timestamp = time.time() + + session.add(collector_to_insert) + session.commit() + LOGGER.info("Row inserted into collector table: {:}".format(collector_to_insert)) + except Exception as e: + session.rollback() + LOGGER.info("Failed to insert new collector. {:s}".format(str(e))) + finally: + # Close the session + session.close() + + def get_kpi_descriptor(self, kpi_id: KpiId): + session = self.Session() + try: + kpi_id_to_get = kpi_id.kpi_id.uuid + kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id_to_get).first() + if kpi: + LOGGER.info("kpi ID found: {:s}".format(str(kpi))) + return kpi + else: + LOGGER.info("Kpi ID not found{:s}".format(str(kpi_id_to_get))) + return None + except Exception as e: + session.rollback() + LOGGER.info("Failed to retrieve KPI ID. 
{:s}".format(str(e))) + raise + finally: + session.close() + + def select_kpi_descriptor(self, **filters): + session = self.Session() + try: + query = session.query(KpiModel) + for column, value in filters.items(): + query = query.filter(getattr(KpiModel, column) == value) + result = query.all() + LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result))) + return result + except SQLAlchemyError as e: + LOGGER.error("Error fetching filtered rows from KPI table with filters {:}: {:}".format(filters, e)) + return [] + finally: + session.close() \ No newline at end of file diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py index 1884368bd..d6e54cc2f 100644 --- a/src/telemetry/database/TelemetryEngine.py +++ b/src/telemetry/database/TelemetryEngine.py @@ -39,8 +39,10 @@ class TelemetryEngine: # crdb_uri = CRDB_URI_TEMPLATE.format( # CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) try: - engine = sqlalchemy.create_engine( - crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) + # engine = sqlalchemy.create_engine( + # crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) + engine = sqlalchemy.create_engine(crdb_uri) + LOGGER.info(' --- TelemetryDBmanager initalized with DB URL: {:}'.format(crdb_uri)) except: # pylint: disable=bare-except # pragma: no cover LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) return None # type: ignore diff --git a/src/telemetry/database/TelemetryModel.py b/src/telemetry/database/TelemetryModel.py index 1f40bad56..8defdd2e8 100644 --- a/src/telemetry/database/TelemetryModel.py +++ b/src/telemetry/database/TelemetryModel.py @@ -13,6 +13,7 @@ # limitations under the License. 
import logging +from sqlalchemy.dialects.postgresql import UUID from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, relationship @@ -25,35 +26,46 @@ LOGGER = logging.getLogger(__name__) Base = declarative_base() class Kpi(Base): - __tablename__ = 'KPI' + __tablename__ = 'kpi' - kpi_id = Column(Integer, primary_key=True, autoincrement=True) + kpi_id = Column(UUID(as_uuid=False), primary_key=True) kpi_description = Column(Text) kpi_sample_type = Column(Integer) - device_id = Column(String) - endpoint_id = Column(String) - service_id = Column(String) - slice_id = Column(String) - connection_id = Column(String) - link_id = Column(String) - monitor_flag = Column(String) + device_id = Column(String) + endpoint_id = Column(String) + service_id = Column(String) + slice_id = Column(String) + connection_id = Column(String) + link_id = Column(String) + # monitor_flag = Column(String) # Relationship to Collector model: allows access to related Collector objects from a Kpi object collectors = relationship('Collector', back_populates='kpi') + # helps in logging the information + def __repr__(self): + return (f"") + class Collector(Base): __tablename__ = 'collector' - collector_id = Column(Integer, primary_key=True, autoincrement=True) - kpi_id = Column(Integer, ForeignKey('KPI.kpi_id')) - collector = Column(String) + collector_id = Column(UUID(as_uuid=False), primary_key=True) + kpi_id = Column(UUID(as_uuid=False), ForeignKey('kpi.kpi_id')) + collector = Column(String) sampling_duration_s = Column(Float) sampling_interval_s = Column(Float) - start_timestamp = Column(Float) - end_timestamp = Column(Float) + start_timestamp = Column(Float) + end_timestamp = Column(Float) # Relationship to Kpi model: allows access to the related Kpi object from a Collector object kpi = relationship('Kpi', back_populates='collectors') - - + def __repr__(self): + return (f"") \ No newline at end of file diff --git a/src/telemetry/database/tests/messages.py b/src/telemetry/database/tests/messages.py index 911abcdc9..ea59d0925 100644 --- a/src/telemetry/database/tests/messages.py +++ b/src/telemetry/database/tests/messages.py @@ -15,11 +15,40 @@ import uuid import random from common.proto import telemetry_frontend_pb2 +from common.proto import kpi_manager_pb2 +from common.proto.kpi_sample_types_pb2 import KpiSampleType + def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) - _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_collector_request.kpi_id.kpi_id.uuid = '2a779f04-77a6-4b32-b020-893e0e1e656f' # must be primary key in kpi table + # _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_collector_request.duration_s = float(random.randint(8, 16)) _create_collector_request.interval_s = float(random.randint(2, 4)) - return _create_collector_request \ No newline at end of file + return _create_collector_request + +def create_kpi_request(): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + 
_create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + # _create_kpi_request.link_id.link_id.uuid = 'LNK' + return _create_kpi_request + +def create_kpi_id_request(): + _create_kpi_id_request = kpi_manager_pb2.KpiId() + _create_kpi_id_request.kpi_id.uuid = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' + return _create_kpi_id_request + +def create_kpi_filter_request(): + # create a dict as follows: 'Key' = 'KpiModel' column name and 'Value' = filter to apply. + _create_kpi_filter_request = dict() + _create_kpi_filter_request['kpi_sample_type'] = 102 + _create_kpi_filter_request['kpi_id'] = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' + return _create_kpi_filter_request \ No newline at end of file diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py index 0d0977bce..217bdcfd4 100644 --- a/src/telemetry/database/tests/telemetryDBtests.py +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -14,27 +14,37 @@ # limitations under the License. import logging -from telemetry.database.TelemetryDB import TelemetryDB -from .messages import create_collector_request +from typing import Any +from telemetry.database.TelemetryDBmanager import TelemetryDBmanager +from telemetry.database.TelemetryEngine import TelemetryEngine +from telemetry.database.tests import temp_DB +from .messages import create_kpi_request, create_collector_request, \ + create_kpi_id_request, create_kpi_filter_request logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) -def test_telemetry_DB_connection(): - LOGGER.info('test_telemetry_DB_connection begin') - TelemetryDBobj = TelemetryDB() - if(TelemetryDBobj.create_database()): - LOGGER.info('test_telemetry_DB_connection -----DB----') - TelemetryDBobj.create_tables() # type: ignore - LOGGER.info('test_telemetry_DB_connection -----Table----') - TelemetryDBobj.inser_kpi(4, 'this is test kpi') - LOGGER.info('test_telemetry_DB_connection -----INSERT KPI----') - TelemetryDBobj.insert_collector(4, "this is test collector", 3.0, 12.0) - LOGGER.info('test_telemetry_DB_connection -----INSERT COL----') - TelemetryDBobj.get_kpi(1) - LOGGER.info('test_telemetry_DB_connection -----GET KPI----') - - - +# def test_temp_DB(): +# temp_DB.main() + +def test_telemetry_object_creation(): + LOGGER.info('--- test_telemetry_object_creation: START') + LOGGER.info('>>> Creating TelemetryDBmanager Object: ') + TelemetryDBmanagerObj = TelemetryDBmanager() + # LOGGER.info('>>> Creating Tables: ') + # TelemetryDBmanagerObj.create_tables() + # LOGGER.info('>>> Verifing Table creation: ') + # TelemetryDBmanagerObj.verify_tables() + LOGGER.info('>>> Row Insertion Operation: kpi Table') + kpi_obj = create_kpi_request() + TelemetryDBmanagerObj.inser_kpi(kpi_obj) + LOGGER.info('>>> Row Insertion Operation: collector Table') + collector_obj = create_collector_request() + TelemetryDBmanagerObj.insert_collector(collector_obj) + LOGGER.info('>>> Get KpiDescriptor ') + kpi_id_obj = create_kpi_id_request() + TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj) + kpi_filter : dict[str, Any] = create_kpi_filter_request() + TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter) \ No newline at end of file -- GitLab From cd73d36ed715357bfadb6ba770746a4409b97678 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 27 May 2024 18:21:46 +0000 Subject: [PATCH 131/205] All DB operation for KPI Manager and Telemetry FrontEnd (Add, 
delete, filter, all) --- src/telemetry/database/TelemetryDBmanager.py | 85 +++++++++++++++++-- src/telemetry/database/tests/messages.py | 12 +++ .../database/tests/telemetryDBtests.py | 32 ++++++- src/telemetry/database/tests/temp_DB.py | 47 +++++++++- 4 files changed, 165 insertions(+), 11 deletions(-) diff --git a/src/telemetry/database/TelemetryDBmanager.py b/src/telemetry/database/TelemetryDBmanager.py index 42d647e0d..0380bc8ee 100644 --- a/src/telemetry/database/TelemetryDBmanager.py +++ b/src/telemetry/database/TelemetryDBmanager.py @@ -20,7 +20,7 @@ from telemetry.database.TelemetryModel import Kpi as KpiModel from sqlalchemy.ext.declarative import declarative_base from telemetry.database.TelemetryEngine import TelemetryEngine from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId -from common.proto.telemetry_frontend_pb2 import Collector +from common.proto.telemetry_frontend_pb2 import Collector, CollectorId from sqlalchemy.exc import SQLAlchemyError @@ -65,6 +65,8 @@ class TelemetryDBmanager: except Exception as e: LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) +# ------------------ INSERT METHODs -------------------------------------- + def inser_kpi(self, request: KpiDescriptor): session = self.Session() try: @@ -114,16 +116,18 @@ class TelemetryDBmanager: # Close the session session.close() - def get_kpi_descriptor(self, kpi_id: KpiId): +# ------------------ GET METHODs -------------------------------------- + + def get_kpi_descriptor(self, request: KpiId): session = self.Session() try: - kpi_id_to_get = kpi_id.kpi_id.uuid - kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id_to_get).first() + kpi_id_to_search = request.kpi_id.uuid + kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id_to_search).first() if kpi: LOGGER.info("kpi ID found: {:s}".format(str(kpi))) return kpi else: - LOGGER.info("Kpi ID not found{:s}".format(str(kpi_id_to_get))) + LOGGER.warning("Kpi ID not found{:s}".format(str(kpi_id_to_search))) return None except Exception as e: session.rollback() @@ -131,7 +135,27 @@ class TelemetryDBmanager: raise finally: session.close() + + def get_collector(self, request: CollectorId): + session = self.Session() + try: + collector_id_to_search = request.collector_id.uuid + collector = session.query(CollectorModel).filter_by(collector_id=collector_id_to_search).first() + if collector: + LOGGER.info("collector ID found: {:s}".format(str(collector))) + return collector + else: + LOGGER.warning("collector ID not found{:s}".format(str(collector_id_to_search))) + return None + except Exception as e: + session.rollback() + LOGGER.info("Failed to retrieve collector ID. 
{:s}".format(str(e))) + raise + finally: + session.close() + # ------------------ SELECT METHODs -------------------------------------- + def select_kpi_descriptor(self, **filters): session = self.Session() try: @@ -144,5 +168,56 @@ class TelemetryDBmanager: except SQLAlchemyError as e: LOGGER.error("Error fetching filtered rows from KPI table with filters {:}: {:}".format(filters, e)) return [] + finally: + session.close() + + def select_collector(self, **filters): + session = self.Session() + try: + query = session.query(CollectorModel) + for column, value in filters.items(): + query = query.filter(getattr(CollectorModel, column) == value) + result = query.all() + LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result))) + return result + except SQLAlchemyError as e: + LOGGER.error("Error fetching filtered rows from KPI table with filters {:}: {:}".format(filters, e)) + return [] + finally: + session.close() + +# ------------------ DELETE METHODs -------------------------------------- + + def delete_kpi_descriptor(self, request: KpiId): + session = self.Session() + try: + kpi_id_to_delete = request.kpi_id.uuid + kpi = session.query(KpiModel).filter_by(kpi_id=kpi_id_to_delete).first() + if kpi: + session.delete(kpi) + session.commit() + LOGGER.info("Deleted KPI with kpi_id: %s", kpi_id_to_delete) + else: + LOGGER.warning("KPI with kpi_id %s not found", kpi_id_to_delete) + except SQLAlchemyError as e: + session.rollback() + LOGGER.error("Error deleting KPI with kpi_id %s: %s", kpi_id_to_delete, e) + finally: + session.close() + + def delete_collector(self, request: CollectorId): + session = self.Session() + try: + collector_id_to_delete = request.collector_id.uuid + collector = session.query(CollectorModel).filter_by(collector_id=collector_id_to_delete).first() + if collector: + session.delete(collector) + session.commit() + LOGGER.info("Deleted KPI with kpi_id: %s", collector_id_to_delete) + else: + LOGGER.warning("KPI with kpi_id %s not found", collector_id_to_delete) + except SQLAlchemyError as e: + session.rollback() + LOGGER.error("Error deleting KPI with kpi_id %s: %s", collector_id_to_delete, e) finally: session.close() \ No newline at end of file diff --git a/src/telemetry/database/tests/messages.py b/src/telemetry/database/tests/messages.py index ea59d0925..258d4a844 100644 --- a/src/telemetry/database/tests/messages.py +++ b/src/telemetry/database/tests/messages.py @@ -46,9 +46,21 @@ def create_kpi_id_request(): _create_kpi_id_request.kpi_id.uuid = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' return _create_kpi_id_request +def create_collector_id_request(): + _create_collector_id_request = telemetry_frontend_pb2.CollectorId() + _create_collector_id_request.collector_id.uuid = '50ba9199-7e9d-45b5-a2fc-3f97917bad65' + return _create_collector_id_request + def create_kpi_filter_request(): # create a dict as follows: 'Key' = 'KpiModel' column name and 'Value' = filter to apply. _create_kpi_filter_request = dict() _create_kpi_filter_request['kpi_sample_type'] = 102 _create_kpi_filter_request['kpi_id'] = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' + return _create_kpi_filter_request + +def create_collector_filter_request(): + # create a dict as follows: 'Key' = 'KpiModel' column name and 'Value' = filter to apply. 
+ _create_kpi_filter_request = dict() + _create_kpi_filter_request['sampling_interval_s'] = 3.0 + # _create_kpi_filter_request['kpi_id'] = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' return _create_kpi_filter_request \ No newline at end of file diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py index 217bdcfd4..81431beb7 100644 --- a/src/telemetry/database/tests/telemetryDBtests.py +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -19,7 +19,8 @@ from telemetry.database.TelemetryDBmanager import TelemetryDBmanager from telemetry.database.TelemetryEngine import TelemetryEngine from telemetry.database.tests import temp_DB from .messages import create_kpi_request, create_collector_request, \ - create_kpi_id_request, create_kpi_filter_request + create_kpi_id_request, create_kpi_filter_request, \ + create_collector_id_request, create_collector_filter_request logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) @@ -29,22 +30,45 @@ LOGGER = logging.getLogger(__name__) def test_telemetry_object_creation(): LOGGER.info('--- test_telemetry_object_creation: START') + LOGGER.info('>>> Creating TelemetryDBmanager Object: ') TelemetryDBmanagerObj = TelemetryDBmanager() + # LOGGER.info('>>> Creating Tables: ') # TelemetryDBmanagerObj.create_tables() + # LOGGER.info('>>> Verifing Table creation: ') # TelemetryDBmanagerObj.verify_tables() - LOGGER.info('>>> Row Insertion Operation: kpi Table') + + LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table') kpi_obj = create_kpi_request() TelemetryDBmanagerObj.inser_kpi(kpi_obj) - LOGGER.info('>>> Row Insertion Operation: collector Table') + + LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table') collector_obj = create_collector_request() TelemetryDBmanagerObj.insert_collector(collector_obj) - LOGGER.info('>>> Get KpiDescriptor ') + + LOGGER.info('>>> TESTING: Get KpiDescriptor ') kpi_id_obj = create_kpi_id_request() TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj) + + LOGGER.info('>>> TESTING: Select Collector ') + collector_id_obj = create_collector_id_request() + TelemetryDBmanagerObj.get_collector(collector_id_obj) + + LOGGER.info('>>> TESTING: Applying kpi filter ') kpi_filter : dict[str, Any] = create_kpi_filter_request() TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter) + LOGGER.info('>>> TESTING: Applying collector filter ') + collector_filter : dict[str, Any] = create_collector_filter_request() + TelemetryDBmanagerObj.select_collector(**collector_filter) + + LOGGER.info('>>> TESTING: Delete KpiDescriptor ') + kpi_id_obj = create_kpi_id_request() + TelemetryDBmanagerObj.delete_kpi_descriptor(kpi_id_obj) + + LOGGER.info('>>> TESTING: Delete Collector ') + collector_id_obj = create_collector_id_request() + TelemetryDBmanagerObj.delete_collector(collector_id_obj) \ No newline at end of file diff --git a/src/telemetry/database/tests/temp_DB.py b/src/telemetry/database/tests/temp_DB.py index 5d3c3b1bd..7c1074fcf 100644 --- a/src/telemetry/database/tests/temp_DB.py +++ b/src/telemetry/database/tests/temp_DB.py @@ -204,7 +204,42 @@ class DatabaseManager: finally: session.close() LOGGER.info("get_filtered_collector_rows method execution finished.") - + + def delete_kpi_by_id(self, kpi_id): + session = self.Session() + try: + kpi = session.query(Kpi).filter_by(kpi_id=kpi_id).first() + if kpi: + session.delete(kpi) + session.commit() + LOGGER.info("Deleted KPI with kpi_id: %s", kpi_id) + else: + LOGGER.warning("KPI with kpi_id %s not found", 
kpi_id) + except SQLAlchemyError as e: + session.rollback() + LOGGER.error("Error deleting KPI with kpi_id %s: %s", kpi_id, e) + finally: + session.close() + LOGGER.info("delete_kpi_by_id method execution finished.") + + def delete_collector_by_id(self, collector_id): + session = self.Session() + try: + collector = session.query(Collector).filter_by(collector_id=collector_id).first() + if collector: + session.delete(collector) + session.commit() + LOGGER.info("Deleted Collector with collector_id: %s", collector_id) + else: + LOGGER.warning("Collector with collector_id %s not found", collector_id) + except SQLAlchemyError as e: + session.rollback() + LOGGER.error("Error deleting Collector with collector_id %s: %s", collector_id, e) + finally: + session.close() + LOGGER.info("delete_collector_by_id method execution finished.") + + # Example Usage def main(): CRDB_SQL_PORT = "26257" @@ -281,4 +316,12 @@ def main(): # Get filtered rows from Collector table filtered_collector_rows = db_manager.get_filtered_collector_rows(collector='Collector 1') - LOGGER.info("Filtered Collector Rows: %s", filtered_collector_rows) \ No newline at end of file + LOGGER.info("Filtered Collector Rows: %s", filtered_collector_rows) + + # Delete a KPI by kpi_id + kpi_id_to_delete = '123e4567-e89b-12d3-a456-426614174000' + db_manager.delete_kpi_by_id(kpi_id_to_delete) + + # Delete a Collector by collector_id + collector_id_to_delete = '123e4567-e89b-12d3-a456-426614174001' + db_manager.delete_collector_by_id(collector_id_to_delete) -- GitLab From 5d3979fa1fec61e3f0851b6fe55cfbe3e5379101 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 29 May 2024 08:23:51 +0000 Subject: [PATCH 132/205] ManagementDB.py is created to manage DB operation of 'KPImanager' and 'TelemetryManager'. 
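The managementDB class introduced below reduces every insert to one generic
add_row_to_db(row) call. A minimal usage sketch, assuming the Collector model
from earlier in this series; the UUIDs and values are illustrative only, and
kpi_id should reference an existing row in the kpi table:

    import time, uuid
    from telemetry.database.managementDB import managementDB
    from telemetry.database.TelemetryModel import Collector as CollectorModel

    row = CollectorModel()
    row.collector_id        = str(uuid.uuid4())
    row.kpi_id              = str(uuid.uuid4())  # assumption: replace with a real kpi_id
    row.collector           = "sketch collector"
    row.sampling_duration_s = 15.0
    row.sampling_interval_s = 3.0
    row.start_timestamp     = time.time()
    row.end_timestamp       = time.time()
    managementDB().add_row_to_db(row)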
--- .../run_tests_locally-telemetry-frontend.sh | 2 +- scripts/run_tests_locally-telemetry-mgtDB.sh | 26 +++++++ src/telemetry/database/TelemetryDBmanager.py | 67 +++++++++++------ src/telemetry/database/TelemetryEngine.py | 6 +- src/telemetry/database/managementDB.py | 72 +++++++++++++++++++ .../database/tests/managementDBtests.py | 22 ++++++ src/telemetry/database/tests/messages.py | 24 +++++-- .../database/tests/telemetryDBtests.py | 31 +++++--- src/telemetry/database/tests/temp_DB.py | 4 +- .../TelemetryFrontendServiceServicerImpl.py | 26 ++++++- src/telemetry/frontend/tests/test_frontend.py | 39 +++++----- 11 files changed, 257 insertions(+), 62 deletions(-) create mode 100755 scripts/run_tests_locally-telemetry-mgtDB.sh create mode 100644 src/telemetry/database/managementDB.py create mode 100644 src/telemetry/database/tests/managementDBtests.py diff --git a/scripts/run_tests_locally-telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh index c6ab54a34..673104af6 100755 --- a/scripts/run_tests_locally-telemetry-frontend.sh +++ b/scripts/run_tests_locally-telemetry-frontend.sh @@ -24,5 +24,5 @@ cd $PROJECTDIR/src # python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --verbose \ +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ telemetry/frontend/tests/test_frontend.py \ No newline at end of file diff --git a/scripts/run_tests_locally-telemetry-mgtDB.sh b/scripts/run_tests_locally-telemetry-mgtDB.sh new file mode 100755 index 000000000..02a449abf --- /dev/null +++ b/scripts/run_tests_locally-telemetry-mgtDB.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +# RCFILE=$PROJECTDIR/coverage/.coveragerc +# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# kpi_manager/tests/test_unitary.py + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-cli-level=INFO --verbose \ + telemetry/database/tests/managementDBtests.py \ No newline at end of file diff --git a/src/telemetry/database/TelemetryDBmanager.py b/src/telemetry/database/TelemetryDBmanager.py index 0380bc8ee..6dc2868a1 100644 --- a/src/telemetry/database/TelemetryDBmanager.py +++ b/src/telemetry/database/TelemetryDBmanager.py @@ -13,7 +13,8 @@ # limitations under the License. 
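# ---------------------------------------------------------------------------
# Standalone sketch of the table-reflection pattern used by the drop_table()
# helper added below: reflect the live table first, drop it only if present.
# Assumes SQLAlchemy >= 1.4 (autoload_with) and a valid engine argument.
# ---------------------------------------------------------------------------
from sqlalchemy import MetaData, Table, inspect

def drop_table_if_exists(engine, table_name: str) -> None:
    if table_name in inspect(engine).get_table_names():
        Table(table_name, MetaData(), autoload_with=engine).drop(engine)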
import logging, time -from sqlalchemy import inspect +import sqlalchemy +from sqlalchemy import inspect, MetaData, Table from sqlalchemy.orm import sessionmaker from telemetry.database.TelemetryModel import Collector as CollectorModel from telemetry.database.TelemetryModel import Kpi as KpiModel @@ -22,13 +23,10 @@ from telemetry.database.TelemetryEngine import TelemetryEngine from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId from common.proto.telemetry_frontend_pb2 import Collector, CollectorId from sqlalchemy.exc import SQLAlchemyError - +from telemetry.database.TelemetryModel import Base LOGGER = logging.getLogger(__name__) -DB_NAME = "TelemetryFrontend" - -# Create a base class for declarative models -Base = declarative_base() +DB_NAME = "telemetryfrontend" class TelemetryDBmanager: def __init__(self): @@ -41,18 +39,19 @@ class TelemetryDBmanager: def create_database(self): try: - with self.db_engine.connect() as connection: - connection.execute(f"CREATE DATABASE {self.db_name};") - LOGGER.info('TelemetryDBmanager initalized DB Name: {self.db_name}') + # with self.db_engine.connect() as connection: + # connection.execute(f"CREATE DATABASE {self.db_name};") + TelemetryEngine.create_database(self.db_engine) + LOGGER.info('TelemetryDBmanager initalized DB Name: {:}'.format(self.db_name)) return True - except: # pylint: disable=bare-except # pragma: no cover - LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) + except Exception as e: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(e))) return False def create_tables(self): try: Base.metadata.create_all(self.db_engine) # type: ignore - LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) + LOGGER.info("Tables created in database ({:}) the as per Models".format(self.db_name)) except Exception as e: LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) @@ -61,10 +60,30 @@ class TelemetryDBmanager: with self.db_engine.connect() as connection: result = connection.execute("SHOW TABLES;") tables = result.fetchall() - LOGGER.info("Tables verified: {:}".format(tables)) + LOGGER.info("Tables in DB: {:}".format(tables)) except Exception as e: LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) + def drop_table(self, table_to_drop: str): + try: + inspector = inspect(self.db_engine) + existing_tables = inspector.get_table_names() + if table_to_drop in existing_tables: + table = Table(table_to_drop, MetaData(), autoload_with=self.db_engine) + table.drop(self.db_engine) + LOGGER.info("Tables delete in the DB Name: {:}".format(self.db_name)) + else: + LOGGER.warning("No table {:} in database {:} ".format(table_to_drop, DB_NAME)) + except Exception as e: + LOGGER.info("Tables cannot be deleted in the {:} database. 
{:s}".format(DB_NAME, str(e))) + + def list_databases(self): + query = "SHOW DATABASES" + with self.db_engine.connect() as connection: + result = connection.execute(query) + databases = [row[0] for row in result] + LOGGER.info("List of available DBs: {:}".format(databases)) + # ------------------ INSERT METHODs -------------------------------------- def inser_kpi(self, request: KpiDescriptor): @@ -84,7 +103,7 @@ class TelemetryDBmanager: # Add the instance to the session session.add(kpi_to_insert) session.commit() - LOGGER.info("Row inserted into kpi table: {:}".format(kpi_to_insert)) + LOGGER.info("Row inserted into kpi table: {:}".format(kpi_to_insert.kpi_id)) except Exception as e: session.rollback() LOGGER.info("Failed to insert new kpi. {:s}".format(str(e))) @@ -108,7 +127,7 @@ class TelemetryDBmanager: session.add(collector_to_insert) session.commit() - LOGGER.info("Row inserted into collector table: {:}".format(collector_to_insert)) + LOGGER.info("Row inserted into collector table: {:}".format(collector_to_insert.collector_id)) except Exception as e: session.rollback() LOGGER.info("Failed to insert new collector. {:s}".format(str(e))) @@ -127,7 +146,7 @@ class TelemetryDBmanager: LOGGER.info("kpi ID found: {:s}".format(str(kpi))) return kpi else: - LOGGER.warning("Kpi ID not found{:s}".format(str(kpi_id_to_search))) + LOGGER.warning("Kpi ID not found {:s}".format(str(kpi_id_to_search))) return None except Exception as e: session.rollback() @@ -163,7 +182,10 @@ class TelemetryDBmanager: for column, value in filters.items(): query = query.filter(getattr(KpiModel, column) == value) result = query.all() - LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result))) + if len(result) != 0: + LOGGER.info("Fetched filtered rows from KPI table with filters : {:s}".format(str(result))) + else: + LOGGER.warning("No matching row found : {:s}".format(str(result))) return result except SQLAlchemyError as e: LOGGER.error("Error fetching filtered rows from KPI table with filters {:}: {:}".format(filters, e)) @@ -178,7 +200,10 @@ class TelemetryDBmanager: for column, value in filters.items(): query = query.filter(getattr(CollectorModel, column) == value) result = query.all() - LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result))) + if len(result) != 0: + LOGGER.info("Fetched filtered rows from KPI table with filters : {:s}".format(str(result))) + else: + LOGGER.warning("No matching row found : {:s}".format(str(result))) return result except SQLAlchemyError as e: LOGGER.error("Error fetching filtered rows from KPI table with filters {:}: {:}".format(filters, e)) @@ -213,11 +238,11 @@ class TelemetryDBmanager: if collector: session.delete(collector) session.commit() - LOGGER.info("Deleted KPI with kpi_id: %s", collector_id_to_delete) + LOGGER.info("Deleted collector with collector_id: %s", collector_id_to_delete) else: - LOGGER.warning("KPI with kpi_id %s not found", collector_id_to_delete) + LOGGER.warning("collector with collector_id %s not found", collector_id_to_delete) except SQLAlchemyError as e: session.rollback() - LOGGER.error("Error deleting KPI with kpi_id %s: %s", collector_id_to_delete, e) + LOGGER.error("Error deleting collector with collector_id %s: %s", collector_id_to_delete, e) finally: session.close() \ No newline at end of file diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py index d6e54cc2f..ebeaf3787 100644 --- 
a/src/telemetry/database/TelemetryEngine.py +++ b/src/telemetry/database/TelemetryEngine.py @@ -30,7 +30,7 @@ class TelemetryEngine: def get_engine() -> sqlalchemy.engine.Engine: CRDB_NAMESPACE = "crdb" CRDB_SQL_PORT = "26257" - CRDB_DATABASE = "TelemetryFrontend" + CRDB_DATABASE = "telemetryfrontend" CRDB_USERNAME = "tfs" CRDB_PASSWORD = "tfs123" CRDB_SSLMODE = "require" @@ -41,8 +41,8 @@ class TelemetryEngine: try: # engine = sqlalchemy.create_engine( # crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) - engine = sqlalchemy.create_engine(crdb_uri) - LOGGER.info(' --- TelemetryDBmanager initalized with DB URL: {:}'.format(crdb_uri)) + engine = sqlalchemy.create_engine(crdb_uri, echo=False) + LOGGER.info(' TelemetryDBmanager initalized with DB URL: {:}'.format(crdb_uri)) except: # pylint: disable=bare-except # pragma: no cover LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) return None # type: ignore diff --git a/src/telemetry/database/managementDB.py b/src/telemetry/database/managementDB.py new file mode 100644 index 000000000..f8d0ef9cb --- /dev/null +++ b/src/telemetry/database/managementDB.py @@ -0,0 +1,72 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, time +from sqlalchemy.orm import sessionmaker +from sqlalchemy.ext.declarative import declarative_base +from telemetry.database.TelemetryEngine import TelemetryEngine + + +LOGGER = logging.getLogger(__name__) +TELEMETRY_DB_NAME = "telemetryfrontend" + +# Create a base class for declarative models +Base = declarative_base() + +class managementDB: + def __init__(self): + self.db_engine = TelemetryEngine.get_engine() + if self.db_engine is None: + LOGGER.error('Unable to get SQLAlchemy DB Engine...') + return False + self.db_name = TELEMETRY_DB_NAME + self.Session = sessionmaker(bind=self.db_engine) + + def create_database(self): + try: + with self.db_engine.connect() as connection: + connection.execute(f"CREATE DATABASE {self.db_name};") + LOGGER.info('managementDB initalizes database. Name: {self.db_name}') + return True + except: + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) + return False + + def create_tables(self): + try: + Base.metadata.create_all(self.db_engine) # type: ignore + LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) + except Exception as e: + LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) + + def verify_tables(self): + try: + with self.db_engine.connect() as connection: + result = connection.execute("SHOW TABLES;") + tables = result.fetchall() # type: ignore + LOGGER.info("Tables verified: {:}".format(tables)) + except Exception as e: + LOGGER.info("Unable to fetch Table names. 
{:s}".format(str(e))) + + def add_row_to_db(self, row): + session = self.Session() + try: + session.add(row) + session.commit() + LOGGER.info(f"Row inserted into {row.__class__.__name__} table. {row.__class__.__name__} Id: : {row.collector_id}") + except Exception as e: + session.rollback() + LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") + finally: + session.close() \ No newline at end of file diff --git a/src/telemetry/database/tests/managementDBtests.py b/src/telemetry/database/tests/managementDBtests.py new file mode 100644 index 000000000..3d7ef6615 --- /dev/null +++ b/src/telemetry/database/tests/managementDBtests.py @@ -0,0 +1,22 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from telemetry.database.managementDB import managementDB +from telemetry.database.tests.messages import create_collector_model_object + + +def test_add_row_to_db(): + managementDBobj = managementDB() + managementDBobj.add_row_to_db(create_collector_model_object()) \ No newline at end of file diff --git a/src/telemetry/database/tests/messages.py b/src/telemetry/database/tests/messages.py index 258d4a844..6452e79e7 100644 --- a/src/telemetry/database/tests/messages.py +++ b/src/telemetry/database/tests/messages.py @@ -12,17 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import time import uuid import random from common.proto import telemetry_frontend_pb2 from common.proto import kpi_manager_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType +from telemetry.database.TelemetryModel import Collector as CollectorModel def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) - _create_collector_request.kpi_id.kpi_id.uuid = '2a779f04-77a6-4b32-b020-893e0e1e656f' # must be primary key in kpi table + _create_collector_request.kpi_id.kpi_id.uuid = '71d58648-bf47-49ac-996f-e63a9fbfead4' # must be primary key in kpi table # _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_collector_request.duration_s = float(random.randint(8, 16)) _create_collector_request.interval_s = float(random.randint(2, 4)) @@ -43,19 +45,19 @@ def create_kpi_request(): def create_kpi_id_request(): _create_kpi_id_request = kpi_manager_pb2.KpiId() - _create_kpi_id_request.kpi_id.uuid = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' + _create_kpi_id_request.kpi_id.uuid = '71d58648-bf47-49ac-996f-e63a9fbfead4' return _create_kpi_id_request def create_collector_id_request(): _create_collector_id_request = telemetry_frontend_pb2.CollectorId() - _create_collector_id_request.collector_id.uuid = '50ba9199-7e9d-45b5-a2fc-3f97917bad65' + _create_collector_id_request.collector_id.uuid = '71d58648-bf47-49ac-996f-e63a9fbfead4' return _create_collector_id_request def create_kpi_filter_request(): # create a dict as follows: 'Key' = 'KpiModel' column name and 'Value' = filter to apply. _create_kpi_filter_request = dict() _create_kpi_filter_request['kpi_sample_type'] = 102 - _create_kpi_filter_request['kpi_id'] = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' + _create_kpi_filter_request['kpi_id'] = '3a17230d-8e95-4afb-8b21-6965481aee5a' return _create_kpi_filter_request def create_collector_filter_request(): @@ -63,4 +65,16 @@ def create_collector_filter_request(): _create_kpi_filter_request = dict() _create_kpi_filter_request['sampling_interval_s'] = 3.0 # _create_kpi_filter_request['kpi_id'] = '11e2c6c6-b507-40aa-ab3a-ffd41e7125f0' - return _create_kpi_filter_request \ No newline at end of file + return _create_kpi_filter_request + +def create_collector_model_object(): + # Create a new Collector instance + collector_to_insert = CollectorModel() + collector_to_insert.collector_id = str(uuid.uuid4()) + collector_to_insert.kpi_id = '3a17230d-8e95-4afb-8b21-6965481aee5a' + collector_to_insert.collector = "Test collector description" + collector_to_insert.sampling_duration_s = 15 + collector_to_insert.sampling_interval_s = 3 + collector_to_insert.start_timestamp = time.time() + collector_to_insert.end_timestamp = time.time() + return collector_to_insert \ No newline at end of file diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py index 81431beb7..14def9ef2 100644 --- a/src/telemetry/database/tests/telemetryDBtests.py +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -15,6 +15,7 @@ import logging from typing import Any +from sqlalchemy.ext.declarative import declarative_base from telemetry.database.TelemetryDBmanager import TelemetryDBmanager from telemetry.database.TelemetryEngine import TelemetryEngine from telemetry.database.tests import temp_DB @@ -25,42 +26,52 @@ from .messages import create_kpi_request, create_collector_request, \ logging.basicConfig(level=logging.INFO) LOGGER = 
logging.getLogger(__name__) + # def test_temp_DB(): # temp_DB.main() def test_telemetry_object_creation(): LOGGER.info('--- test_telemetry_object_creation: START') - LOGGER.info('>>> Creating TelemetryDBmanager Object: ') + LOGGER.info('>>> Creating TelemetryDBmanager Object <<< ') TelemetryDBmanagerObj = TelemetryDBmanager() - # LOGGER.info('>>> Creating Tables: ') + # LOGGER.info('>>> Creating database <<< ') + # TelemetryDBmanagerObj.create_database() + + # LOGGER.info('>>> verifing database <<< ') + # TelemetryDBmanagerObj.list_databases() + + # # LOGGER.info('>>> Droping Tables: ') + # # TelemetryDBmanagerObj.drop_table("table_naem_here") + + # LOGGER.info('>>> Creating Tables <<< ') # TelemetryDBmanagerObj.create_tables() - # LOGGER.info('>>> Verifing Table creation: ') - # TelemetryDBmanagerObj.verify_tables() + LOGGER.info('>>> Verifing Table creation <<< ') + TelemetryDBmanagerObj.verify_tables() - LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table') + LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table <<<') kpi_obj = create_kpi_request() TelemetryDBmanagerObj.inser_kpi(kpi_obj) - LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table') + LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table <<<') collector_obj = create_collector_request() TelemetryDBmanagerObj.insert_collector(collector_obj) - LOGGER.info('>>> TESTING: Get KpiDescriptor ') + LOGGER.info('>>> TESTING: Get KpiDescriptor <<<') kpi_id_obj = create_kpi_id_request() TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj) - LOGGER.info('>>> TESTING: Select Collector ') + LOGGER.info('>>> TESTING: Select Collector <<<') collector_id_obj = create_collector_id_request() TelemetryDBmanagerObj.get_collector(collector_id_obj) - LOGGER.info('>>> TESTING: Applying kpi filter ') + LOGGER.info('>>> TESTING: Applying kpi filter <<< ') kpi_filter : dict[str, Any] = create_kpi_filter_request() TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter) - LOGGER.info('>>> TESTING: Applying collector filter ') + LOGGER.info('>>> TESTING: Applying collector filter <<<') collector_filter : dict[str, Any] = create_collector_filter_request() TelemetryDBmanagerObj.select_collector(**collector_filter) diff --git a/src/telemetry/database/tests/temp_DB.py b/src/telemetry/database/tests/temp_DB.py index 7c1074fcf..089d35424 100644 --- a/src/telemetry/database/tests/temp_DB.py +++ b/src/telemetry/database/tests/temp_DB.py @@ -243,7 +243,7 @@ class DatabaseManager: # Example Usage def main(): CRDB_SQL_PORT = "26257" - CRDB_DATABASE = "TelemetryFrontend" + CRDB_DATABASE = "telemetryfrontend" CRDB_USERNAME = "tfs" CRDB_PASSWORD = "tfs123" CRDB_SSLMODE = "require" @@ -255,7 +255,7 @@ def main(): db_manager = DatabaseManager(crdb_uri, CRDB_DATABASE) # Create database - # db_manager.create_database() + db_manager.create_database() # Update db_url to include the new database name db_manager.engine = create_engine(f"{crdb_uri}") diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index f940ccd65..62a8969f9 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -29,6 +29,8 @@ from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, Collecto from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.telemetry_frontend_pb2_grpc import 
TelemetryFrontendServiceServicer +from telemetry.database.TelemetryModel import Collector as CollectorModel +from telemetry.database.managementDB import managementDB LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend') @@ -41,6 +43,23 @@ KAFKA_TOPICS = {'request' : 'topic_request', class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self, name_mapping : NameMapping): LOGGER.info('Init TelemetryFrontendService') + self.managementDBobj = managementDB() + + + def add_collector_to_db(self, request: Collector ): + try: + # Create a new Collector instance + collector_to_insert = CollectorModel() + collector_to_insert.collector_id = request.collector_id.collector_id.uuid + collector_to_insert.kpi_id = '3a17230d-8e95-4afb-8b21-6965481aee5a' + collector_to_insert.collector = "Test collector description" + collector_to_insert.sampling_duration_s = request.duration_s + collector_to_insert.sampling_interval_s = request.interval_s + collector_to_insert.start_timestamp = time.time() + collector_to_insert.end_timestamp = time.time() + self.managementDBobj.add_row_to_db(collector_to_insert) + except Exception as e: + LOGGER.info("Unable to create collectorModel class object. {:}".format(e)) # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def StartCollector(self, @@ -52,6 +71,8 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): _collector_kpi_id = str(request.kpi_id.kpi_id.uuid) _collector_duration = int(request.duration_s) _collector_interval = int(request.interval_s) + # pushing Collector to DB + self.add_collector_to_db(request) self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore @@ -74,10 +95,11 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): # topic_request = "topic_request" msg_value = Tuple [str, int, int] msg_value = (kpi, duration, interval) - print ("Request generated: ", "Colletcor Id: ", msg_key, \ - ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) + # print ("Request generated: ", "Colletcor Id: ", msg_key, \ + # ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) producerObj = KafkaProducer(producer_configs) producerObj.produce(KAFKA_TOPICS['request'], key=msg_key, value= str(msg_value), callback=self.delivery_callback) + LOGGER.info("Collector Request Generated: {:} -- {:} -- {:} -- {:}".format(msg_key, kpi, duration, interval)) # producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) ACTIVE_COLLECTORS.append(msg_key) producerObj.flush() diff --git a/src/telemetry/frontend/tests/test_frontend.py b/src/telemetry/frontend/tests/test_frontend.py index a531ed617..230122a2d 100644 --- a/src/telemetry/frontend/tests/test_frontend.py +++ b/src/telemetry/frontend/tests/test_frontend.py @@ -168,30 +168,33 @@ def telemetryFrontend_client( ########################### def test_start_collector(telemetryFrontend_client): - LOGGER.warning('test_start_collector requesting') + LOGGER.info('test_start_collector requesting') response = telemetryFrontend_client.StartCollector(create_collector_request()) LOGGER.debug(str(response)) assert isinstance(response, CollectorId) -def test_start_collector_a(telemetryFrontend_client): - 
LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request()) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorId) +# def test_start_collector_a(telemetryFrontend_client): +# LOGGER.warning('test_start_collector requesting') +# response = telemetryFrontend_client.StartCollector(create_collector_request()) +# LOGGER.debug(str(response)) +# assert isinstance(response, CollectorId) + +# def test_start_collector_b(telemetryFrontend_client): +# LOGGER.warning('test_start_collector requesting') +# response = telemetryFrontend_client.StartCollector(create_collector_request()) +# LOGGER.debug(str(response)) +# assert isinstance(response, CollectorId) + +# def test_run_kafka_listener(): +# LOGGER.warning('test_receive_kafka_request requesting') +# name_mapping = NameMapping() +# TelemetryFrontendServiceObj = TelemetryFrontendServiceServicerImpl(name_mapping) +# response = TelemetryFrontendServiceObj.run_kafka_listener() # Method "run_kafka_listener" is not define in frontend.proto +# LOGGER.debug(str(response)) +# assert isinstance(response, bool) + -def test_start_collector_b(telemetryFrontend_client): - LOGGER.warning('test_start_collector requesting') - response = telemetryFrontend_client.StartCollector(create_collector_request()) - LOGGER.debug(str(response)) - assert isinstance(response, CollectorId) -def test_run_kafka_listener(): - LOGGER.warning('test_receive_kafka_request requesting') - name_mapping = NameMapping() - TelemetryFrontendServiceObj = TelemetryFrontendServiceServicerImpl(name_mapping) - response = TelemetryFrontendServiceObj.run_kafka_listener() # Method "run_kafka_listener" is not define in frontend.proto - LOGGER.debug(str(response)) - assert isinstance(response, bool) # def test_stop_collector(telemetryFrontend_client): # LOGGER.warning('test_stop_collector requesting') -- GitLab From 1ef912c591b2ec37555ead02794667ee45b65e2c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 30 May 2024 10:29:24 +0000 Subject: [PATCH 133/205] KpiManager with monitoringDB working fine --- scripts/run_tests_locally-kpi_manager.sh | 2 +- .../service/KpiManagerServiceServicerImpl.py | 198 ++++++++++++------ src/kpi_manager/tests/test_messages.py | 44 +++- src/kpi_manager/tests/test_unitary.py | 109 ++++++---- src/telemetry/database/TelemetryDBmanager.py | 2 +- src/telemetry/database/TelemetryEngine.py | 2 +- src/telemetry/database/managementDB.py | 53 ++++- .../database/tests/telemetryDBtests.py | 61 +++--- 8 files changed, 328 insertions(+), 143 deletions(-) diff --git a/scripts/run_tests_locally-kpi_manager.sh b/scripts/run_tests_locally-kpi_manager.sh index 8ed855a8e..e56716dea 100755 --- a/scripts/run_tests_locally-kpi_manager.sh +++ b/scripts/run_tests_locally-kpi_manager.sh @@ -24,5 +24,5 @@ cd $PROJECTDIR/src # python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --verbose \ +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ kpi_manager/tests/test_unitary.py \ No newline at end of file diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index f1d370f30..c37bf373f 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -19,8 +19,12 @@ from common.proto.context_pb2 import Empty from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer from 
common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList
 from monitoring.service.NameMapping import NameMapping
-from monitoring.service import ManagementDBTools
+# from monitoring.service import ManagementDBTools
+from telemetry.database.managementDB import managementDB
+from telemetry.database.TelemetryModel import Kpi as KpiModel
+
+from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\
+     ConnectionId, EndPointId
 
 LOGGER = logging.getLogger(__name__)
 
@@ -29,77 +33,139 @@ METRICS_POOL = MetricsPool('Monitoring', 'KpiManager')
 class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
     def __init__(self, name_mapping : NameMapping):
         LOGGER.info('Init KpiManagerService')
-
-        # Init sqlite monitoring db
-        self.management_db = ManagementDBTools.ManagementDB('monitoring.db') # why monitoring.db here???
-        LOGGER.info('MetricsDB initialized --- KPI Manager Service')
+        self.managementDBobj = managementDB()
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def SetKpiDescriptor(
-        self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore
-    ) -> KpiId: # type: ignore
+    def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore
+                        ) -> KpiId: # type: ignore
         response = KpiId()
-        kpi_description = request.kpi_description
-        kpi_sample_type = request.kpi_sample_type
-        kpi_device_id = request.device_id.device_uuid.uuid
-        kpi_endpoint_id = request.endpoint_id.endpoint_uuid.uuid
-        kpi_service_id = request.service_id.service_uuid.uuid
-        kpi_slice_id = request.slice_id.slice_uuid.uuid
-        kpi_connection_id = request.connection_id.connection_uuid.uuid
-        kpi_link_id = request.link_id.link_uuid.uuid
-        if request.kpi_id.kpi_id.uuid != "":
+
+        try:
+            kpi_to_insert = KpiModel()
+            kpi_to_insert.kpi_id          = request.kpi_id.kpi_id.uuid
+            kpi_to_insert.kpi_description = request.kpi_description
+            kpi_to_insert.kpi_sample_type = request.kpi_sample_type
+            kpi_to_insert.device_id       = request.device_id.device_uuid.uuid
+            kpi_to_insert.endpoint_id     = request.endpoint_id.endpoint_uuid.uuid
+            kpi_to_insert.service_id      = request.service_id.service_uuid.uuid
+            kpi_to_insert.slice_id        = request.slice_id.slice_uuid.uuid
+            kpi_to_insert.connection_id   = request.connection_id.connection_uuid.uuid
+            # kpi_to_insert.link_id       = request.link_id.link_uuid.uuid
+            self.managementDBobj.add_row_to_db(kpi_to_insert)
             response.kpi_id.uuid = request.kpi_id.kpi_id.uuid
-            # Here the code to modify an existing kpi
-        else:
-            data = self.management_db.insert_KPI(
-                kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id,
-                kpi_service_id, kpi_slice_id, kpi_connection_id, kpi_link_id)
-            response.kpi_id.uuid = str(data)
-        return response
+            LOGGER.info("Added Row: {:}".format(response))
+            return response
+        except Exception as e:
+            LOGGER.info("Unable to create KpiModel class object. 
{:}".format(e)) + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore + ) -> KpiDescriptor: # type: ignore + response = KpiDescriptor() - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: # type: ignore - kpi_id = int(request.kpi_id.uuid) - kpi = self.management_db.get_KPI(kpi_id) - if kpi: - self.management_db.delete_KPI(kpi_id) - else: - LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) - return Empty() + try: + kpi_id_to_search = request.kpi_id.uuid + row = self.managementDBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + if row is not None: + response.kpi_id.kpi_id.uuid = row.kpi_id + response.kpi_description = row.kpi_description + response.kpi_sample_type = row.kpi_sample_type + response.service_id.service_uuid.uuid = row.service_id + response.device_id.device_uuid.uuid = row.device_id + response.slice_id.slice_uuid.uuid = row.slice_id + response.endpoint_id.endpoint_uuid.uuid = row.endpoint_id + response.connection_id.connection_uuid.uuid = row.connection_id + return response + except Exception as e: + LOGGER.info('Unable to search kpi id. {:}'.format(e)) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: # type: ignore - kpi_id = request.kpi_id.uuid - kpi_db = self.management_db.get_KPI(int(kpi_id)) - kpiDescriptor = KpiDescriptor() - if kpi_db is None: - LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) - else: - kpiDescriptor.kpi_description = kpi_db[1] - kpiDescriptor.kpi_sample_type = kpi_db[2] - kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) - kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) - kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) - kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) - kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) - kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) - return kpiDescriptor + def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext + ) -> Empty: # type: ignore + try: + kpi_id_to_search = request.kpi_id.uuid + self.managementDBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + except Exception as e: + LOGGER.info('Unable to search kpi id. 
{:}'.format(e)) + finally: + return Empty() @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: # type: ignore - kpi_descriptor_list = KpiDescriptorList() - data = self.management_db.get_KPIS() - LOGGER.debug(f"data: {data}") - for item in data: - kpi_descriptor = KpiDescriptor() - kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) - kpi_descriptor.kpi_description = item[1] - kpi_descriptor.kpi_sample_type = item[2] - kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) - kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) - kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) - kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) - kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) - kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) - kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) - return kpi_descriptor_list \ No newline at end of file + def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore + ) -> KpiDescriptorList: # type: ignore + response = KpiDescriptorList() + # LOGGER.info("Recevied requested Object: {:}".format(request)) + # re-structre the filter. create dynamic filter + filter_to_apply = dict() + filter_to_apply['device_id'] = request.device_id[0].device_uuid.uuid + filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0] + try: + rows = self.managementDBobj.select_with_filter(KpiModel, **filter_to_apply) + except Exception as e: + LOGGER.info('Unable to apply filter on kpi descriptor. {:}'.format(e)) + try: + if len(rows) != 0: + kpi_id_obj = KpiId() + device_id_obj = DeviceId() + endpoint_id_obj = EndPointId() + service_id_obj = ServiceId() + slice_id_obj = SliceId() + link_id_obj = LinkId() + + for row in rows: + kpiDescriptor_obj = KpiDescriptor() + kpiDescriptor_obj.kpi_id.kpi_id.uuid = row.kpi_id + # kpiDescriptor_obj.kpi_description = row.kpi_description + + response.kpi_descriptor_list.append(kpiDescriptor_obj) + return response + except Exception as e: + LOGGER.info('Unable to process response {:}'.format(e)) + + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + # def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: # type: ignore + # kpi_id = int(request.kpi_id.uuid) + # kpi = self.management_db.get_KPI(kpi_id) + # if kpi: + # self.management_db.delete_KPI(kpi_id) + # else: + # LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) + # return Empty() + + # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + # def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: # type: ignore + # kpi_id = request.kpi_id.uuid + # kpi_db = self.management_db.get_KPI(int(kpi_id)) + # kpiDescriptor = KpiDescriptor() + # if kpi_db is None: + # LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) + # else: + # kpiDescriptor.kpi_description = kpi_db[1] + # kpiDescriptor.kpi_sample_type = kpi_db[2] + # kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) + # kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) + # kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) + # kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) + # kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) + # kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) + # return kpiDescriptor + + # 
@safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + # def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: # type: ignore + # kpi_descriptor_list = KpiDescriptorList() + # data = self.management_db.get_KPIS() + # LOGGER.debug(f"data: {data}") + # for item in data: + # kpi_descriptor = KpiDescriptor() + # kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) + # kpi_descriptor.kpi_description = item[1] + # kpi_descriptor.kpi_sample_type = item[2] + # kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) + # kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) + # kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) + # kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) + # kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) + # kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) + # kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) + # return kpi_descriptor_list \ No newline at end of file diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index 72ff74c16..db6160be5 100755 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -12,14 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. +import uuid from common.proto import kpi_manager_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType +from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ + ConnectionId, EndPointId -def kpi_id(): +# ---------------------- New Test Messages --------------------------------- +def create_kpi_id_request(): _kpi_id = kpi_manager_pb2.KpiId() - _kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member + _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" return _kpi_id +def create_kpi_descriptor_request(): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member + return _create_kpi_request + +def create_kpi_filter_request_a(): + _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() + _create_kpi_filter_request.kpi_sample_type.append(102) + + device_id_obj = DeviceId() + device_id_obj.device_uuid.uuid = "SERV3" + _create_kpi_filter_request.device_id.append(device_id_obj) + + # new_device_id = _create_kpi_filter_request.device_id.add() + # new_device_id.device_uuid.uuid = 'DEV3' + # new_service_id = _create_kpi_filter_request.service_id.add() + # new_service_id.service_uuid.uuid = 'SERV1' + # new_slice_id = _create_kpi_filter_request.slice_id.add() + # new_slice_id.slice_uuid.uuid = 'SLC1' + # new_endpoint_id = _create_kpi_filter_request.endpoint_id.add() + # new_endpoint_id.endpoint_uuid.uuid = 'END1' + # new_connection_id = _create_kpi_filter_request.connection_id.add() + # new_connection_id.connection_uuid.uuid = 'CON1' + + return 
_create_kpi_filter_request
+
+# -------------------- Initial Test messages -------------------------------------
+
 def create_kpi_request(kpi_id_str):
     _create_kpi_request = kpi_manager_pb2.KpiDescriptor()
     _create_kpi_request.kpi_description = 'KPI Description Test'
@@ -33,6 +72,7 @@ def create_kpi_request(kpi_id_str):
 
 def create_kpi_request_b():
     _create_kpi_request = kpi_manager_pb2.KpiDescriptor()
+    _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4())
     _create_kpi_request.kpi_description = 'KPI Description Test'
     _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
     _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member
diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py
index 75987a5f4..e60d2104e 100755
--- a/src/kpi_manager/tests/test_unitary.py
+++ b/src/kpi_manager/tests/test_unitary.py
@@ -43,7 +43,9 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
 from device.service.DeviceService import DeviceService
 from device.client.DeviceClient import DeviceClient
 
-from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request
+from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, \
+    create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request, \
+    create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a
 # from monitoring.service.MonitoringService import MonitoringService
 from kpi_manager.service.KpiManagerService import KpiManagerService
 # from monitoring.client.MonitoringClient import MonitoringClient
@@ -63,7 +65,7 @@ from device.service.drivers import DRIVERS
 
 LOCAL_HOST = '127.0.0.1'
 MOCKSERVICE_PORT = 10000
-KPIMANAGER_SERVICE_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # avoid privileged ports
+KPIMANAGER_SERVICE_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEnum.KPIMANAGER) # type: ignore
 os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_HOST     )] = str(LOCAL_HOST)
 os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(KPIMANAGER_SERVICE_PORT)
@@ -127,18 +129,18 @@ def device_service(context_service : MockContextService): # pylint: disable=rede
 
     LOGGER.info('Terminated DeviceService...')
 
-@pytest.fixture(scope='session')
-def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
-    LOGGER.info('Initializing DeviceClient...')
-    _client = DeviceClient()
+# @pytest.fixture(scope='session')
+# def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
+#     LOGGER.info('Initializing DeviceClient...')
+#     _client = DeviceClient()
 
-    LOGGER.info('Yielding DeviceClient...')
-    yield _client
+#     LOGGER.info('Yielding DeviceClient...')
+#     yield _client
 
-    LOGGER.info('Closing DeviceClient...')
-    _client.close()
+#     LOGGER.info('Closing DeviceClient...')
+#     _client.close()
 
-    LOGGER.info('Closed DeviceClient...')
+#     LOGGER.info('Closed DeviceClient...')
 
 @pytest.fixture(scope='session')
 def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
@@ -195,39 +197,64 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab
 # Prepare Environment, should be the first test
 ##################################################
 
-# ERROR on this test ---
-def 
test_prepare_environment( - context_client : ContextClient, # pylint: disable=redefined-outer-name,unused-argument -): - context_id = json_context_id(DEFAULT_CONTEXT_NAME) - context_client.SetContext(Context(**json_context(DEFAULT_CONTEXT_NAME))) - context_client.SetTopology(Topology(**json_topology(DEFAULT_TOPOLOGY_NAME, context_id=context_id))) +# # ERROR on this test --- +# def test_prepare_environment( +# context_client : ContextClient, # pylint: disable=redefined-outer-name,unused-argument +# ): +# context_id = json_context_id(DEFAULT_CONTEXT_NAME) +# context_client.SetContext(Context(**json_context(DEFAULT_CONTEXT_NAME))) +# context_client.SetTopology(Topology(**json_topology(DEFAULT_TOPOLOGY_NAME, context_id=context_id))) ########################### # Tests Implementation of Kpi Manager ########################### -# Test case that makes use of client fixture to test server's CreateKpi method -def test_set_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name - # make call to server - LOGGER.warning('test_create_kpi requesting') - for i in range(3): - response = kpi_manager_client.SetKpiDescriptor(create_kpi_request(str(i+1))) - LOGGER.debug(str(response)) - assert isinstance(response, KpiId) - -# Test case that makes use of client fixture to test server's DeleteKpi method -def test_delete_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name - # make call to server - LOGGER.warning('delete_kpi requesting') - response = kpi_manager_client.SetKpiDescriptor(create_kpi_request('4')) - response = kpi_manager_client.DeleteKpiDescriptor(response) - LOGGER.debug(str(response)) - assert isinstance(response, Empty) - -# Test case that makes use of client fixture to test server's GetKpiDescriptor method -def test_select_kpi_descriptor(kpi_manager_client): # pylint: disable=redefined-outer-name - LOGGER.warning('test_selectkpidescritor begin') - response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) - LOGGER.debug(str(response)) +# ---------- 2nd Iteration Tests ----------------- +def test_SetKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") + response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + assert isinstance(response, KpiId) + +def test_GetKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") + response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) + assert isinstance(response, KpiDescriptor) + +def test_DeleteKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") + response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + kpi_manager_client.DeleteKpiDescriptor(response) + kpi_manager_client.GetKpiDescriptor(response) + assert isinstance(response, KpiId) + +def test_SelectKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") + response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) + # LOGGER.info(" >>> test_SelectKpiDescriptor: END <<< {:}".format(response)) assert isinstance(response, KpiDescriptorList) + +# ------------- INITIAL TESTs ---------------- +# Test case that makes use of client fixture to test server's CreateKpi method +# def test_set_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name +# # make call to server +# LOGGER.warning('test_create_kpi requesting') +# for i in range(3): +# response = kpi_manager_client.SetKpiDescriptor(create_kpi_request(str(i+1))) +# 
LOGGER.debug(str(response)) +# assert isinstance(response, KpiId) + +# # Test case that makes use of client fixture to test server's DeleteKpi method +# def test_delete_kpi(kpi_manager_client): # pylint: disable=redefined-outer-name +# # make call to server +# LOGGER.warning('delete_kpi requesting') +# response = kpi_manager_client.SetKpiDescriptor(create_kpi_request('4')) +# response = kpi_manager_client.DeleteKpiDescriptor(response) +# LOGGER.debug(str(response)) +# assert isinstance(response, Empty) + +# # Test case that makes use of client fixture to test server's GetKpiDescriptor method +# def test_select_kpi_descriptor(kpi_manager_client): # pylint: disable=redefined-outer-name +# LOGGER.warning('test_selectkpidescritor begin') +# response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) +# LOGGER.debug(str(response)) +# assert isinstance(response, KpiDescriptorList) diff --git a/src/telemetry/database/TelemetryDBmanager.py b/src/telemetry/database/TelemetryDBmanager.py index 6dc2868a1..e2b1f63a2 100644 --- a/src/telemetry/database/TelemetryDBmanager.py +++ b/src/telemetry/database/TelemetryDBmanager.py @@ -89,7 +89,7 @@ class TelemetryDBmanager: def inser_kpi(self, request: KpiDescriptor): session = self.Session() try: - # Create a new Collector instance + # Create a new Kpi instance kpi_to_insert = KpiModel() kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid kpi_to_insert.kpi_description = request.kpi_description diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py index ebeaf3787..2b47e4ec8 100644 --- a/src/telemetry/database/TelemetryEngine.py +++ b/src/telemetry/database/TelemetryEngine.py @@ -21,7 +21,6 @@ APP_NAME = 'tfs' ECHO = False # False: No dump SQL commands and transactions executed CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}' # CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' -# CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' class TelemetryEngine: # def __init__(self): @@ -51,6 +50,7 @@ class TelemetryEngine: @staticmethod def create_database(engine : sqlalchemy.engine.Engine) -> None: if not sqlalchemy_utils.database_exists(engine.url): + LOGGER.info("Database created. {:}".format(engine.url)) sqlalchemy_utils.create_database(engine.url) @staticmethod diff --git a/src/telemetry/database/managementDB.py b/src/telemetry/database/managementDB.py index f8d0ef9cb..706133477 100644 --- a/src/telemetry/database/managementDB.py +++ b/src/telemetry/database/managementDB.py @@ -64,9 +64,60 @@ class managementDB: try: session.add(row) session.commit() - LOGGER.info(f"Row inserted into {row.__class__.__name__} table. {row.__class__.__name__} Id: : {row.collector_id}") + LOGGER.info(f"Row inserted into {row.__class__.__name__} table.") except Exception as e: session.rollback() LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") + finally: + session.close() + + def search_db_row_by_id(self, model, col_name, id_to_search): + session = self.Session() + try: + entity = session.query(model).filter_by(**{col_name: id_to_search}).first() + if entity: + LOGGER.info(f"{model.__name__} ID found: {str(entity)}") + return entity + else: + LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}") + return None + except Exception as e: + session.rollback() + LOGGER.info(f"Failed to retrieve {model.__name__} ID. 
{str(e)}") + raise + finally: + session.close() + + def delete_db_row_by_id(self, model, col_name, id_to_search): + session = self.Session() + try: + record = session.query(model).filter_by(**{col_name: id_to_search}).first() + if record: + session.delete(record) + session.commit() + LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) + else: + LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search) + except Exception as e: + session.rollback() + LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) + finally: + session.close() + + def select_with_filter(self, model, **filters): + session = self.Session() + try: + query = session.query(model) + for column, value in filters.items(): + query = query.filter(getattr(model, column) == value) # type: ignore + result = query.all() + if result: + LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}- Results: {result}") # + else: + LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}") + return result + except Exception as e: + LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filters} ::: {e}") + return [] finally: session.close() \ No newline at end of file diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py index 14def9ef2..9cb856a3d 100644 --- a/src/telemetry/database/tests/telemetryDBtests.py +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -35,51 +35,52 @@ def test_telemetry_object_creation(): LOGGER.info('>>> Creating TelemetryDBmanager Object <<< ') TelemetryDBmanagerObj = TelemetryDBmanager() + TelemetryEngine.create_database(TelemetryDBmanagerObj.db_engine) - # LOGGER.info('>>> Creating database <<< ') - # TelemetryDBmanagerObj.create_database() + LOGGER.info('>>> Creating database <<< ') + TelemetryDBmanagerObj.create_database() - # LOGGER.info('>>> verifing database <<< ') - # TelemetryDBmanagerObj.list_databases() + LOGGER.info('>>> verifing database <<< ') + TelemetryDBmanagerObj.list_databases() # # LOGGER.info('>>> Droping Tables: ') # # TelemetryDBmanagerObj.drop_table("table_naem_here") - # LOGGER.info('>>> Creating Tables <<< ') - # TelemetryDBmanagerObj.create_tables() + LOGGER.info('>>> Creating Tables <<< ') + TelemetryDBmanagerObj.create_tables() LOGGER.info('>>> Verifing Table creation <<< ') TelemetryDBmanagerObj.verify_tables() - LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table <<<') - kpi_obj = create_kpi_request() - TelemetryDBmanagerObj.inser_kpi(kpi_obj) + # LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table <<<') + # kpi_obj = create_kpi_request() + # TelemetryDBmanagerObj.inser_kpi(kpi_obj) - LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table <<<') - collector_obj = create_collector_request() - TelemetryDBmanagerObj.insert_collector(collector_obj) + # LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table <<<') + # collector_obj = create_collector_request() + # TelemetryDBmanagerObj.insert_collector(collector_obj) - LOGGER.info('>>> TESTING: Get KpiDescriptor <<<') - kpi_id_obj = create_kpi_id_request() - TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj) + # LOGGER.info('>>> TESTING: Get KpiDescriptor <<<') + # kpi_id_obj = create_kpi_id_request() + # TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj) - LOGGER.info('>>> TESTING: Select Collector <<<') - collector_id_obj = create_collector_id_request() 
- TelemetryDBmanagerObj.get_collector(collector_id_obj) + # LOGGER.info('>>> TESTING: Select Collector <<<') + # collector_id_obj = create_collector_id_request() + # TelemetryDBmanagerObj.get_collector(collector_id_obj) - LOGGER.info('>>> TESTING: Applying kpi filter <<< ') - kpi_filter : dict[str, Any] = create_kpi_filter_request() - TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter) + # LOGGER.info('>>> TESTING: Applying kpi filter <<< ') + # kpi_filter : dict[str, Any] = create_kpi_filter_request() + # TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter) - LOGGER.info('>>> TESTING: Applying collector filter <<<') - collector_filter : dict[str, Any] = create_collector_filter_request() - TelemetryDBmanagerObj.select_collector(**collector_filter) + # LOGGER.info('>>> TESTING: Applying collector filter <<<') + # collector_filter : dict[str, Any] = create_collector_filter_request() + # TelemetryDBmanagerObj.select_collector(**collector_filter) - LOGGER.info('>>> TESTING: Delete KpiDescriptor ') - kpi_id_obj = create_kpi_id_request() - TelemetryDBmanagerObj.delete_kpi_descriptor(kpi_id_obj) + # LOGGER.info('>>> TESTING: Delete KpiDescriptor ') + # kpi_id_obj = create_kpi_id_request() + # TelemetryDBmanagerObj.delete_kpi_descriptor(kpi_id_obj) - LOGGER.info('>>> TESTING: Delete Collector ') - collector_id_obj = create_collector_id_request() - TelemetryDBmanagerObj.delete_collector(collector_id_obj) + # LOGGER.info('>>> TESTING: Delete Collector ') + # collector_id_obj = create_collector_id_request() + # TelemetryDBmanagerObj.delete_collector(collector_id_obj) \ No newline at end of file -- GitLab From 23e6f6e8410641121435f034793b8bd82451d99c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 30 May 2024 11:12:55 +0000 Subject: [PATCH 134/205] Logs are added to log gRPC messages. --- .../service/KpiManagerServiceServicerImpl.py | 15 +++++---------- src/kpi_manager/tests/test_unitary.py | 7 +++++-- src/telemetry/database/managementDB.py | 2 +- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index c37bf373f..c5127a2de 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# do tests to verify the "grpc.ServicerContext" is required or not. 
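# A minimal usage sketch (illustrative, not part of the patch): SelectKpiDescriptor
# below builds a plain dict and hands it to managementDB.select_with_filter(), which
# turns every key/value pair into a SQLAlchemy equality predicate. The filter values
# here are hypothetical placeholders taken from the test messages.
from telemetry.database.managementDB import managementDB
from telemetry.database.TelemetryModel import Kpi as KpiModel

db = managementDB()
filter_to_apply = {
    'device_id'      : 'DEV4',   # hypothetical device UUID
    'kpi_sample_type': 102,      # numeric KpiSampleType value, as in create_kpi_filter_request_a()
}
# Internally this expands to:
#     query = query.filter(KpiModel.device_id == 'DEV4')
#     query = query.filter(KpiModel.kpi_sample_type == 102)
rows = db.select_with_filter(KpiModel, **filter_to_apply)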
+ import logging, grpc from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty @@ -39,7 +39,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiId: # type: ignore response = KpiId() - + LOGGER.info("Received gRPC message object: {:}".format(request)) try: kpi_to_insert = KpiModel() kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid @@ -62,7 +62,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiDescriptor: # type: ignore response = KpiDescriptor() - + LOGGER.info("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid row = self.managementDBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) @@ -82,6 +82,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext ) -> Empty: # type: ignore + LOGGER.info("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid self.managementDBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) @@ -93,6 +94,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiDescriptorList: # type: ignore + LOGGER.info("Received gRPC message object: {:}".format(request)) response = KpiDescriptorList() # LOGGER.info("Recevied requested Object: {:}".format(request)) # re-structre the filter. create dynamic filter @@ -105,13 +107,6 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info('Unable to apply filter on kpi descriptor. 
{:}'.format(e)) try: if len(rows) != 0: - kpi_id_obj = KpiId() - device_id_obj = DeviceId() - endpoint_id_obj = EndPointId() - service_id_obj = ServiceId() - slice_id_obj = SliceId() - link_id_obj = LinkId() - for row in rows: kpiDescriptor_obj = KpiDescriptor() kpiDescriptor_obj.kpi_id.kpi_id.uuid = row.kpi_id diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index e60d2104e..84cf44497 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -213,24 +213,27 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiId) def test_GetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) + LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiDescriptor) def test_DeleteKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - kpi_manager_client.DeleteKpiDescriptor(response) + del_response = kpi_manager_client.DeleteKpiDescriptor(response) kpi_manager_client.GetKpiDescriptor(response) + LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) assert isinstance(response, KpiId) def test_SelectKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) - # LOGGER.info(" >>> test_SelectKpiDescriptor: END <<< {:}".format(response)) + LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiDescriptorList) # ------------- INITIAL TESTs ---------------- diff --git a/src/telemetry/database/managementDB.py b/src/telemetry/database/managementDB.py index 706133477..0a94c6c25 100644 --- a/src/telemetry/database/managementDB.py +++ b/src/telemetry/database/managementDB.py @@ -112,7 +112,7 @@ class managementDB: query = query.filter(getattr(model, column) == value) # type: ignore result = query.all() if result: - LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}- Results: {result}") # + LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}") # - Results: {result} else: LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}") return result -- GitLab From 4f2374f6bc51d771c3cc261455706eeff5f400eb Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 30 May 2024 13:16:15 +0000 Subject: [PATCH 135/205] Minor changes in Telemetry package --- src/telemetry/backend/service/TelemetryBackendService.py | 2 +- src/telemetry/database/tests/telemetryDBtests.py | 2 +- .../frontend/service/TelemetryFrontendServiceServicerImpl.py | 4 ++-- src/telemetry/frontend/tests/Messages.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 9d393b1ad..b8888bf8b 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ 
b/src/telemetry/backend/service/TelemetryBackendService.py @@ -95,7 +95,7 @@ class TelemetryBackendService: if time.time() - start_time >= duration: # condition to terminate backend print("Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) self.generate_kafka_response(collector_id, "NULL", False) - # write to Kafka + # write to Kafka to send the termination confirmation. break # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) self.extract_kpi_value(collector_id, kpi_id) diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py index 9cb856a3d..59043b33f 100644 --- a/src/telemetry/database/tests/telemetryDBtests.py +++ b/src/telemetry/database/tests/telemetryDBtests.py @@ -35,7 +35,7 @@ def test_telemetry_object_creation(): LOGGER.info('>>> Creating TelemetryDBmanager Object <<< ') TelemetryDBmanagerObj = TelemetryDBmanager() - TelemetryEngine.create_database(TelemetryDBmanagerObj.db_engine) + TelemetryEngine.create_database(TelemetryDBmanagerObj.db_engine) # creates 'frontend' db, if it doesnot exists. LOGGER.info('>>> Creating database <<< ') TelemetryDBmanagerObj.create_database() diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 62a8969f9..245f92f81 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -51,8 +51,8 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): # Create a new Collector instance collector_to_insert = CollectorModel() collector_to_insert.collector_id = request.collector_id.collector_id.uuid - collector_to_insert.kpi_id = '3a17230d-8e95-4afb-8b21-6965481aee5a' - collector_to_insert.collector = "Test collector description" + collector_to_insert.kpi_id = request.kpi_id.kpi_id.uuid + collector_to_insert.collector = "DESC 1" collector_to_insert.sampling_duration_s = request.duration_s collector_to_insert.sampling_interval_s = request.interval_s collector_to_insert.start_timestamp = time.time() diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index 2dea48c88..0a33de63e 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -30,7 +30,7 @@ def create_collector_id_a(coll_id_str : str): def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) - _create_collector_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_collector_request.kpi_id.kpi_id.uuid = "165d20c5-a446-42fa-812f-e2b7ed283c6f" _create_collector_request.duration_s = float(random.randint(8, 16)) _create_collector_request.interval_s = float(random.randint(2, 4)) return _create_collector_request -- GitLab From 2f9bdc382bc3684bd0cf54de501f1ff89dfad001 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 30 May 2024 23:32:01 +0000 Subject: [PATCH 136/205] Telemetry Start,Stop and select Collector complete implementation --- .../run_tests_locally-telemetry-backend.sh | 2 +- .../service/TelemetryBackendService.py | 35 ++++-- .../backend/tests/testTelemetryBackend.py | 6 +- .../TelemetryFrontendServiceServicerImpl.py | 105 +++++++++++------- src/telemetry/frontend/tests/Messages.py | 76 +++++++------ 
src/telemetry/frontend/tests/test_frontend.py |  59 ++++------
 6 files changed, 158 insertions(+), 125 deletions(-)

diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh
index 34e9e0542..8f72fb283 100755
--- a/scripts/run_tests_locally-telemetry-backend.sh
+++ b/scripts/run_tests_locally-telemetry-backend.sh
@@ -24,5 +24,5 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
-python3 -m pytest --log-level=INFO --verbose \
+python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \
     telemetry/backend/tests/testTelemetryBackend.py
\ No newline at end of file
diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py
index b8888bf8b..f2e5ff3ac 100755
--- a/src/telemetry/backend/service/TelemetryBackendService.py
+++ b/src/telemetry/backend/service/TelemetryBackendService.py
@@ -33,7 +33,6 @@ LOGGER = logging.getLogger(__name__)
 METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend')
 KAFKA_SERVER_IP = '127.0.0.1:9092'
 ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP})
-ACTIVE_COLLECTORS = []
 KAFKA_TOPICS = {'request' : 'topic_request',
                 'response': 'topic_response'}
 EXPORTER_ENDPOINT = "http://node-exporter-7465c69b87-b6ks5.telebackend:9100/metrics"
@@ -45,6 +44,7 @@ class TelemetryBackendService:
 
     def __init__(self):
         LOGGER.info('Init TelemetryBackendService')
+        self.running_threads = {}
 
     def run_kafka_listener(self)->bool:
         threading.Thread(target=self.kafka_listener).start()
@@ -68,7 +68,7 @@ class TelemetryBackendService:
             receive_msg = consumerObj.poll(2.0)
             if receive_msg is None:
                 # print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", topic_request)         # added for debugging purposes
-                print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", KAFKA_TOPICS['request']) # added for debugging purposes
+                # print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", KAFKA_TOPICS['request']) # added for debugging purposes
                 continue
             elif receive_msg.error():
                 if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                     break
@@ -78,23 +78,31 @@ class TelemetryBackendService:
             (kpi_id, duration, interval) = ast.literal_eval(receive_msg.value().decode('utf-8'))
             collector_id = receive_msg.key().decode('utf-8')
-            self.run_initiate_collector_backend(collector_id, kpi_id, duration, interval)
+            if duration == -1 and interval == -1:
+                self.terminate_collector_backend(collector_id)
+                # threading.Thread(target=self.terminate_collector_backend, args=(collector_id))
+            else:
+                self.run_initiate_collector_backend(collector_id, kpi_id, duration, interval)
 
     def run_initiate_collector_backend(self, collector_id: str, kpi_id: str, duration: int, interval: int):
-        threading.Thread(target=self.initiate_collector_backend, args=(collector_id, kpi_id, duration, interval)).start()
+        stop_event = threading.Event()
+        thread = threading.Thread(target=self.initiate_collector_backend,
+                                  args=(collector_id, kpi_id, duration, interval, stop_event))
+        self.running_threads[collector_id] = (thread, stop_event)
+        thread.start()
 
-    def initiate_collector_backend(self, collector_id, kpi_id, duration, interval
+    def initiate_collector_backend(self, collector_id, kpi_id, duration, interval, stop_event
                                   ): # type: ignore
         """
         Method to receive collector request attributes and initiate the collector backend.
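        Runs until `duration` seconds have elapsed or `stop_event` is set (the
        latter happens when a StopCollector request arrives for this collector);
        on termination it emits a ("-1", -1) response towards the frontend.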
""" + print("Initiating backend for collector: ", collector_id) start_time = time.time() - while True: - ACTIVE_COLLECTORS.append(collector_id) + while not stop_event.is_set(): if time.time() - start_time >= duration: # condition to terminate backend - print("Execution Time Completed: \n --- Consumer terminating: KPI ID: ", kpi_id, " - ", time.time() - start_time) - self.generate_kafka_response(collector_id, "NULL", False) + print("Execuation duration completed: Terminating backend: Collector Id: ", collector_id, " - ", time.time() - start_time) + self.generate_kafka_response(collector_id, "-1", -1) # write to Kafka to send the termination confirmation. break # print ("Received KPI: ", kpi_id, ", Duration: ", duration, ", Fetch Interval: ", interval) @@ -125,6 +133,15 @@ class TelemetryBackendService: producerObj.produce(KAFKA_TOPICS['response'], key=msg_key, value= str(msg_value), callback=self.delivery_callback) producerObj.flush() + def terminate_collector_backend(self, collector_id): + if collector_id in self.running_threads: + thread, stop_event = self.running_threads[collector_id] + stop_event.set() + thread.join() + print ("Terminating backend (by StopCollector): Collector Id: ", collector_id) + del self.running_threads[collector_id] + self.generate_kafka_response(collector_id, "-1", -1) + def create_topic_if_not_exists(self, new_topics: list) -> bool: """ Method to create Kafka topic if it does not exist. diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index e3e8bbc4b..b8b29d04a 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -20,8 +20,6 @@ from typing import Tuple from common.proto.context_pb2 import Empty from src.telemetry.backend.service.TelemetryBackendService import TelemetryBackendService - - LOGGER = logging.getLogger(__name__) @@ -30,7 +28,7 @@ LOGGER = logging.getLogger(__name__) ########################### def test_verify_kafka_topics(): - LOGGER.warning('test_receive_kafka_request requesting') + LOGGER.info('test_verify_kafka_topics requesting') TelemetryBackendServiceObj = TelemetryBackendService() KafkaTopics = ['topic_request', 'topic_response'] response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics) @@ -38,7 +36,7 @@ def test_verify_kafka_topics(): assert isinstance(response, bool) def test_run_kafka_listener(): - LOGGER.warning('test_receive_kafka_request requesting') + LOGGER.info('test_receive_kafka_request requesting') TelemetryBackendServiceObj = TelemetryBackendService() response = TelemetryBackendServiceObj.run_kafka_listener() LOGGER.debug(str(response)) diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 245f92f81..2fab04b31 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -44,9 +44,12 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self, name_mapping : NameMapping): LOGGER.info('Init TelemetryFrontendService') self.managementDBobj = managementDB() + self.kafka_producer = KafkaProducer({'bootstrap.servers': KAFKA_SERVER_IP,}) + self.kafka_consumer = KafkaConsumer({'bootstrap.servers' : KAFKA_SERVER_IP, + 'group.id' : 'frontend', + 'auto.offset.reset' : 'latest'}) - - def add_collector_to_db(self, request: Collector ): + 
def add_collector_to_db(self, request: Collector ): # type: ignore try: # Create a new Collector instance collector_to_insert = CollectorModel() @@ -66,6 +69,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): request : Collector, grpc_context: grpc.ServicerContext # type: ignore ) -> CollectorId: # type: ignore # push info to frontend db + LOGGER.info ("gRPC message: {:}".format(request)) response = CollectorId() _collector_id = str(request.collector_id.collector_id.uuid) _collector_kpi_id = str(request.kpi_id.kpi_id.uuid) @@ -73,37 +77,36 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): _collector_interval = int(request.interval_s) # pushing Collector to DB self.add_collector_to_db(request) - self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) - # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) + self.publish_to_kafka_request_topic(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) + # self.run_publish_to_kafka_request_topic(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval) response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore return response - def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int): - threading.Thread(target=self.generate_kafka_request, args=(msg_key, kpi, duration, interval)).start() + def run_publish_to_kafka_request_topic(self, msg_key: str, kpi: str, duration : int, interval: int): + # Add threading.Thread() response to dictonary and call start() in the next statement + threading.Thread(target=self.publish_to_kafka_request_topic, args=(msg_key, kpi, duration, interval)).start() - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def generate_kafka_request(self, - msg_key: str, kpi: str, duration : int, interval: int - ) -> KafkaProducer: + def publish_to_kafka_request_topic(self, + collector_id: str, kpi: str, duration : int, interval: int + ): """ Method to generate collector request to Kafka topic. 
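        The message is keyed by the collector UUID and carries the value
        str((kpi, duration, interval)), which the backend parses back with
        ast.literal_eval(); StopCollector reuses this method with duration=-1
        and interval=-1 as the termination request.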
""" # time.sleep(5) - producer_configs = { - 'bootstrap.servers': KAFKA_SERVER_IP, - } + # producer_configs = { + # 'bootstrap.servers': KAFKA_SERVER_IP, + # } # topic_request = "topic_request" - msg_value = Tuple [str, int, int] - msg_value = (kpi, duration, interval) - # print ("Request generated: ", "Colletcor Id: ", msg_key, \ + msg_value : Tuple [str, int, int] = (kpi, duration, interval) + # print ("Request generated: ", "Colletcor Id: ", collector_id, \ # ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval) - producerObj = KafkaProducer(producer_configs) - producerObj.produce(KAFKA_TOPICS['request'], key=msg_key, value= str(msg_value), callback=self.delivery_callback) - LOGGER.info("Collector Request Generated: {:} -- {:} -- {:} -- {:}".format(msg_key, kpi, duration, interval)) - # producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback) - ACTIVE_COLLECTORS.append(msg_key) - producerObj.flush() - return producerObj + # producerObj = KafkaProducer(producer_configs) + self.kafka_producer.produce(KAFKA_TOPICS['request'], key=collector_id, value= str(msg_value), callback=self.delivery_callback) + # producerObj.produce(KAFKA_TOPICS['request'], key=collector_id, value= str(msg_value), callback=self.delivery_callback) + LOGGER.info("Collector Request Generated: {:}, {:}, {:}, {:}".format(collector_id, kpi, duration, interval)) + # producerObj.produce(topic_request, key=collector_id, value= str(msg_value), callback=self.delivery_callback) + ACTIVE_COLLECTORS.append(collector_id) + self.kafka_producer.flush() def run_kafka_listener(self): # print ("--- STARTED: run_kafka_listener ---") @@ -114,21 +117,21 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): """ listener for response on Kafka topic. 
""" - # print ("--- STARTED: kafka_listener ---") - conusmer_configs = { - 'bootstrap.servers' : KAFKA_SERVER_IP, - 'group.id' : 'frontend', - 'auto.offset.reset' : 'latest' - } - # topic_response = "topic_response" - - consumerObj = KafkaConsumer(conusmer_configs) - consumerObj.subscribe([KAFKA_TOPICS['response']]) + # # print ("--- STARTED: kafka_listener ---") + # conusmer_configs = { + # 'bootstrap.servers' : KAFKA_SERVER_IP, + # 'group.id' : 'frontend', + # 'auto.offset.reset' : 'latest' + # } + # # topic_response = "topic_response" + + # consumerObj = KafkaConsumer(conusmer_configs) + self.kafka_consumer.subscribe([KAFKA_TOPICS['response']]) # print (time.time()) while True: - receive_msg = consumerObj.poll(2.0) + receive_msg = self.kafka_consumer.poll(2.0) if receive_msg is None: - print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['response']) # added for debugging purposes + # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['response']) # added for debugging purposes continue elif receive_msg.error(): if receive_msg.error().code() == KafkaError._PARTITION_EOF: @@ -140,7 +143,7 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): collector_id = receive_msg.key().decode('utf-8') if collector_id in ACTIVE_COLLECTORS: (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8')) - self.process_response(kpi_id, kpi_value) + self.process_response(collector_id, kpi_id, kpi_value) else: print(f"collector id does not match.\nRespone ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}' ") except Exception as e: @@ -148,8 +151,12 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): continue # return None - def process_response(self, kpi_id: str, kpi_value: Any): - print ("Frontend - KPI: ", kpi_id, ", VALUE: ", kpi_value) + def process_response(self, collector_id: str, kpi_id: str, kpi_value: Any): + if kpi_id == "-1" and kpi_value == -1: + # LOGGER.info("Sucessfully terminated Collector: {:}".format(collector_id)) + print ("Sucessfully terminated Collector: ", collector_id) + else: + print ("Frontend-Received values Collector Id:", collector_id, "-KPI:", kpi_id, "-VALUE:", kpi_value) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def delivery_callback(self, err, msg): @@ -168,12 +175,30 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def StopCollector(self, request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore ) -> Empty: # type: ignore - request.collector_id.uuid = "" + LOGGER.info ("gRPC message: {:}".format(request)) + _collector_id = request.collector_id.uuid + self.publish_to_kafka_request_topic(_collector_id, "", -1, -1) return Empty() @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SelectCollectors(self, request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore ) -> CollectorList: # type: ignore + LOGGER.info("gRPC message: {:}".format(request)) response = CollectorList() - return response \ No newline at end of file + filter_to_apply = dict() + filter_to_apply['kpi_id'] = request.kpi_id[0].kpi_id.uuid + # filter_to_apply['duration_s'] = request.duration_s[0] + try: + rows = self.managementDBobj.select_with_filter(CollectorModel, **filter_to_apply) + except Exception as e: + LOGGER.info('Unable to apply filter on kpi descriptor. 
{:}'.format(e)) + try: + if len(rows) != 0: + for row in rows: + collector_obj = Collector() + collector_obj.collector_id.collector_id.uuid = row.collector_id + response.collector_list.append(collector_obj) + return response + except Exception as e: + LOGGER.info('Unable to process response {:}'.format(e)) \ No newline at end of file diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index 0a33de63e..48668f7bf 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -22,10 +22,10 @@ def create_collector_id(): _collector_id.collector_id.uuid = uuid.uuid4() return _collector_id -def create_collector_id_a(coll_id_str : str): - _collector_id = telemetry_frontend_pb2.CollectorId() - _collector_id.collector_id.uuid = str(coll_id_str) - return _collector_id +# def create_collector_id_a(coll_id_str : str): +# _collector_id = telemetry_frontend_pb2.CollectorId() +# _collector_id.collector_id.uuid = str(coll_id_str) +# return _collector_id def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() @@ -35,39 +35,45 @@ def create_collector_request(): _create_collector_request.interval_s = float(random.randint(2, 4)) return _create_collector_request -def create_collector_request_a(): - _create_collector_request_a = telemetry_frontend_pb2.Collector() - _create_collector_request_a.collector_id.collector_id.uuid = "-1" - return _create_collector_request_a - -def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s - ) -> telemetry_frontend_pb2.Collector: - _create_collector_request_b = telemetry_frontend_pb2.Collector() - _create_collector_request_b.collector_id.collector_id.uuid = '1' - _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id - _create_collector_request_b.duration_s = coll_duration_s - _create_collector_request_b.interval_s = coll_interval_s - return _create_collector_request_b - def create_collector_filter(): _create_collector_filter = telemetry_frontend_pb2.CollectorFilter() - new_collector_id = _create_collector_filter.collector_id.add() - new_collector_id.collector_id.uuid = "COLL1" new_kpi_id = _create_collector_filter.kpi_id.add() - new_kpi_id.kpi_id.uuid = "KPI1" - new_device_id = _create_collector_filter.device_id.add() - new_device_id.device_uuid.uuid = 'DEV1' - new_service_id = _create_collector_filter.service_id.add() - new_service_id.service_uuid.uuid = 'SERV1' - new_slice_id = _create_collector_filter.slice_id.add() - new_slice_id.slice_uuid.uuid = 'SLC1' - new_endpoint_id = _create_collector_filter.endpoint_id.add() - new_endpoint_id.endpoint_uuid.uuid = 'END1' - new_connection_id = _create_collector_filter.connection_id.add() - new_connection_id.connection_uuid.uuid = 'CON1' - _create_collector_filter.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + new_kpi_id.kpi_id.uuid = "165d20c5-a446-42fa-812f-e2b7ed283c6f" return _create_collector_filter -def create_collector_list(): - _create_collector_list = telemetry_frontend_pb2.CollectorList() - return _create_collector_list \ No newline at end of file +# def create_collector_request_a(): +# _create_collector_request_a = telemetry_frontend_pb2.Collector() +# _create_collector_request_a.collector_id.collector_id.uuid = "-1" +# return _create_collector_request_a + +# def create_collector_request_b(str_kpi_id, coll_duration_s, coll_interval_s +# ) -> telemetry_frontend_pb2.Collector: +# _create_collector_request_b = telemetry_frontend_pb2.Collector() +# 
_create_collector_request_b.collector_id.collector_id.uuid = '1' +# _create_collector_request_b.kpi_id.kpi_id.uuid = str_kpi_id +# _create_collector_request_b.duration_s = coll_duration_s +# _create_collector_request_b.interval_s = coll_interval_s +# return _create_collector_request_b + +# def create_collector_filter(): +# _create_collector_filter = telemetry_frontend_pb2.CollectorFilter() +# new_collector_id = _create_collector_filter.collector_id.add() +# new_collector_id.collector_id.uuid = "COLL1" +# new_kpi_id = _create_collector_filter.kpi_id.add() +# new_kpi_id.kpi_id.uuid = "KPI1" +# new_device_id = _create_collector_filter.device_id.add() +# new_device_id.device_uuid.uuid = 'DEV1' +# new_service_id = _create_collector_filter.service_id.add() +# new_service_id.service_uuid.uuid = 'SERV1' +# new_slice_id = _create_collector_filter.slice_id.add() +# new_slice_id.slice_uuid.uuid = 'SLC1' +# new_endpoint_id = _create_collector_filter.endpoint_id.add() +# new_endpoint_id.endpoint_uuid.uuid = 'END1' +# new_connection_id = _create_collector_filter.connection_id.add() +# new_connection_id.connection_uuid.uuid = 'CON1' +# _create_collector_filter.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) +# return _create_collector_filter + +# def create_collector_list(): +# _create_collector_list = telemetry_frontend_pb2.CollectorList() +# return _create_collector_list \ No newline at end of file diff --git a/src/telemetry/frontend/tests/test_frontend.py b/src/telemetry/frontend/tests/test_frontend.py index 230122a2d..7d050349b 100644 --- a/src/telemetry/frontend/tests/test_frontend.py +++ b/src/telemetry/frontend/tests/test_frontend.py @@ -31,8 +31,7 @@ from common.Settings import ( from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient from telemetry.frontend.service.TelemetryFrontendService import TelemetryFrontendService from telemetry.frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl -from telemetry.frontend.tests.Messages import ( create_collector_id, create_collector_request, - create_collector_filter, create_collector_request_a, create_collector_request_b) +from telemetry.frontend.tests.Messages import ( create_collector_request, create_collector_filter) from device.client.DeviceClient import DeviceClient from device.service.DeviceService import DeviceService @@ -167,43 +166,31 @@ def telemetryFrontend_client( # Tests Implementation of Telemetry Frontend ########################### -def test_start_collector(telemetryFrontend_client): - LOGGER.info('test_start_collector requesting') +def test_StartCollector(telemetryFrontend_client): + LOGGER.info(' >>> test_StartCollector START: <<< ') response = telemetryFrontend_client.StartCollector(create_collector_request()) LOGGER.debug(str(response)) assert isinstance(response, CollectorId) -# def test_start_collector_a(telemetryFrontend_client): -# LOGGER.warning('test_start_collector requesting') -# response = telemetryFrontend_client.StartCollector(create_collector_request()) -# LOGGER.debug(str(response)) -# assert isinstance(response, CollectorId) - -# def test_start_collector_b(telemetryFrontend_client): -# LOGGER.warning('test_start_collector requesting') -# response = telemetryFrontend_client.StartCollector(create_collector_request()) -# LOGGER.debug(str(response)) -# assert isinstance(response, CollectorId) - -# def test_run_kafka_listener(): -# LOGGER.warning('test_receive_kafka_request requesting') -# name_mapping = NameMapping() -# 
TelemetryFrontendServiceObj = TelemetryFrontendServiceServicerImpl(name_mapping)
-#     response = TelemetryFrontendServiceObj.run_kafka_listener() # Method "run_kafka_listener" is not define in frontend.proto
-#     LOGGER.debug(str(response))
-#     assert isinstance(response, bool)
-
-
-
+def test_run_kafka_listener():
+    LOGGER.info(' >>> test_run_kafka_listener START: <<< ')
+    name_mapping = NameMapping()
+    TelemetryFrontendServiceObj = TelemetryFrontendServiceServicerImpl(name_mapping)
+    response = TelemetryFrontendServiceObj.run_kafka_listener() # Method "run_kafka_listener" is not defined in frontend.proto
+    LOGGER.debug(str(response))
+    assert isinstance(response, bool)
 
-# def test_stop_collector(telemetryFrontend_client):
-#     LOGGER.warning('test_stop_collector requesting')
-#     response = telemetryFrontend_client.StopCollector(create_collector_id("1"))
-#     LOGGER.debug(str(response))
-#     assert isinstance(response, Empty)
+def test_StopCollector(telemetryFrontend_client):
+    LOGGER.info(' >>> test_StopCollector START: <<< ')
+    _collector_id = telemetryFrontend_client.StartCollector(create_collector_request())
+    time.sleep(3) # wait briefly before calling StopCollector()
+    response = telemetryFrontend_client.StopCollector(_collector_id)
+    LOGGER.debug(str(response))
+    assert isinstance(response, Empty)
 
-# def test_select_collectors(telemetryFrontend_client):
-#     LOGGER.warning('test_select_collector requesting')
-#     response = telemetryFrontend_client.SelectCollectors(create_collector_filter())
-#     LOGGER.debug(str(response))
-#     assert isinstance(response, CollectorList)
\ No newline at end of file
+def test_select_collectors(telemetryFrontend_client):
+    LOGGER.info(' >>> test_select_collectors requesting <<< ')
+    response = telemetryFrontend_client.SelectCollectors(create_collector_filter())
+    LOGGER.info('Received Rows after applying Filter: {:} '.format(response))
+    LOGGER.debug(str(response))
+    assert isinstance(response, CollectorList)
\ No newline at end of file
-- 
GitLab


From 3b8e35eea744878b51f8e3e8e7265c0088a1db23 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 3 Jun 2024 10:02:14 +0000
Subject: [PATCH 137/205] Modifications made for tests (fetch_node_exporter_metrics
 and stream_node_export_metrics_to_raw_topic)

---
 .../service/TelemetryBackendService.py        | 61 ++++++++++++++-----
 .../backend/tests/testTelemetryBackend.py     | 39 +++++++-----
 2 files changed, 70 insertions(+), 30 deletions(-)

diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py
index f2e5ff3ac..2ce8ebf70 100755
--- a/src/telemetry/backend/service/TelemetryBackendService.py
+++ b/src/telemetry/backend/service/TelemetryBackendService.py
@@ -33,9 +33,11 @@ LOGGER = logging.getLogger(__name__)
 METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend')
 KAFKA_SERVER_IP = '127.0.0.1:9092'
 ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP})
-KAFKA_TOPICS = {'request' : 'topic_request',
-                'response': 'topic_response'}
-EXPORTER_ENDPOINT = "http://node-exporter-7465c69b87-b6ks5.telebackend:9100/metrics"
+KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response',
+                'raw' : 'topic_raw' , 'labeled' : 'topic_labled'}
+EXPORTER_ENDPOINT = "http://127.0.0.1:9100/metrics"
+PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,}
+
 
 class TelemetryBackendService:
     """
@@ -122,15 +124,12 @@ class TelemetryBackendService:
         """
         Method to write response on Kafka topic
        """
-        producer_configs = {
-            'bootstrap.servers': KAFKA_SERVER_IP,
-        }
         # topic_response = "topic_response"
         msg_value : Tuple [str, Any] = (kpi_id, kpi_value)
         msg_key    = collector_id
-        producerObj = KafkaProducer(producer_configs)
+        producerObj = KafkaProducer(PRODUCER_CONFIG)
         # producerObj.produce(topic_response, key=msg_key, value= str(msg_value), callback=self.delivery_callback)
-        producerObj.produce(KAFKA_TOPICS['response'], key=msg_key, value= str(msg_value), callback=self.delivery_callback)
+        producerObj.produce(KAFKA_TOPICS['response'], key=msg_key, value= str(msg_value), callback=TelemetryBackendService.delivery_callback)
         producerObj.flush()
 
     def terminate_collector_backend(self, collector_id):
@@ -161,7 +160,8 @@ class TelemetryBackendService:
             return False
         return True
 
-    def delivery_callback(self, err, msg):
+    @staticmethod
+    def delivery_callback(err, msg):
         """
         Callback function to handle message delivery status.
         Args:
@@ -174,8 +174,8 @@ class TelemetryBackendService:
             print(f'Message delivered to topic {msg.topic()}')
 
 # ----------- BELOW: Actual Implementation of Kafka Producer with Node Exporter -----------
-
-    def fetch_node_exporter_metrics(self):
+    @staticmethod
+    def fetch_single_node_exporter_metric():
         """
         Method to fetch metrics from Node Exporter.
         Returns:
             str: Metrics fetched from Node Exporter.
         """
         KPI = "node_network_receive_packets_total"
         try:
             response = requests.get(EXPORTER_ENDPOINT) # type: ignore
+            LOGGER.info("Request status {:}".format(response))
             if response.status_code == 200:
                 # print(f"Metrics fetched successfully...")
                 metrics = response.text
                 # Check if the desired metric is available in the response
                 if KPI in metrics:
-                    KPI_VALUE = self.extract_metric_value(metrics, KPI)
+                    KPI_VALUE = TelemetryBackendService.extract_metric_value(metrics, KPI)
                     # Extract the metric value
                     if KPI_VALUE is not None:
-                        print(f"KPI value: {KPI_VALUE}")
+                        LOGGER.info("Extracted value of {:} is {:}".format(KPI, KPI_VALUE))
+                        print(f"Extracted value of {KPI} is: {KPI_VALUE}")
                         return KPI_VALUE
             else:
-                print(f"Failed to fetch metrics. Status code: {response.status_code}")
+                LOGGER.info("Failed to fetch metrics. Status code: {:}".format(response.status_code))
+                # print(f"Failed to fetch metrics. Status code: {response.status_code}")
                 return None
         except Exception as e:
-            print(f"Failed to fetch metrics: {str(e)}")
+            LOGGER.info("Failed to fetch metrics: {:}".format(e))
+            # print(f"Failed to fetch metrics: {str(e)}")
             return None
 
-    def extract_metric_value(self, metrics, metric_name):
+    @staticmethod
+    def extract_metric_value(metrics, metric_name):
         """
         Method to extract the value of a metric from the metrics string.
         Args:
@@ -220,4 +225,28 @@ class TelemetryBackendService:
             print(f"Metric '{metric_name}' not found in the metrics.")
             return None
 
+    @staticmethod
+    def stream_node_export_metrics_to_raw_topic():
+        try:
+            while True:
+                response = requests.get(EXPORTER_ENDPOINT)
+                LOGGER.info("Response Status {:} ".format(response))
+                try:
+                    if response.status_code == 200:
+                        producerObj = KafkaProducer(PRODUCER_CONFIG)
+                        producerObj.produce(KAFKA_TOPICS['raw'], key="raw", value= str(response.text), callback=TelemetryBackendService.delivery_callback)
+                        producerObj.flush()
+                        LOGGER.info("Produced to topic")
+                    else:
+                        LOGGER.info("Didn't receive the expected response. Status code: {:}".format(response.status_code))
+                        print(f"Didn't receive the expected response. Status code: {response.status_code}")
+                        return None
+                    time.sleep(5)
+                except Exception as e:
+                    LOGGER.info("Failed to process response: {:}".format(e))
+                    return None
+        except Exception as e:
+            LOGGER.info("Failed to fetch metrics: {:}".format(e))
+            print(f"Failed to fetch metrics: {str(e)}")
+            return None
 # ----------- ABOVE: Actual Implementation of Kafka Producer with Node Exporter -----------
\ No newline at end of file
diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py
index b8b29d04a..bc64c473e 100644
--- a/src/telemetry/backend/tests/testTelemetryBackend.py
+++ b/src/telemetry/backend/tests/testTelemetryBackend.py
@@ -15,6 +15,7 @@
 import sys
 print (sys.path)
 sys.path.append('/home/tfs/tfs-ctrl')
+import threading
 import logging
 from typing import Tuple
 from common.proto.context_pb2 import Empty
@@ -27,17 +28,27 @@ LOGGER = logging.getLogger(__name__)
 # Tests Implementation of Telemetry Backend
 ###########################
 
-def test_verify_kafka_topics():
-    LOGGER.info('test_verify_kafka_topics requesting')
-    TelemetryBackendServiceObj = TelemetryBackendService()
-    KafkaTopics = ['topic_request', 'topic_response']
-    response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics)
-    LOGGER.debug(str(response))
-    assert isinstance(response, bool)
-
-def test_run_kafka_listener():
-    LOGGER.info('test_receive_kafka_request requesting')
-    TelemetryBackendServiceObj = TelemetryBackendService()
-    response = TelemetryBackendServiceObj.run_kafka_listener()
-    LOGGER.debug(str(response))
-    assert isinstance(response, bool)
+# def test_verify_kafka_topics():
+#     LOGGER.info('test_verify_kafka_topics requesting')
+#     TelemetryBackendServiceObj = TelemetryBackendService()
+#     KafkaTopics = ['topic_request', 'topic_response']
+#     response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics)
+#     LOGGER.debug(str(response))
+#     assert isinstance(response, bool)
+
+# def test_run_kafka_listener():
+#     LOGGER.info('test_receive_kafka_request requesting')
+#     TelemetryBackendServiceObj = TelemetryBackendService()
+#     response = TelemetryBackendServiceObj.run_kafka_listener()
+#     LOGGER.debug(str(response))
+#     assert isinstance(response, bool)
+
+
+def test_fetch_node_exporter_metrics():
+    LOGGER.info(' >>> test_fetch_node_exporter_metrics START <<< ')
+    TelemetryBackendService.fetch_single_node_exporter_metric()
+
+def test_stream_node_export_metrics_to_raw_topic():
+    LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ')
+    threading.Thread(target=TelemetryBackendService.stream_node_export_metrics_to_raw_topic, args=()).start()
+
-- 
GitLab


From a4a63b5d81994262b29bba7e6c9e20ae0094656d Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 3 Jun 2024 12:10:32 +0000
Subject: [PATCH 138/205] KpiComposer: read Kafka stream and extract the metric
 value as per configured KPIs.
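
A note on the payload format this series builds on: Node Exporter serves metrics in the Prometheus exposition format, one sample per line with '# HELP' / '# TYPE' metadata lines interleaved, so a composer consuming topic_raw only needs line-oriented matching. Below is a minimal, self-contained sketch of that matching step; the KPI names are taken from this series, while the sample payload is illustrative rather than captured from a live exporter:

import re

KPIS_TO_SEARCH = ["node_timex_sync_status", "node_udp_queues"]   # names used in this series

# Illustrative scrape fragment; a real Node Exporter payload is far larger.
SAMPLE_EXPOSITION = (
    '# HELP node_udp_queues (help text elided)\n'
    '# TYPE node_udp_queues gauge\n'
    'node_udp_queues{ip="v4",queue="rx"} 0\n'
    'node_timex_sync_status 1\n'
)

def extract_matching_samples(exposition_text):
    # Keep only sample lines whose metric name matches a configured KPI,
    # skipping the '# HELP' / '# TYPE' metadata lines.
    pattern = re.compile("|".join(map(re.escape, KPIS_TO_SEARCH)))
    samples = []
    for line in exposition_text.splitlines():
        if line.startswith('#') or not pattern.search(line):
            continue
        name_and_labels, _, value = line.rpartition(' ')
        samples.append((name_and_labels, float(value)))
    return samples

print(extract_matching_samples(SAMPLE_EXPOSITION))
# [('node_udp_queues{ip="v4",queue="rx"}', 0.0), ('node_timex_sync_status', 1.0)]

The module added below keeps the raw tuple(line.split(" ")) instead of the rpartition split used here; both keep the label set attached to the metric name, rpartition merely tolerates spaces inside label values.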
--- scripts/run_tests_locally-kpi-composer.sh | 23 +++++ ...er.sh => run_tests_locally-kpi-manager.sh} | 0 src/kpi_manager/service/KpiValueComposer.py | 86 +++++++++++++++++++ src/kpi_manager/tests/test_kpi_composer.py | 23 +++++ .../service/TelemetryBackendService.py | 1 - .../backend/tests/testTelemetryBackend.py | 7 +- .../TelemetryFrontendServiceServicerImpl.py | 70 +++++++-------- 7 files changed, 170 insertions(+), 40 deletions(-) create mode 100755 scripts/run_tests_locally-kpi-composer.sh rename scripts/{run_tests_locally-kpi_manager.sh => run_tests_locally-kpi-manager.sh} (100%) create mode 100644 src/kpi_manager/service/KpiValueComposer.py create mode 100644 src/kpi_manager/tests/test_kpi_composer.py diff --git a/scripts/run_tests_locally-kpi-composer.sh b/scripts/run_tests_locally-kpi-composer.sh new file mode 100755 index 000000000..c61b25788 --- /dev/null +++ b/scripts/run_tests_locally-kpi-composer.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ + kpi_manager/tests/test_kpi_composer.py \ No newline at end of file diff --git a/scripts/run_tests_locally-kpi_manager.sh b/scripts/run_tests_locally-kpi-manager.sh similarity index 100% rename from scripts/run_tests_locally-kpi_manager.sh rename to scripts/run_tests_locally-kpi-manager.sh diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py new file mode 100644 index 000000000..2710aac81 --- /dev/null +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -0,0 +1,86 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
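
One design note worth making explicit before the module body: the consumer configuration that follows joins group 'kpi_composer' with auto.offset.reset set to 'latest', so the composer only sees messages produced after it attaches. Because Kafka delivers every message to each distinct consumer group, the raw stream can also be tailed from an independent group for debugging without stealing messages from the composer. A sketch, assuming the broker address configured below (the group name here is arbitrary):

from confluent_kafka import Consumer, KafkaError

debug_consumer = Consumer({
    'bootstrap.servers' : '127.0.0.1:9092',            # assumption: matches KAFKA_SERVER_IP below
    'group.id'          : 'kpi_composer_debug_tail',   # independent group => independent offsets
    'auto.offset.reset' : 'earliest',                  # replay whatever the topic still retains
})
debug_consumer.subscribe(['topic_raw'])
try:
    while True:
        msg = debug_consumer.poll(2.0)
        if msg is None:
            continue
        if msg.error():
            if msg.error().code() != KafkaError._PARTITION_EOF:
                print('Consumer error: {}'.format(msg.error()))
            continue
        print('topic_raw payload ({} bytes)'.format(len(msg.value())))
finally:
    debug_consumer.close()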
+ +# read Kafka stream from Kafka topic + +import re +import logging +import threading +from confluent_kafka import KafkaError +from confluent_kafka import Producer as KafkaProducer +from confluent_kafka import Consumer as KafkaConsumer + +LOGGER = logging.getLogger(__name__) +KAFKA_SERVER_IP = '127.0.0.1:9092' +# ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) +KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', + 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} +PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,} +CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP, + 'group.id' : 'kpi_composer', + 'auto.offset.reset' : 'latest'} +KPIs_TO_SEARCH = ["node_timex_status", "node_timex_sync_status", "node_udp_queues"] + +class KpiValueComposer: + def __init__(self) -> None: + pass + + @staticmethod + def compose_kpi(): + KpiValueComposer.run_kafka_listener() + + @staticmethod + def run_kafka_listener(): + threading.Thread(target=KpiValueComposer.kafka_listener, args=()).start() + + @staticmethod + def kafka_listener(): + """ + listener for events on Kafka topic. + """ + kafka_consumer = KafkaConsumer(CONSUMER_CONFIG) + kafka_consumer.subscribe([KAFKA_TOPICS['raw']]) + while True: + receive_msg = kafka_consumer.poll(2.0) + if receive_msg is None: + # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['raw']) # added for debugging purposes + continue + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + continue + try: + new_event = receive_msg.value().decode('utf-8') + # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) + LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) + KpiValueComposer.extract_kpi_values(new_event) + except Exception as e: + print(f"Error to consume event from topic: {KAFKA_TOPICS['raw']}. Error detail: {str(e)}") + continue + + @staticmethod + def extract_kpi_values(event): + pattern = re.compile("|".join(map(re.escape, KPIs_TO_SEARCH))) + lines = event.split('\n') + matching_rows = [] + for line in lines: + if pattern.search(line) and not line.startswith("# HELP") and not line.startswith("# TYPE"): + matching_rows.append(tuple(line.split(" "))) + print("Extracted Rows that match the KPIs {:}".format(matching_rows)) + # LOGGER.info("Extracted Rows that match the KPIs {:}".format(matching_rows)) + return matching_rows + + diff --git a/src/kpi_manager/tests/test_kpi_composer.py b/src/kpi_manager/tests/test_kpi_composer.py new file mode 100644 index 000000000..a4312ea53 --- /dev/null +++ b/src/kpi_manager/tests/test_kpi_composer.py @@ -0,0 +1,23 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
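
The test in this file only starts the composer's listener thread, so nothing observable happens unless topic_raw receives traffic. One way to feed it by hand, sketched under the assumption that a broker is reachable at the address configured in KpiValueComposer and that the topic already exists (the two-line payload is fake exposition text):

from confluent_kafka import Producer

producer = Producer({'bootstrap.servers': '127.0.0.1:9092'})   # assumption: broker address
fake_scrape = 'node_timex_sync_status 1\nnode_udp_queues{ip="v4",queue="rx"} 0\n'
producer.produce('topic_raw', key='raw', value=fake_scrape)
producer.flush()   # block until the broker acknowledges delivery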
+ +import threading +import logging +from kpi_manager.service.KpiValueComposer import KpiValueComposer + +LOGGER = logging.getLogger(__name__) + +def test_compose_kpi(): + LOGGER.info(' >>> test_compose_kpi START <<< ') + KpiValueComposer.compose_kpi() \ No newline at end of file diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 2ce8ebf70..4cfee8dba 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -1,4 +1,3 @@ - # Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index bc64c473e..7c3b7497b 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -43,10 +43,9 @@ LOGGER = logging.getLogger(__name__) # LOGGER.debug(str(response)) # assert isinstance(response, bool) - -def test_fetch_node_exporter_metrics(): - LOGGER.info(' >>> test_fetch_node_exporter_metrics START <<< ') - TelemetryBackendService.fetch_single_node_exporter_metric() +# def test_fetch_node_exporter_metrics(): +# LOGGER.info(' >>> test_fetch_node_exporter_metrics START <<< ') +# TelemetryBackendService.fetch_single_node_exporter_metric() def test_stream_node_export_metrics_to_raw_topic(): LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ') diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index 2fab04b31..d10e9dffd 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -114,42 +114,42 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): return True def kafka_listener(self): - """ - listener for response on Kafka topic. - """ - # # print ("--- STARTED: kafka_listener ---") - # conusmer_configs = { - # 'bootstrap.servers' : KAFKA_SERVER_IP, - # 'group.id' : 'frontend', - # 'auto.offset.reset' : 'latest' - # } - # # topic_response = "topic_response" - - # consumerObj = KafkaConsumer(conusmer_configs) - self.kafka_consumer.subscribe([KAFKA_TOPICS['response']]) - # print (time.time()) - while True: - receive_msg = self.kafka_consumer.poll(2.0) - if receive_msg is None: - # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['response']) # added for debugging purposes - continue - elif receive_msg.error(): - if receive_msg.error().code() == KafkaError._PARTITION_EOF: - continue - else: - print("Consumer error: {}".format(receive_msg.error())) - break - try: - collector_id = receive_msg.key().decode('utf-8') - if collector_id in ACTIVE_COLLECTORS: - (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8')) - self.process_response(collector_id, kpi_id, kpi_value) - else: - print(f"collector id does not match.\nRespone ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}' ") - except Exception as e: - print(f"No message key found: {str(e)}") + """ + listener for response on Kafka topic. 
+        """
+        # # print ("--- STARTED: kafka_listener ---")
+        # consumer_configs = {
+        #     'bootstrap.servers' : KAFKA_SERVER_IP,
+        #     'group.id'          : 'frontend',
+        #     'auto.offset.reset' : 'latest'
+        # }
+        # # topic_response = "topic_response"
+
+        # consumerObj = KafkaConsumer(consumer_configs)
+        self.kafka_consumer.subscribe([KAFKA_TOPICS['response']])
+        # print (time.time())
+        while True:
+            receive_msg = self.kafka_consumer.poll(2.0)
+            if receive_msg is None:
+                # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['response'])     # added for debugging purposes
+                continue
+            elif receive_msg.error():
+                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
+                    continue
+                else:
+                    print("Consumer error: {}".format(receive_msg.error()))
+                    break
+            try:
+                collector_id = receive_msg.key().decode('utf-8')
+                if collector_id in ACTIVE_COLLECTORS:
+                    (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8'))
+                    self.process_response(collector_id, kpi_id, kpi_value)
+                else:
+                    print(f"collector id does not match.\nResponse ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}' ")
+            except Exception as e:
+                print(f"No message key found: {str(e)}")
+                continue
+        # return None
 
     def process_response(self, collector_id: str, kpi_id: str, kpi_value: Any):
         if kpi_id == "-1" and kpi_value == -1:
-- 
GitLab


From 6a191bd306bfea36ac63a55a3d10da41bf28ce92 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Wed, 5 Jun 2024 14:53:34 +0000
Subject: [PATCH 139/205] After VM crash recovery, composer is able to match
 and read KPIs

---
 deploy/exporters.sh                           | 23 +++++++++++++++++++
 deploy/kafka.sh                               |  4 ++--
 ...ter.yaml => node_exporter_deployment.yaml} |  0
 ...ervice.yaml => node_exporter_service.yaml} |  0
 proto/telemetry_frontend.proto                | 14 +++++------
 src/kpi_manager/service/KpiValueComposer.py   |  5 ++--
 src/kpi_manager/tests/test_messages.py        |  2 +-
 .../service/TelemetryBackendService.py        | 10 ++++----
 .../backend/tests/testTelemetryBackend.py     |  4 ++--
 9 files changed, 44 insertions(+), 18 deletions(-)
 create mode 100644 deploy/exporters.sh
 rename manifests/{mock_nodeexporter.yaml => node_exporter_deployment.yaml} (100%)
 rename manifests/{mock_nodeexporterservice.yaml => node_exporter_service.yaml} (100%)

diff --git a/deploy/exporters.sh b/deploy/exporters.sh
new file mode 100644
index 000000000..6c56f25c9
--- /dev/null
+++ b/deploy/exporters.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+########################################################################################################################
+# Read deployment settings
+########################################################################################################################
+
+# If not already set, set the namespace where Apache Kafka will be deployed.
+export KFK_NAMESPACE=${KFK_NAMESPACE:-"exporters"} + +# Add instruction of exporter automatic deployment here \ No newline at end of file diff --git a/deploy/kafka.sh b/deploy/kafka.sh index f2fb666b5..4be5ef6b2 100755 --- a/deploy/kafka.sh +++ b/deploy/kafka.sh @@ -60,10 +60,10 @@ echo ">>> Deploying Apache Kafka Broker" kubectl --namespace ${KFK_NAMESPACE} apply -f "${TMP_MANIFESTS_FOLDER}/$KFK_MANIFEST" echo ">>> Verifing Apache Kafka deployment" -sleep 5 +sleep 10 KFK_PODS_STATUS=$(kubectl --namespace ${KFK_NAMESPACE} get pods) if echo "$KFK_PODS_STATUS" | grep -qEv 'STATUS|Running'; then - echo "Deployment Error: $KFK_PODS_STATUS" + echo "Deployment Error: \n $KFK_PODS_STATUS" else echo "$KFK_PODS_STATUS" fi \ No newline at end of file diff --git a/manifests/mock_nodeexporter.yaml b/manifests/node_exporter_deployment.yaml similarity index 100% rename from manifests/mock_nodeexporter.yaml rename to manifests/node_exporter_deployment.yaml diff --git a/manifests/mock_nodeexporterservice.yaml b/manifests/node_exporter_service.yaml similarity index 100% rename from manifests/mock_nodeexporterservice.yaml rename to manifests/node_exporter_service.yaml diff --git a/proto/telemetry_frontend.proto b/proto/telemetry_frontend.proto index 1f89a5d54..48bfd7a0e 100644 --- a/proto/telemetry_frontend.proto +++ b/proto/telemetry_frontend.proto @@ -28,13 +28,13 @@ message CollectorFilter { // All fields empty means: list all Collectors repeated CollectorId collector_id = 1; repeated kpi_manager.KpiId kpi_id = 2; - repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; - repeated context.DeviceId device_id = 4; - repeated context.EndPointId endpoint_id = 5; - repeated context.ServiceId service_id = 6; - repeated context.SliceId slice_id = 7; - repeated context.ConnectionId connection_id = 8; - repeated context.LinkId link_id = 9; + // repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; + // repeated context.DeviceId device_id = 4; + // repeated context.EndPointId endpoint_id = 5; + // repeated context.ServiceId service_id = 6; + // repeated context.SliceId slice_id = 7; + // repeated context.ConnectionId connection_id = 8; + // repeated context.LinkId link_id = 9; } message CollectorList { diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 2710aac81..38c07a22a 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -22,7 +22,8 @@ from confluent_kafka import Producer as KafkaProducer from confluent_kafka import Consumer as KafkaConsumer LOGGER = logging.getLogger(__name__) -KAFKA_SERVER_IP = '127.0.0.1:9092' +KAFKA_SERVER_IP = '10.152.183.175:9092' +# KAFKA_SERVER_IP = '127.0.0.1:9092' # ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} @@ -65,7 +66,7 @@ class KpiValueComposer: try: new_event = receive_msg.value().decode('utf-8') # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) - LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) + # LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) KpiValueComposer.extract_kpi_values(new_event) except Exception as e: print(f"Error to consume event from topic: {KAFKA_TOPICS['raw']}. 
Error detail: {str(e)}") diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index db6160be5..83150c102 100755 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -18,7 +18,7 @@ from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId -# ---------------------- New Test Messages --------------------------------- +# ---------------------- 2nd iteration Test Messages --------------------------------- def create_kpi_id_request(): _kpi_id = kpi_manager_pb2.KpiId() _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 4cfee8dba..60cfcc6e6 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -30,11 +30,12 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') -KAFKA_SERVER_IP = '127.0.0.1:9092' +# KAFKA_SERVER_IP = '127.0.0.1:9092' +KAFKA_SERVER_IP = '10.152.183.175:9092' ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} -EXPORTER_ENDPOINT = "http://127.0.0.1:9100/metrics" +EXPORTER_ENDPOINT = "http://10.152.183.2:9100/metrics" PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,} @@ -68,7 +69,6 @@ class TelemetryBackendService: while True: receive_msg = consumerObj.poll(2.0) if receive_msg is None: - # print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", topic_request) # added for debugging purposes # print (time.time(), " - Telemetry backend is listening on Kafka Topic: ", KAFKA_TOPICS['request']) # added for debugging purposes continue elif receive_msg.error(): @@ -152,6 +152,7 @@ class TelemetryBackendService: if topic not in topic_metadata.topics: # If the topic does not exist, create a new topic print(f"Topic '{topic}' does not exist. Creating...") + LOGGER.info("Topic {:} does not exist. 
Creating...".format(topic))
                 new_topic = NewTopic(topic, num_partitions=1, replication_factor=1)
                 ADMIN_KAFKA_CLIENT.create_topics([new_topic])
         except KafkaException as e:
@@ -229,7 +230,8 @@ class TelemetryBackendService:
         try:
             while True:
                 response = requests.get(EXPORTER_ENDPOINT)
-                LOGGER.info("Response Status {:} ".format(response))
+                # print("Response Status {:} ".format(response))
+                # LOGGER.info("Response Status {:} ".format(response))
                 try:
                     if response.status_code == 200:
                         producerObj = KafkaProducer(PRODUCER_CONFIG)
diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py
index 7c3b7497b..e81e98473 100644
--- a/src/telemetry/backend/tests/testTelemetryBackend.py
+++ b/src/telemetry/backend/tests/testTelemetryBackend.py
@@ -18,7 +18,7 @@ sys.path.append('/home/tfs/tfs-ctrl')
 import threading
 import logging
 from typing import Tuple
-from common.proto.context_pb2 import Empty
+# from common.proto.context_pb2 import Empty
 from src.telemetry.backend.service.TelemetryBackendService import TelemetryBackendService
 
 LOGGER = logging.getLogger(__name__)
@@ -31,7 +31,7 @@ LOGGER = logging.getLogger(__name__)
 # def test_verify_kafka_topics():
 #     LOGGER.info('test_verify_kafka_topics requesting')
 #     TelemetryBackendServiceObj = TelemetryBackendService()
-#     KafkaTopics = ['topic_request', 'topic_response']
+#     KafkaTopics = ['topic_request', 'topic_response', 'topic_raw', 'topic_labled']
 #     response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics)
 #     LOGGER.debug(str(response))
 #     assert isinstance(response, bool)
-- 
GitLab


From 0859b4b2a48cee97bc3590570f5134aba535ab21 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 6 Jun 2024 14:40:25 +0000
Subject: [PATCH 140/205] KpiDescriptor with attributes inserted into the Kpi
 table; working

---
 scripts/run_tests_locally-kpi-DB.sh           |  28 ++++
 .../service/KpiManagerServiceServicerImpl.py  |  32 +++--
 src/kpi_manager/service/KpiValueComposer.py   |   7 +-
 .../service/database/KpiDBtests.py            |  30 +++++
 src/kpi_manager/service/database/KpiEngine.py |  49 +++++++
 src/kpi_manager/service/database/KpiModel.py  |  49 +++++++
 src/kpi_manager/service/database/Kpi_DB.py    | 127 ++++++++++++++++++
 src/kpi_manager/service/database/__init__.py  |  14 ++
 src/kpi_manager/service/database/__main__.py  | 107 +++++++++++++++
 src/kpi_manager/tests/test_messages.py        |  14 ++
 src/kpi_manager/tests/test_unitary.py         |  51 +++----
 11 files changed, 471 insertions(+), 37 deletions(-)
 create mode 100755 scripts/run_tests_locally-kpi-DB.sh
 create mode 100644 src/kpi_manager/service/database/KpiDBtests.py
 create mode 100644 src/kpi_manager/service/database/KpiEngine.py
 create mode 100644 src/kpi_manager/service/database/KpiModel.py
 create mode 100644 src/kpi_manager/service/database/Kpi_DB.py
 create mode 100644 src/kpi_manager/service/database/__init__.py
 create mode 100644 src/kpi_manager/service/database/__main__.py

diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh
new file mode 100755
index 000000000..aa9767620
--- /dev/null
+++ b/scripts/run_tests_locally-kpi-DB.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src +# RCFILE=$PROJECTDIR/coverage/.coveragerc +# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ +# kpi_manager/tests/test_unitary.py + +# python3 kpi_manager/tests/test_unitary.py + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ + kpi_manager/service/database/KpiDBtests.py \ No newline at end of file diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index c5127a2de..7f62280ff 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -14,13 +14,14 @@ import logging, grpc +import sqlalchemy, sqlalchemy_utils from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping # from monitoring.service import ManagementDBTools -from telemetry.database.managementDB import managementDB +from kpi_manager.service.database.Kpi_DB import Kpi_DB from telemetry.database.TelemetryModel import Kpi as KpiModel from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ @@ -33,8 +34,13 @@ METRICS_POOL = MetricsPool('Monitoring', 'KpiManager') class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): def __init__(self, name_mapping : NameMapping): LOGGER.info('Init KpiManagerService') - self.managementDBobj = managementDB() - + self.Kpi_DBobj = Kpi_DB() + + @staticmethod + def create_database_if_not_exist(engine: sqlalchemy.engine.Engine) -> None: + if not sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.create_database(engine.url) + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiId: # type: ignore @@ -45,15 +51,15 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid kpi_to_insert.kpi_description = request.kpi_description kpi_to_insert.kpi_sample_type = request.kpi_sample_type - kpi_to_insert.device_id = request.service_id.service_uuid.uuid - kpi_to_insert.endpoint_id = request.device_id.device_uuid.uuid - kpi_to_insert.service_id = request.slice_id.slice_uuid.uuid - kpi_to_insert.slice_id = request.endpoint_id.endpoint_uuid.uuid + kpi_to_insert.device_id = request.device_id.device_uuid.uuid + kpi_to_insert.endpoint_id = request.endpoint_id.endpoint_uuid.uuid + kpi_to_insert.service_id = request.service_id.service_uuid.uuid + kpi_to_insert.slice_id = request.slice_id.slice_uuid.uuid kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid # kpi_to_insert.link_id = request.link_id.link_id.uuid - self.managementDBobj.add_row_to_db(kpi_to_insert) - response.kpi_id.uuid = request.kpi_id.kpi_id.uuid - 
LOGGER.info("Added Row: {:}".format(response)) + if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)): + response.kpi_id.uuid = request.kpi_id.kpi_id.uuid + # LOGGER.info("Added Row: {:}".format(response)) return response except Exception as e: LOGGER.info("Unable to create KpiModel class object. {:}".format(e)) @@ -65,7 +71,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid - row = self.managementDBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + row = self.Kpi_DBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) if row is not None: response.kpi_id.kpi_id.uuid = row.kpi_id response.kpi_description = row.kpi_description @@ -85,7 +91,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid - self.managementDBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + self.Kpi_DBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) except Exception as e: LOGGER.info('Unable to search kpi id. {:}'.format(e)) finally: @@ -102,7 +108,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): filter_to_apply['device_id'] = request.device_id[0].device_uuid.uuid filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0] try: - rows = self.managementDBobj.select_with_filter(KpiModel, **filter_to_apply) + rows = self.Kpi_DBobj.select_with_filter(KpiModel, **filter_to_apply) except Exception as e: LOGGER.info('Unable to apply filter on kpi descriptor. {:}'.format(e)) try: diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 38c07a22a..38b5b124a 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -20,6 +20,7 @@ import threading from confluent_kafka import KafkaError from confluent_kafka import Producer as KafkaProducer from confluent_kafka import Consumer as KafkaConsumer +from kpi_manager.service.database.Kpi_DB import Kpi_DB LOGGER = logging.getLogger(__name__) KAFKA_SERVER_IP = '10.152.183.175:9092' @@ -84,4 +85,8 @@ class KpiValueComposer: # LOGGER.info("Extracted Rows that match the KPIs {:}".format(matching_rows)) return matching_rows - + @staticmethod + def request_kpi_descriptor_from_db(): + col_name = "kpi_description" + kpi_name = KPIs_TO_SEARCH[0] + Kpi_DB.search_db_row_by_id() diff --git a/src/kpi_manager/service/database/KpiDBtests.py b/src/kpi_manager/service/database/KpiDBtests.py new file mode 100644 index 000000000..022a7633d --- /dev/null +++ b/src/kpi_manager/service/database/KpiDBtests.py @@ -0,0 +1,30 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
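
These DB tests cover object construction and table creation/verification; the row-level helpers that SetKpiDescriptor relies on can be exercised the same way. A round-trip sketch under the same assumptions (a reachable CockroachDB instance with the credentials hard-coded in KpiEngine); the field values are arbitrary placeholders:

import uuid
from kpi_manager.service.database.Kpi_DB import Kpi_DB
from kpi_manager.service.database.KpiModel import Kpi as KpiModel

def test_kpi_row_round_trip():
    kpiDBobj = Kpi_DB()
    kpiDBobj.create_tables()
    kpi_id = str(uuid.uuid4())
    row = KpiModel(
        kpi_id=kpi_id, kpi_description='demo KPI', kpi_sample_type=101,   # 101 is a placeholder code
        device_id='DEV1', endpoint_id='END1', service_id='SERV1',
        slice_id='SLC1', connection_id='CON1', link_id='LNK1')
    assert kpiDBobj.add_row_to_db(row)                                    # insert
    assert kpiDBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id)       # read back
    kpiDBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id)              # clean up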
+ + +import logging +from kpi_manager.service.database.Kpi_DB import Kpi_DB + +LOGGER = logging.getLogger(__name__) + + +def test_create_db_object(): + LOGGER.info('>>> test_create_db_object : START<<< ') + kpiDBobj = Kpi_DB() + +def test_verify_Tables(): + LOGGER.info('>>> test_verify_Tables : START <<< ') + kpiDBobj = Kpi_DB() + kpiDBobj.create_tables() + kpiDBobj.verify_tables() diff --git a/src/kpi_manager/service/database/KpiEngine.py b/src/kpi_manager/service/database/KpiEngine.py new file mode 100644 index 000000000..041784ff4 --- /dev/null +++ b/src/kpi_manager/service/database/KpiEngine.py @@ -0,0 +1,49 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, sqlalchemy, sqlalchemy_utils +# from common.Settings import get_setting + +LOGGER = logging.getLogger(__name__) + +APP_NAME = 'tfs' +ECHO = False # False: No dump SQL commands and transactions executed +CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}' +CRDB_NAMESPACE = "crdb" +CRDB_SQL_PORT = "26257" +CRDB_DATABASE = "kpi" +CRDB_USERNAME = "tfs" +CRDB_PASSWORD = "tfs123" +CRDB_SSLMODE = "require" +# CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' + +class KpiEngine: + # def __init__(self): + # self.engine = self.get_engine() + + @staticmethod + def get_engine() -> sqlalchemy.engine.Engine: + crdb_uri = CRDB_URI_TEMPLATE.format( + CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + # crdb_uri = CRDB_URI_TEMPLATE.format( + # CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + try: + # engine = sqlalchemy.create_engine( + # crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) + engine = sqlalchemy.create_engine(crdb_uri, echo=False) + LOGGER.info(' KpiDBmanager initalized with DB URL: {:}'.format(crdb_uri)) + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) + return None # type: ignore + return engine # type: ignore diff --git a/src/kpi_manager/service/database/KpiModel.py b/src/kpi_manager/service/database/KpiModel.py new file mode 100644 index 000000000..16844fdc0 --- /dev/null +++ b/src/kpi_manager/service/database/KpiModel.py @@ -0,0 +1,49 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
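
Since get_engine() hands back a plain SQLAlchemy engine (or None on failure), connectivity can be smoke-tested before the ORM model defined next is involved. A sketch, assuming the CockroachDB coordinates hard-coded in KpiEngine are valid and reachable:

from kpi_manager.service.database.KpiEngine import KpiEngine

def check_db_connectivity() -> bool:
    engine = KpiEngine.get_engine()
    if engine is None:                  # get_engine() logs the failure and returns None
        return False
    with engine.connect() as connection:
        # Same raw-SQL style as Kpi_DB.verify_tables() later in this series.
        result = connection.execute("SELECT 1;")
        return result.scalar() == 1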
+
+import logging
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker, relationship
+
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+# Create a base class for declarative models
+Base = declarative_base()
+
+class Kpi(Base):
+    __tablename__ = 'kpi'
+
+    kpi_id          = Column(UUID(as_uuid=False), primary_key=True)
+    kpi_description = Column(Text)
+    kpi_sample_type = Column(Integer)
+    device_id       = Column(String)
+    endpoint_id     = Column(String)
+    service_id      = Column(String)
+    slice_id        = Column(String)
+    connection_id   = Column(String)
+    link_id         = Column(String)
+    # monitor_flag  = Column(String)
+
+
+    # helps in logging the information
+    def __repr__(self):
+        return (f"<Kpi(kpi_id='{self.kpi_id}', kpi_description='{self.kpi_description}', kpi_sample_type='{self.kpi_sample_type}', device_id='{self.device_id}', endpoint_id='{self.endpoint_id}', service_id='{self.service_id}', slice_id='{self.slice_id}', connection_id='{self.connection_id}', link_id='{self.link_id}')>")
diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py
new file mode 100644
index 000000000..fd5a1c319
--- /dev/null
+++ b/src/kpi_manager/service/database/Kpi_DB.py
@@ -0,0 +1,127 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, time
+import sqlalchemy
+import sqlalchemy_utils
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.ext.declarative import declarative_base
+from kpi_manager.service.database.KpiEngine import KpiEngine
+from kpi_manager.service.database.KpiModel import Kpi
+
+LOGGER = logging.getLogger(__name__)
+DB_NAME = "kpi"
+
+class Kpi_DB:
+    def __init__(self):
+        self.db_engine = KpiEngine.get_engine()
+        if self.db_engine is None:
+            LOGGER.error('Unable to get SQLAlchemy DB Engine...')
+            return False
+        self.db_name = DB_NAME
+        # self.drop_database(self.db_engine) # added to test
+        self.create_database(self.db_engine)
+        self.Session = sessionmaker(bind=self.db_engine)
+
+    @staticmethod
+    def create_database(engine : sqlalchemy.engine.Engine) -> None:
+        if not sqlalchemy_utils.database_exists(engine.url):
+            LOGGER.info("Database created. {:}".format(engine.url))
+            sqlalchemy_utils.create_database(engine.url)
+
+    @staticmethod
+    def drop_database(engine : sqlalchemy.engine.Engine) -> None:
+        if sqlalchemy_utils.database_exists(engine.url):
+            sqlalchemy_utils.drop_database(engine.url)
+
+    def create_tables(self):
+        try:
+            Kpi.metadata.create_all(self.db_engine)     # type: ignore
+            LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name))
+        except Exception as e:
+            LOGGER.info("Tables cannot be created in the Kpi database. {:s}".format(str(e)))
+
+    def verify_tables(self):
+        try:
+            with self.db_engine.connect() as connection:
+                result = connection.execute("SHOW TABLES;")
+                tables = result.fetchall()      # type: ignore
+                LOGGER.info("Tables verified: {:}".format(tables))
+        except Exception as e:
+            LOGGER.info("Unable to fetch Table names.
{:s}".format(str(e))) + + def add_row_to_db(self, row): + session = self.Session() + try: + session.add(row) + session.commit() + LOGGER.info(f"Row inserted into {row.__class__.__name__} table.") + return True + except Exception as e: + session.rollback() + LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") + return False + finally: + session.close() + + def search_db_row_by_id(self, model, col_name, id_to_search): + session = self.Session() + try: + entity = session.query(model).filter_by(**{col_name: id_to_search}).first() + if entity: + LOGGER.info(f"{model.__name__} ID found: {str(entity)}") + return entity + else: + LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}") + return None + except Exception as e: + session.rollback() + LOGGER.info(f"Failed to retrieve {model.__name__} ID. {str(e)}") + raise + finally: + session.close() + + def delete_db_row_by_id(self, model, col_name, id_to_search): + session = self.Session() + try: + record = session.query(model).filter_by(**{col_name: id_to_search}).first() + if record: + session.delete(record) + session.commit() + LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) + else: + LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search) + except Exception as e: + session.rollback() + LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) + finally: + session.close() + + def select_with_filter(self, model, **filters): + session = self.Session() + try: + query = session.query(model) + for column, value in filters.items(): + query = query.filter(getattr(model, column) == value) # type: ignore + result = query.all() + if result: + LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}") # - Results: {result} + else: + LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}") + return result + except Exception as e: + LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filters} ::: {e}") + return [] + finally: + session.close() \ No newline at end of file diff --git a/src/kpi_manager/service/database/__init__.py b/src/kpi_manager/service/database/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_manager/service/database/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/service/database/__main__.py b/src/kpi_manager/service/database/__main__.py new file mode 100644 index 000000000..9f0e53246 --- /dev/null +++ b/src/kpi_manager/service/database/__main__.py @@ -0,0 +1,107 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, signal, sys, threading, time +from prometheus_client import start_http_server +from common.Constants import ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port, + wait_for_environment_variables) +from common.proto import monitoring_pb2 +from monitoring.service.EventTools import EventsDeviceCollector # import updated +from monitoring.service.NameMapping import NameMapping # import updated +# from .MonitoringService import MonitoringService +from .KpiManagerService import KpiManagerService + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def start_kpi_manager(name_mapping : NameMapping): + LOGGER.info('Start Monitoring...',) + + events_collector = EventsDeviceCollector(name_mapping) + events_collector.start() + + # TODO: redesign this method to be more clear and clean + + # Iterate while terminate is not set + while not terminate.is_set(): + list_new_kpi_ids = events_collector.listen_events() + + # Monitor Kpis + if bool(list_new_kpi_ids): + for kpi_id in list_new_kpi_ids: + # Create Monitor Kpi Requests + monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() + monitor_kpi_request.kpi_id.CopyFrom(kpi_id) + monitor_kpi_request.monitoring_window_s = 86400 + monitor_kpi_request.sampling_rate_s = 10 + events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) + + time.sleep(0.5) # let other tasks run; do not overload CPU + else: + # Terminate is set, looping terminates + LOGGER.warning("Stopping execution...") + + events_collector.start() + +def main(): + global LOGGER # pylint: disable=global-statement + + log_level = get_log_level() + logging.basicConfig(level=log_level) + LOGGER = logging.getLogger(__name__) + + wait_for_environment_variables([ + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + ]) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + name_mapping = NameMapping() + # Starting monitoring service + # grpc_service = MonitoringService(name_mapping) + # grpc_service.start() + # start_monitoring(name_mapping) + + grpc_service = KpiManagerService(name_mapping) + grpc_service.start() + + start_kpi_manager(name_mapping) + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index 83150c102..bc4c5b9d1 100755 --- 
a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -24,6 +24,19 @@ def create_kpi_id_request(): _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" return _kpi_id +def create_kpi_descriptor_request_a(descriptor_name: str): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_request.kpi_description = descriptor_name + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member + _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member + return _create_kpi_request + def create_kpi_descriptor_request(): _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) @@ -34,6 +47,7 @@ def create_kpi_descriptor_request(): _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member + _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member return _create_kpi_request def create_kpi_filter_request_a(): diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index 84cf44497..8ac50a38b 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -19,7 +19,7 @@ import os, pytest import logging, json from typing import Union -from apscheduler.schedulers.background import BackgroundScheduler +# from apscheduler.schedulers.background import BackgroundScheduler from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, DeviceOperationalStatusEnum, EventTypeEnum, DeviceEvent, Device, Empty, Topology, TopologyId from common.Constants import ServiceNameEnum @@ -45,12 +45,15 @@ from device.client.DeviceClient import DeviceClient from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, \ create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request, \ - create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a + create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a, \ + create_kpi_descriptor_request_a # from monitoring.service.MonitoringService import MonitoringService from kpi_manager.service.KpiManagerService import KpiManagerService # from monitoring.client.MonitoringClient import MonitoringClient from kpi_manager.client.KpiManagerClient import KpiManagerClient +from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl + from monitoring.service.ManagementDBTools import ManagementDB from monitoring.service.MetricsDBTools import MetricsDB from monitoring.service.NameMapping import NameMapping @@ -212,29 +215,31 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # ---------- 2nd Iteration Tests ----------------- def 
test_SetKpiDescriptor(kpi_manager_client):
     LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiId)
-
-def test_GetKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ")
-    response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiDescriptor)
-
-def test_DeleteKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
-    del_response = kpi_manager_client.DeleteKpiDescriptor(response)
-    kpi_manager_client.GetKpiDescriptor(response)
-    LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response))
+    _descriptors = ["node_timex_status", "node_timex_sync_status", "node_udp_queues"]
+    for _descriptor_name in _descriptors:
+        response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descriptor_name))
+        LOGGER.info("Response gRPC message object: {:}".format(response))
     assert isinstance(response, KpiId)
 
-def test_SelectKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiDescriptorList)
+# def test_GetKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request())
+#     LOGGER.info("Response gRPC message object: {:}".format(response))
+#     assert isinstance(response, KpiDescriptor)
+
+# def test_DeleteKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
+#     del_response = kpi_manager_client.DeleteKpiDescriptor(response)
+#     kpi_manager_client.GetKpiDescriptor(response)
+#     LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response))
+#     assert isinstance(response, KpiId)
+
+# def test_SelectKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a())
+#     LOGGER.info("Response gRPC message object: {:}".format(response))
+#     assert isinstance(response, KpiDescriptorList)
 
 # ------------- INITIAL TESTs ----------------
 # Test case that makes use of client fixture to test server's CreateKpi method
-- 
GitLab


From c50b4191ca47093c5d141ba7afb1ae2dff092cb5 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 6 Jun 2024 15:36:07 +0000
Subject: [PATCH 141/205] KpiDescriptor extracted from DB

---
 src/kpi_manager/service/KpiValueComposer.py |  6 +++++-
 src/kpi_manager/service/database/Kpi_DB.py  |  2 +-
 src/kpi_manager/tests/test_kpi_composer.py  | 10 +++++++---
 3 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py
index 38b5b124a..1bdae0e46 100644
--- a/src/kpi_manager/service/KpiValueComposer.py
+++ b/src/kpi_manager/service/KpiValueComposer.py
@@ -21,6 +21,7 @@ from confluent_kafka import KafkaError
 from confluent_kafka import Producer as KafkaProducer
from confluent_kafka import Consumer as KafkaConsumer from kpi_manager.service.database.Kpi_DB import Kpi_DB +from kpi_manager.service.database.KpiModel import Kpi as KpiModel LOGGER = logging.getLogger(__name__) KAFKA_SERVER_IP = '10.152.183.175:9092' @@ -89,4 +90,7 @@ class KpiValueComposer: def request_kpi_descriptor_from_db(): col_name = "kpi_description" kpi_name = KPIs_TO_SEARCH[0] - Kpi_DB.search_db_row_by_id() + kpiDBobj = Kpi_DB() + + row = kpiDBobj.search_db_row_by_id(KpiModel, col_name, kpi_name) + LOGGER.info("Extracted Row: {:}".format(row)) diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py index fd5a1c319..06800605b 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_manager/service/database/Kpi_DB.py @@ -31,7 +31,7 @@ class Kpi_DB: return False self.db_name = DB_NAME # self.drop_database(self.db_engine) # added to test - self.create_database(self.db_engine) + # self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) @staticmethod diff --git a/src/kpi_manager/tests/test_kpi_composer.py b/src/kpi_manager/tests/test_kpi_composer.py index a4312ea53..5c1f7a265 100644 --- a/src/kpi_manager/tests/test_kpi_composer.py +++ b/src/kpi_manager/tests/test_kpi_composer.py @@ -18,6 +18,10 @@ from kpi_manager.service.KpiValueComposer import KpiValueComposer LOGGER = logging.getLogger(__name__) -def test_compose_kpi(): - LOGGER.info(' >>> test_compose_kpi START <<< ') - KpiValueComposer.compose_kpi() \ No newline at end of file +# def test_compose_kpi(): +# LOGGER.info(' >>> test_compose_kpi START <<< ') +# KpiValueComposer.compose_kpi() + +def test_request_kpi_descriptor_from_db(): + LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') + KpiValueComposer.request_kpi_descriptor_from_db() \ No newline at end of file -- GitLab From 9da93bf75b88c6716fc6205b66965db0d60ab5a3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 7 Jun 2024 09:31:57 +0000 Subject: [PATCH 142/205] link_id reference is added --- src/kpi_manager/service/KpiManagerServiceServicerImpl.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index 7f62280ff..d099f8a5e 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -21,9 +21,10 @@ from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping # from monitoring.service import ManagementDBTools -from kpi_manager.service.database.Kpi_DB import Kpi_DB -from telemetry.database.TelemetryModel import Kpi as KpiModel +from kpi_manager.service.database.Kpi_DB import Kpi_DB +from kpi_manager.service.database.KpiModel import Kpi as KpiModel +# from telemetry.database.TelemetryModel import Kpi as KpiModel from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId @@ -56,7 +57,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): kpi_to_insert.service_id = request.service_id.service_uuid.uuid kpi_to_insert.slice_id = request.slice_id.slice_uuid.uuid kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid - # kpi_to_insert.link_id = request.link_id.link_id.uuid + kpi_to_insert.link_id = 
request.link_id.link_uuid.uuid if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)): response.kpi_id.uuid = request.kpi_id.kpi_id.uuid # LOGGER.info("Added Row: {:}".format(response)) -- GitLab From 7a4b0d19570cb75e203e083566a48f850dd7164d Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 7 Jun 2024 09:32:48 +0000 Subject: [PATCH 143/205] import of Base class updated to newer version --- src/kpi_manager/service/database/KpiModel.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/kpi_manager/service/database/KpiModel.py b/src/kpi_manager/service/database/KpiModel.py index 16844fdc0..9ab98e4ef 100644 --- a/src/kpi_manager/service/database/KpiModel.py +++ b/src/kpi_manager/service/database/KpiModel.py @@ -15,7 +15,8 @@ import logging from sqlalchemy.dialects.postgresql import UUID from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey -from sqlalchemy.ext.declarative import declarative_base +# from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import registry from sqlalchemy.orm import sessionmaker, relationship @@ -23,7 +24,8 @@ logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) # Create a base class for declarative models -Base = declarative_base() +Base = registry().generate_base() +# Base = declarative_base() class Kpi(Base): __tablename__ = 'kpi' -- GitLab From fe6ced62e9c847551905fc80d87395bb98454f89 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 7 Jun 2024 17:30:45 +0000 Subject: [PATCH 144/205] Kpi Composer progress --- my_deploy.sh | 2 +- src/kpi_manager/service/KpiValueComposer.py | 81 ++++++++++++++++--- src/kpi_manager/service/database/KpiModel.py | 4 +- src/kpi_manager/service/database/Kpi_DB.py | 3 +- src/kpi_manager/tests/test_kpi_composer.py | 12 ++- src/kpi_manager/tests/test_unitary.py | 4 +- .../service/TelemetryBackendService.py | 6 +- 7 files changed, 86 insertions(+), 26 deletions(-) diff --git a/my_deploy.sh b/my_deploy.sh index 74c293619..403b3a6a4 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -93,7 +93,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if it exists. -export CRDB_DROP_DATABASE_IF_EXISTS="YES" +export CRDB_DROP_DATABASE_IF_EXISTS="NO" # Disable flag for re-deploying CockroachDB from scratch. 
export CRDB_REDEPLOY="" diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 1bdae0e46..8274c9fd1 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -24,8 +24,8 @@ from kpi_manager.service.database.Kpi_DB import Kpi_DB from kpi_manager.service.database.KpiModel import Kpi as KpiModel LOGGER = logging.getLogger(__name__) -KAFKA_SERVER_IP = '10.152.183.175:9092' -# KAFKA_SERVER_IP = '127.0.0.1:9092' +# KAFKA_SERVER_IP = '10.152.183.175:30092' +KAFKA_SERVER_IP = '127.0.0.1:9092' # ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} @@ -33,7 +33,11 @@ PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,} CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP, 'group.id' : 'kpi_composer', 'auto.offset.reset' : 'latest'} -KPIs_TO_SEARCH = ["node_timex_status", "node_timex_sync_status", "node_udp_queues"] +KPIs_TO_SEARCH = ["node_network_receive_packets_total", + "node_network_receive_bytes_total", + "node_network_transmit_bytes_total", + "process_open_fds"] +DB_TABLE_NAME = KpiModel class KpiValueComposer: def __init__(self) -> None: @@ -78,19 +82,70 @@ class KpiValueComposer: def extract_kpi_values(event): pattern = re.compile("|".join(map(re.escape, KPIs_TO_SEARCH))) lines = event.split('\n') - matching_rows = [] + # matching_rows = [] + sub_names = kpi_value = "" for line in lines: - if pattern.search(line) and not line.startswith("# HELP") and not line.startswith("# TYPE"): - matching_rows.append(tuple(line.split(" "))) - print("Extracted Rows that match the KPIs {:}".format(matching_rows)) - # LOGGER.info("Extracted Rows that match the KPIs {:}".format(matching_rows)) - return matching_rows + try: + if pattern.search(line) and not line.startswith("# HELP") and not line.startswith("# TYPE"): + (kpi_name, kpi_value) = line.split(" ") + if kpi_name.endswith('}'): + (kpi_name, sub_names) = kpi_name.replace('}','').split('{') + print("Extracted row that match the KPI {:}".format((kpi_name, sub_names, kpi_value))) + kpi_descriptor = KpiValueComposer.request_kpi_descriptor_from_db() + if kpi_descriptor is not None: + kpi_to_produce = KpiValueComposer.merge_kpi_descriptor_and_value(kpi_descriptor, kpi_value) + producerObj = KafkaProducer(PRODUCER_CONFIG) + producerObj.produce(KAFKA_TOPICS['labeled'], key="labeled", value= str(kpi_to_produce), callback=KpiValueComposer.delivery_callback) + producerObj.flush() + except Exception as e: + print("Unable to extract kpi name and value from raw data: ERROR Info: {:}".format(e)) @staticmethod - def request_kpi_descriptor_from_db(): + def request_kpi_descriptor_from_db(kpi_name: str = KPIs_TO_SEARCH[0]): col_name = "kpi_description" - kpi_name = KPIs_TO_SEARCH[0] kpiDBobj = Kpi_DB() + row = kpiDBobj.search_db_row_by_id(DB_TABLE_NAME, col_name, kpi_name) + if row is not None: + LOGGER.info("Extracted Row: {:}".format(row)) + return row + else: + return None + + @staticmethod + def merge_kpi_descriptor_and_value(kpi_descriptor, kpi_value): + # Creating a dictionary from the kpi_descriptor's attributes + kpi_dict = { + 'kpi_id' : kpi_descriptor.kpi_id, + 'kpi_description': kpi_descriptor.kpi_description, + 'kpi_sample_type': kpi_descriptor.kpi_sample_type, + 'device_id' : kpi_descriptor.device_id, + 'endpoint_id' : kpi_descriptor.endpoint_id, + 'service_id' : kpi_descriptor.service_id, + 'slice_id' 
: kpi_descriptor.slice_id, + 'connection_id' : kpi_descriptor.connection_id, + 'link_id' : kpi_descriptor.link_id, + 'kpi_value' : kpi_value + } + return kpi_dict + + @staticmethod + def delete_kpi_by_id(): + col_name = "link_id" + kpi_name = None + kpiDBobj = Kpi_DB() + row = kpiDBobj.delete_db_row_by_id(DB_TABLE_NAME, col_name, kpi_name) + if row is not None: + LOGGER.info("Deleted Row: {:}".format(row)) - row = kpiDBobj.search_db_row_by_id(KpiModel, col_name, kpi_name) - LOGGER.info("Extracted Row: {:}".format(row)) + @staticmethod + def delivery_callback( err, msg): + """ + Callback function to handle message delivery status. + Args: + err (KafkaError): Kafka error object. + msg (Message): Kafka message object. + """ + if err: + print(f'Message delivery failed: {err}') + else: + print(f'Message delivered to topic {msg.topic()}') \ No newline at end of file diff --git a/src/kpi_manager/service/database/KpiModel.py b/src/kpi_manager/service/database/KpiModel.py index 9ab98e4ef..5bfc5525b 100644 --- a/src/kpi_manager/service/database/KpiModel.py +++ b/src/kpi_manager/service/database/KpiModel.py @@ -31,7 +31,7 @@ class Kpi(Base): __tablename__ = 'kpi' kpi_id = Column(UUID(as_uuid=False), primary_key=True) - kpi_description = Column(Text) + kpi_description = Column(Text, unique=True) kpi_sample_type = Column(Integer) device_id = Column(String) endpoint_id = Column(String) @@ -39,8 +39,6 @@ class Kpi(Base): slice_id = Column(String) connection_id = Column(String) link_id = Column(String) - # monitor_flag = Column(String) - # helps in logging the information def __repr__(self): diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py index 06800605b..68ac156c7 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_manager/service/database/Kpi_DB.py @@ -80,7 +80,7 @@ class Kpi_DB: try: entity = session.query(model).filter_by(**{col_name: id_to_search}).first() if entity: - LOGGER.info(f"{model.__name__} ID found: {str(entity)}") + # LOGGER.info(f"{model.__name__} ID found: {str(entity)}") return entity else: LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}") @@ -102,6 +102,7 @@ class Kpi_DB: LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) else: LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search) + return None except Exception as e: session.rollback() LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) diff --git a/src/kpi_manager/tests/test_kpi_composer.py b/src/kpi_manager/tests/test_kpi_composer.py index 5c1f7a265..6b96f4fc2 100644 --- a/src/kpi_manager/tests/test_kpi_composer.py +++ b/src/kpi_manager/tests/test_kpi_composer.py @@ -18,10 +18,14 @@ from kpi_manager.service.KpiValueComposer import KpiValueComposer LOGGER = logging.getLogger(__name__) -# def test_compose_kpi(): -# LOGGER.info(' >>> test_compose_kpi START <<< ') -# KpiValueComposer.compose_kpi() +def test_compose_kpi(): + LOGGER.info(' >>> test_compose_kpi START <<< ') + KpiValueComposer.compose_kpi() def test_request_kpi_descriptor_from_db(): LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') - KpiValueComposer.request_kpi_descriptor_from_db() \ No newline at end of file + KpiValueComposer.request_kpi_descriptor_from_db() + +# def test_delete_kpi_by_id(): +# LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') +# KpiValueComposer.delete_kpi_by_id() \ No newline at end of file diff --git 
a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py index 8ac50a38b..f6d8460d9 100755 --- a/src/kpi_manager/tests/test_unitary.py +++ b/src/kpi_manager/tests/test_unitary.py @@ -215,7 +215,9 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # ---------- 2nd Iteration Tests ----------------- def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") - _descriptors = ["node_timex_status", "node_timex_sync_status", "node_udp_queues"] + _descriptors = ["node_network_receive_packets_total", + "node_network_receive_bytes_total", + "node_network_transmit_bytes_total"] for _descritor_name in _descriptors: response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descritor_name)) LOGGER.info("Response gRPC message object: {:}".format(response)) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 60cfcc6e6..903945018 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -30,8 +30,8 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m LOGGER = logging.getLogger(__name__) METRICS_POOL = MetricsPool('Telemetry', 'TelemetryBackend') -# KAFKA_SERVER_IP = '127.0.0.1:9092' -KAFKA_SERVER_IP = '10.152.183.175:9092' +KAFKA_SERVER_IP = '127.0.0.1:9092' +# KAFKA_SERVER_IP = '10.152.183.175:30092' ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} @@ -242,7 +242,7 @@ class TelemetryBackendService: LOGGER.info("Didn't received expected response. Status code: {:}".format(response.status_code)) print(f"Didn't received expected response. Status code: {response.status_code}") return None - time.sleep(5) + time.sleep(15) except Exception as e: LOGGER.info("Failed to process response. 
Status code: {:}".format(e)) return None -- GitLab From 98039a9c20206c05707bc6075f83ee73830e5b4f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sun, 9 Jun 2024 09:51:29 +0000 Subject: [PATCH 145/205] Kafka topic name changed from "labled" to "labeled" --- src/kpi_manager/service/KpiValueComposer.py | 16 +++++++++------- .../backend/service/TelemetryBackendService.py | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 8274c9fd1..9d703b233 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -28,7 +28,7 @@ LOGGER = logging.getLogger(__name__) KAFKA_SERVER_IP = '127.0.0.1:9092' # ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', - 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} + 'raw' : 'topic_raw' , 'labeled' : 'topic_labeled'} PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,} CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP, 'group.id' : 'kpi_composer', @@ -45,12 +45,12 @@ class KpiValueComposer: @staticmethod def compose_kpi(): - KpiValueComposer.run_kafka_listener() - - @staticmethod - def run_kafka_listener(): threading.Thread(target=KpiValueComposer.kafka_listener, args=()).start() + # @staticmethod + # def run_kafka_listener(): + # threading.Thread(target=KpiValueComposer.kafka_listener, args=()).start() + @staticmethod def kafka_listener(): """ @@ -91,17 +91,19 @@ class KpiValueComposer: if kpi_name.endswith('}'): (kpi_name, sub_names) = kpi_name.replace('}','').split('{') print("Extracted row that match the KPI {:}".format((kpi_name, sub_names, kpi_value))) - kpi_descriptor = KpiValueComposer.request_kpi_descriptor_from_db() + kpi_descriptor = KpiValueComposer.request_kpi_descriptor_from_db(kpi_name) if kpi_descriptor is not None: kpi_to_produce = KpiValueComposer.merge_kpi_descriptor_and_value(kpi_descriptor, kpi_value) producerObj = KafkaProducer(PRODUCER_CONFIG) producerObj.produce(KAFKA_TOPICS['labeled'], key="labeled", value= str(kpi_to_produce), callback=KpiValueComposer.delivery_callback) producerObj.flush() + else: + print ("No matching of KPI ({:}) found in db".format(kpi_name)) except Exception as e: print("Unable to extract kpi name and value from raw data: ERROR Info: {:}".format(e)) @staticmethod - def request_kpi_descriptor_from_db(kpi_name: str = KPIs_TO_SEARCH[0]): + def request_kpi_descriptor_from_db(kpi_name: str = KPIs_TO_SEARCH[0]): # = KPIs_TO_SEARCH[0] is added for testing col_name = "kpi_description" kpiDBobj = Kpi_DB() row = kpiDBobj.search_db_row_by_id(DB_TABLE_NAME, col_name, kpi_name) diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index 903945018..ad0132e47 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -34,7 +34,7 @@ KAFKA_SERVER_IP = '127.0.0.1:9092' # KAFKA_SERVER_IP = '10.152.183.175:30092' ADMIN_KAFKA_CLIENT = AdminClient({'bootstrap.servers': KAFKA_SERVER_IP}) KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', - 'raw' : 'topic_raw' , 'labeled' : 'topic_labled'} + 'raw' : 'topic_raw' , 'labeled' : 'topic_labeled'} EXPORTER_ENDPOINT = "http://10.152.183.2:9100/metrics" PRODUCER_CONFIG = {'bootstrap.servers': KAFKA_SERVER_IP,} -- GitLab From 
25eb5f8bb4fc2531e0bc0a122a40c84bee0ca38e Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sun, 9 Jun 2024 13:28:08 +0000 Subject: [PATCH 146/205] KpiWriter is successfully able to read from Kafka "topic_labeled". --- scripts/run_tests_locally-kpi-manager.sh | 2 +- scripts/run_tests_locally-kpi-writer.sh | 23 +++++ src/kpi_manager/service/KpiWriter.py | 87 +++++++++++++++++++ src/kpi_manager/service/database/Kpi_DB.py | 4 +- .../{test_unitary.py => test_kpi_manager.py} | 0 src/kpi_manager/tests/test_kpi_writer.py | 24 +++++ 6 files changed, 137 insertions(+), 3 deletions(-) create mode 100755 scripts/run_tests_locally-kpi-writer.sh create mode 100644 src/kpi_manager/service/KpiWriter.py rename src/kpi_manager/tests/{test_unitary.py => test_kpi_manager.py} (100%) create mode 100644 src/kpi_manager/tests/test_kpi_writer.py diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh index e56716dea..be69980e0 100755 --- a/scripts/run_tests_locally-kpi-manager.sh +++ b/scripts/run_tests_locally-kpi-manager.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/tests/test_unitary.py \ No newline at end of file + kpi_manager/tests/test_kpi_manager.py \ No newline at end of file diff --git a/scripts/run_tests_locally-kpi-writer.sh b/scripts/run_tests_locally-kpi-writer.sh new file mode 100755 index 000000000..2bc2e5130 --- /dev/null +++ b/scripts/run_tests_locally-kpi-writer.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src + +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ + kpi_manager/tests/test_kpi_writer.py \ No newline at end of file diff --git a/src/kpi_manager/service/KpiWriter.py b/src/kpi_manager/service/KpiWriter.py new file mode 100644 index 000000000..3c8382c12 --- /dev/null +++ b/src/kpi_manager/service/KpiWriter.py @@ -0,0 +1,87 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# read Kafka stream from Kafka topic + +import threading +from confluent_kafka import KafkaError +from prometheus_client import start_http_server, Gauge +from confluent_kafka import Consumer as KafkaConsumer + +KAFKA_SERVER_IP = '127.0.0.1:9092' +KAFKA_TOPICS = {'request' : 'topic_request', 'response': 'topic_response', + 'raw' : 'topic_raw' , 'labeled' : 'topic_labeled'} +CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP, + 'group.id' : 'kpi_writer', + 'auto.offset.reset' : 'latest'} +KPIs_TO_SEARCH = ["node_network_receive_packets_total", + "node_network_receive_bytes_total", + "node_network_transmit_bytes_total", + "process_open_fds"] +PROM_METRICS = {} + + +class KpiWriter: + def __init__(self) -> None: + pass + + @staticmethod + def kpi_writer(): + threading.Thread(target=KpiWriter.kafka_listener, args=()).start() + + @staticmethod + def kafka_listener(): + """ + listener for events on Kafka topic. + """ + kafka_consumer = KafkaConsumer(CONSUMER_CONFIG) + kafka_consumer.subscribe([KAFKA_TOPICS['labeled']]) + while True: + receive_msg = kafka_consumer.poll(2.0) + if receive_msg is None: + # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['raw']) # added for debugging purposes + continue + elif receive_msg.error(): + if receive_msg.error().code() == KafkaError._PARTITION_EOF: + continue + else: + print("Consumer error: {}".format(receive_msg.error())) + continue + try: + new_event = receive_msg.value().decode('utf-8') + # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) + # LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) + KpiWriter.write_metric_to_promtheus(new_event) + except Exception as e: + print(f"Error to consume event from topic: {KAFKA_TOPICS['labeled']}. Error detail: {str(e)}") + continue + + @staticmethod + # send metric to Prometheus + def write_metric_to_promtheus(event): + print("New received event: {:}".format(event)) + + # # create Prometheus metrics + # for metric_key in KPIs_TO_SEARCH: + # metric_name = metric_key + # metric_description = "description of " + str(metric_key) + # metric_tags = "tags of " + str(metric_key) + # PROM_METRICS[metric_key] = Gauge( metric_name, metric_description,metric_tags ) + + # NN_REC_PKTS_TOTAL = PROM_METRICS["node_network_receive_packets_total"] + # NN_REC_BYTS_TOTAL = PROM_METRICS["node_network_receive_bytes_total"] + # NN_TRSMT_BYTS_TOTAL = PROM_METRICS["node_network_transmit_bytes_total"] + # PROC_OPEN_FDs = PROM_METRICS["process_open_fds"] + + diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py index 68ac156c7..45c9ff7ed 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_manager/service/database/Kpi_DB.py @@ -31,7 +31,7 @@ class Kpi_DB: return False self.db_name = DB_NAME # self.drop_database(self.db_engine) # added to test - # self.create_database(self.db_engine) # to add database + self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) @staticmethod @@ -50,7 +50,7 @@ class Kpi_DB: Kpi.metadata.create_all(self.db_engine) # type: ignore LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) except Exception as e: - LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) + LOGGER.info("Tables cannot be created in the kpi database. 
{:s}".format(str(e))) def verify_tables(self): try: diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_kpi_manager.py similarity index 100% rename from src/kpi_manager/tests/test_unitary.py rename to src/kpi_manager/tests/test_kpi_manager.py diff --git a/src/kpi_manager/tests/test_kpi_writer.py b/src/kpi_manager/tests/test_kpi_writer.py new file mode 100644 index 000000000..d2261b6ad --- /dev/null +++ b/src/kpi_manager/tests/test_kpi_writer.py @@ -0,0 +1,24 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import logging +from kpi_manager.service.KpiWriter import KpiWriter + +LOGGER = logging.getLogger(__name__) + +def test_kpi_writer(): + LOGGER.info(' >>> test_kpi_writer START <<< ') + KpiWriter.kpi_writer() + -- GitLab From f766e5123d0dd713d25291e23d779122d395fd96 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sun, 9 Jun 2024 14:38:18 +0000 Subject: [PATCH 147/205] Create Promtheus client V1 (not working 100%) --- src/kpi_manager/service/KpiWriter.py | 48 ++++++++++++++++++---------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/src/kpi_manager/service/KpiWriter.py b/src/kpi_manager/service/KpiWriter.py index 3c8382c12..8d26ce494 100644 --- a/src/kpi_manager/service/KpiWriter.py +++ b/src/kpi_manager/service/KpiWriter.py @@ -14,9 +14,11 @@ # read Kafka stream from Kafka topic +import ast +import time import threading from confluent_kafka import KafkaError -from prometheus_client import start_http_server, Gauge +from prometheus_client import start_http_server, Gauge, CollectorRegistry from confluent_kafka import Consumer as KafkaConsumer KAFKA_SERVER_IP = '127.0.0.1:9092' @@ -30,7 +32,7 @@ KPIs_TO_SEARCH = ["node_network_receive_packets_total", "node_network_transmit_bytes_total", "process_open_fds"] PROM_METRICS = {} - +KAFKA_REGISTERY = CollectorRegistry() class KpiWriter: def __init__(self) -> None: @@ -38,6 +40,9 @@ class KpiWriter: @staticmethod def kpi_writer(): + # Start up the server to expose the metrics at port number mention below. + start_http_server(8101) + KpiWriter.create_prom_metrics_name() threading.Thread(target=KpiWriter.kafka_listener, args=()).start() @staticmethod @@ -67,21 +72,30 @@ class KpiWriter: print(f"Error to consume event from topic: {KAFKA_TOPICS['labeled']}. 
Error detail: {str(e)}") + continue - @staticmethod # send metric to Prometheus + @staticmethod def write_metric_to_promtheus(event): - print("New received event: {:}".format(event)) - - # # create Prometheus metrics - # for metric_key in KPIs_TO_SEARCH: - # metric_name = metric_key - # metric_description = "description of " + str(metric_key) - # metric_tags = "tags of " + str(metric_key) - # PROM_METRICS[metric_key] = Gauge( metric_name, metric_description,metric_tags ) - - # NN_REC_PKTS_TOTAL = PROM_METRICS["node_network_receive_packets_total"] - # NN_REC_BYTS_TOTAL = PROM_METRICS["node_network_receive_bytes_total"] - # NN_TRSMT_BYTS_TOTAL = PROM_METRICS["node_network_transmit_bytes_total"] - # PROC_OPEN_FDs = PROM_METRICS["process_open_fds"] - + event = ast.literal_eval(event) # converted into dict + print("New received event: {:}".format(event['kpi_description'])) + event_kpi_name = event['kpi_description'] + if event_kpi_name in KPIs_TO_SEARCH: + PROM_METRICS[event_kpi_name].labels( + tag1 = "test tag value", + tag2 = "test tag value" + ).set(event['kpi_value']) + time.sleep(0.05) + @staticmethod + def create_prom_metrics_name(): + metric_tags = ["tag1", "tag2"] + for metric_key in KPIs_TO_SEARCH: + metric_name = metric_key + metric_description = "description of " + str(metric_key) + try: + PROM_METRICS[metric_key] = Gauge ( + metric_name, metric_description, metric_tags, + registry=KAFKA_REGISTERY ) + print("Metric pushed to Prometheus: {:}".format(PROM_METRICS[metric_key])) + except ValueError as e: + if 'Duplicated timeseries' in str(e): + print("Metric {:} is already registered. Skipping.".format(metric_name)) -- GitLab From d120ce74db0201b910147af0ee523b2ec5ba61c8 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sun, 9 Jun 2024 22:38:48 +0000 Subject: [PATCH 148/205] Prometheus exporter is successfully working. --- src/kpi_manager/service/KpiWriter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/kpi_manager/service/KpiWriter.py b/src/kpi_manager/service/KpiWriter.py index 8d26ce494..62fd4b7b3 100644 --- a/src/kpi_manager/service/KpiWriter.py +++ b/src/kpi_manager/service/KpiWriter.py @@ -40,8 +40,6 @@ class KpiWriter: @staticmethod def kpi_writer(): - # Start up the server to expose the metrics at port number mentioned below. - start_http_server(8101) KpiWriter.create_prom_metrics_name() threading.Thread(target=KpiWriter.kafka_listener, args=()).start() @@ -50,6 +48,8 @@ class KpiWriter: """ listener for events on Kafka topic. """ + # Start up the server to expose the metrics at port number mentioned below. + start_http_server(8101, registry=KAFKA_REGISTERY) kafka_consumer = KafkaConsumer(CONSUMER_CONFIG) kafka_consumer.subscribe([KAFKA_TOPICS['labeled']]) while True: -- GitLab From f4b549f7c29925d461522d364a4f60c92e3489a5 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Sun, 9 Jun 2024 23:18:54 +0000 Subject: [PATCH 149/205] All KPI tags are added with metric. 
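As background for this change, a minimal, self-contained sketch of the labeled-Gauge pattern that prometheus_client supports; the metric name, label names, and values below are illustrative placeholders, not taken from this patch series:

    from prometheus_client import CollectorRegistry, Gauge, generate_latest

    registry = CollectorRegistry()
    # Register the Gauge once per metric name; registering the same name twice
    # in one registry raises ValueError('Duplicated timeseries ...'), which is
    # why create_prom_metrics_name() catches that error and skips re-registration.
    gauge = Gauge('demo_kpi_metric', 'demo KPI gauge with per-descriptor labels',
                  ['kpi_id', 'device_id'], registry=registry)
    # Each distinct label combination becomes its own time series.
    gauge.labels(kpi_id='kpi-1', device_id='DEV1').set(42.0)
    print(generate_latest(registry).decode('utf-8'))

Exposing the registry over HTTP (start_http_server, as done in kafka_listener) then serves these series to a Prometheus scraper.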
--- src/kpi_manager/service/KpiValueComposer.py | 2 +- src/kpi_manager/service/KpiWriter.py | 19 +++++++++++++------ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 31da6c5db..bb2b6ebf3 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -86,7 +86,7 @@ class KpiValueComposer: sub_names = kpi_value = "" for line in lines: try: - if pattern.search(line) and not line.startswith("# HELP") and not line.startswith("# TYPE"): + if pattern.search(line) and not line.startswith("# HELP") and not line.startswith("# TYPE") and not 'device="lo"' in line: (kpi_name, kpi_value) = line.split(" ") if kpi_name.endswith('}'): (kpi_name, sub_names) = kpi_name.replace('}','').split('{') diff --git a/src/kpi_manager/service/KpiWriter.py b/src/kpi_manager/service/KpiWriter.py index 62fd4b7b3..6c74f1a05 100644 --- a/src/kpi_manager/service/KpiWriter.py +++ b/src/kpi_manager/service/KpiWriter.py @@ -76,18 +76,25 @@ class KpiWriter: @staticmethod def write_metric_to_promtheus(event): event = ast.literal_eval(event) # converted into dict - print("New received event: {:}".format(event['kpi_description'])) + print("New received event: {:}".format(event)) event_kpi_name = event['kpi_description'] if event_kpi_name in KPIs_TO_SEARCH: PROM_METRICS[event_kpi_name].labels( - tag1 = "test tag value", - tag2 = "test tag value" - ).set(event['kpi_value']) + kpi_id = event['kpi_id'], + kpi_sample_type = event['kpi_sample_type'], + device_id = event['device_id'], + endpoint_id = event['endpoint_id'], + service_id = event['service_id'], + slice_id = event['slice_id'], + connection_id = event['connection_id'], + link_id = event['link_id'] + ).set(float(event['kpi_value'])) time.sleep(0.05) @staticmethod def create_prom_metrics_name(): - metric_tags = ["tag1", "tag2"] + metric_tags = ['kpi_id','kpi_sample_type','device_id', + 'endpoint_id','service_id','slice_id','connection_id','link_id'] for metric_key in KPIs_TO_SEARCH: metric_name = metric_key metric_description = "description of " + str(metric_key) @@ -95,7 +102,7 @@ PROM_METRICS[metric_key] = Gauge ( metric_name, metric_description, metric_tags, registry=KAFKA_REGISTERY ) - print("Metric pushed to Prometheus: {:}".format(PROM_METRICS[metric_key])) + # print("Metric pushed to Prometheus: {:}".format(PROM_METRICS[metric_key])) except ValueError as e: if 'Duplicated timeseries' in str(e): print("Metric {:} is already registered. 
Skipping.".format(metric_name)) -- GitLab From 10e29ea8a488002d2c51078439626edf58f3d845 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 10 Jun 2024 09:42:03 +0000 Subject: [PATCH 150/205] minor changes in KPI composer --- src/kpi_manager/service/KpiValueComposer.py | 21 +++------------------ 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py index 31da6c5db..bb2b6ebf3 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_manager/service/KpiValueComposer.py @@ -47,10 +47,6 @@ class KpiValueComposer: def compose_kpi(): threading.Thread(target=KpiValueComposer.kafka_listener, args=()).start() - # @staticmethod - # def run_kafka_listener(): - # threading.Thread(target=KpiValueComposer.kafka_listener, args=()).start() - @staticmethod def kafka_listener(): """ @@ -71,15 +67,13 @@ class KpiValueComposer: continue try: new_event = receive_msg.value().decode('utf-8') - # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) - # LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event)) - KpiValueComposer.extract_kpi_values(new_event) + KpiValueComposer.process_event_and_label_kpi(new_event) except Exception as e: print(f"Error to consume event from topic: {KAFKA_TOPICS['raw']}. Error detail: {str(e)}") continue @staticmethod - def extract_kpi_values(event): + def process_event_and_label_kpi(event): pattern = re.compile("|".join(map(re.escape, KPIs_TO_SEARCH))) lines = event.split('\n') # matching_rows = [] @@ -90,7 +84,7 @@ class KpiValueComposer: (kpi_name, kpi_value) = line.split(" ") if kpi_name.endswith('}'): (kpi_name, sub_names) = kpi_name.replace('}','').split('{') - print("Extracted row that match the KPI {:}".format((kpi_name, sub_names, kpi_value))) + print("Received KPI from raw topic: {:}".format((kpi_name, sub_names, kpi_value))) kpi_descriptor = KpiValueComposer.request_kpi_descriptor_from_db(kpi_name) if kpi_descriptor is not None: kpi_to_produce = KpiValueComposer.merge_kpi_descriptor_and_value(kpi_descriptor, kpi_value) @@ -130,15 +124,6 @@ class KpiValueComposer: } return kpi_dict - @staticmethod - def delete_kpi_by_id(): - col_name = "link_id" - kpi_name = None - kpiDBobj = Kpi_DB() - row = kpiDBobj.delete_db_row_by_id(DB_TABLE_NAME, col_name, kpi_name) - if row is not None: - LOGGER.info("Deleted Row: {:}".format(row)) - @staticmethod def delivery_callback( err, msg): """ -- GitLab From 8d398733bf78762230c0b766acf9e2b8d17a7e9c Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 10 Jun 2024 11:46:16 +0000 Subject: [PATCH 151/205] minor changes in TelemetryFrontend --- .../backend/tests/testTelemetryBackend.py | 34 ++++++------- src/telemetry/database/TelemetryModel.py | 48 ++++------------- src/telemetry/database/managementDB.py | 51 ++++++++++++------- .../TelemetryFrontendServiceServicerImpl.py | 4 +- src/telemetry/frontend/tests/Messages.py | 8 ++- src/telemetry/frontend/tests/test_frontend.py | 8 +++ 6 files changed, 77 insertions(+), 76 deletions(-) diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index e81e98473..f8abc08cf 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -28,26 +28,26 @@ LOGGER = logging.getLogger(__name__) # Tests Implementation of Telemetry Backend ########################### -# def test_verify_kafka_topics(): -# 
LOGGER.info('test_verify_kafka_topics requesting') -# TelemetryBackendServiceObj = TelemetryBackendService() -# KafkaTopics = ['topic_request', 'topic_response', 'topic_raw', 'topic_labled'] -# response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics) -# LOGGER.debug(str(response)) -# assert isinstance(response, bool) - -# def test_run_kafka_listener(): -# LOGGER.info('test_receive_kafka_request requesting') -# TelemetryBackendServiceObj = TelemetryBackendService() -# response = TelemetryBackendServiceObj.run_kafka_listener() -# LOGGER.debug(str(response)) -# assert isinstance(response, bool) +def test_verify_kafka_topics(): + LOGGER.info('test_verify_kafka_topics requesting') + TelemetryBackendServiceObj = TelemetryBackendService() + KafkaTopics = ['topic_request', 'topic_response', 'topic_raw', 'topic_labled'] + response = TelemetryBackendServiceObj.create_topic_if_not_exists(KafkaTopics) + LOGGER.debug(str(response)) + assert isinstance(response, bool) + +def test_run_kafka_listener(): + LOGGER.info('test_receive_kafka_request requesting') + TelemetryBackendServiceObj = TelemetryBackendService() + response = TelemetryBackendServiceObj.run_kafka_listener() + LOGGER.debug(str(response)) + assert isinstance(response, bool) # def test_fetch_node_exporter_metrics(): # LOGGER.info(' >>> test_fetch_node_exporter_metrics START <<< ') # TelemetryBackendService.fetch_single_node_exporter_metric() -def test_stream_node_export_metrics_to_raw_topic(): - LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ') - threading.Thread(target=TelemetryBackendService.stream_node_export_metrics_to_raw_topic, args=()).start() +# def test_stream_node_export_metrics_to_raw_topic(): +# LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ') +# threading.Thread(target=TelemetryBackendService.stream_node_export_metrics_to_raw_topic, args=()).start() diff --git a/src/telemetry/database/TelemetryModel.py b/src/telemetry/database/TelemetryModel.py index 8defdd2e8..54b7c13ef 100644 --- a/src/telemetry/database/TelemetryModel.py +++ b/src/telemetry/database/TelemetryModel.py @@ -17,55 +17,29 @@ from sqlalchemy.dialects.postgresql import UUID from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker, relationship - +from sqlalchemy.orm import registry logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) # Create a base class for declarative models -Base = declarative_base() - -class Kpi(Base): - __tablename__ = 'kpi' - - kpi_id = Column(UUID(as_uuid=False), primary_key=True) - kpi_description = Column(Text) - kpi_sample_type = Column(Integer) - device_id = Column(String) - endpoint_id = Column(String) - service_id = Column(String) - slice_id = Column(String) - connection_id = Column(String) - link_id = Column(String) - # monitor_flag = Column(String) - - # Relationship to Collector model: allows access to related Collector objects from a Kpi object - collectors = relationship('Collector', back_populates='kpi') - - # helps in logging the information - def __repr__(self): - return (f"") +Base = registry().generate_base() +# Base = declarative_base() class Collector(Base): __tablename__ = 'collector' - collector_id = Column(UUID(as_uuid=False), primary_key=True) - kpi_id = Column(UUID(as_uuid=False), ForeignKey('kpi.kpi_id')) - collector = Column(String) - sampling_duration_s = Column(Float) - sampling_interval_s = Column(Float) - 
start_timestamp = Column(Float) - end_timestamp = Column(Float) + collector_id = Column(UUID(as_uuid=False), primary_key=True) + kpi_id = Column(UUID(as_uuid=False)) + collector_decription = Column(String) + sampling_duration_s = Column(Float) + sampling_interval_s = Column(Float) + start_timestamp = Column(Float) + end_timestamp = Column(Float) - # Relationship to Kpi model: allows access to the related Kpi object from a Collector object - kpi = relationship('Kpi', back_populates='collectors') def __repr__(self): return (f"") \ No newline at end of file diff --git a/src/telemetry/database/managementDB.py b/src/telemetry/database/managementDB.py index 0a94c6c25..3e0cfc5fb 100644 --- a/src/telemetry/database/managementDB.py +++ b/src/telemetry/database/managementDB.py @@ -13,16 +13,18 @@ # limitations under the License. import logging, time +import sqlalchemy +import sqlalchemy_utils from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base from telemetry.database.TelemetryEngine import TelemetryEngine - +from telemetry.database.TelemetryModel import Base LOGGER = logging.getLogger(__name__) -TELEMETRY_DB_NAME = "telemetryfrontend" +DB_NAME = "telemetryfrontend" -# Create a base class for declarative models -Base = declarative_base() +# # Create a base class for declarative models +# Base = declarative_base() class managementDB: def __init__(self): @@ -30,23 +32,35 @@ class managementDB: if self.db_engine is None: LOGGER.error('Unable to get SQLAlchemy DB Engine...') return False - self.db_name = TELEMETRY_DB_NAME + self.db_name = DB_NAME self.Session = sessionmaker(bind=self.db_engine) - def create_database(self): - try: - with self.db_engine.connect() as connection: - connection.execute(f"CREATE DATABASE {self.db_name};") - LOGGER.info('managementDB initalizes database. Name: {self.db_name}') - return True - except: - LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) - return False + @staticmethod + def create_database(engine : sqlalchemy.engine.Engine) -> None: + if not sqlalchemy_utils.database_exists(engine.url): + LOGGER.info("Database created. {:}".format(engine.url)) + sqlalchemy_utils.create_database(engine.url) + + @staticmethod + def drop_database(engine : sqlalchemy.engine.Engine) -> None: + if sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.drop_database(engine.url) + + # def create_database(self): + # try: + # with self.db_engine.connect() as connection: + # connection.execute(f"CREATE DATABASE {self.db_name};") + # LOGGER.info('managementDB initalizes database. Name: {self.db_name}') + # return True + # except: + # LOGGER.exception('Failed to check/create the database: {:s}'.format(str(self.db_engine.url))) + # return False - def create_tables(self): + @staticmethod + def create_tables(engine : sqlalchemy.engine.Engine): try: - Base.metadata.create_all(self.db_engine) # type: ignore - LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) + Base.metadata.create_all(engine) # type: ignore + LOGGER.info("Tables created in the DB Name: {:}".format(DB_NAME)) except Exception as e: LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e))) @@ -59,6 +73,7 @@ class managementDB: except Exception as e: LOGGER.info("Unable to fetch Table names. 
{:s}".format(str(e))) + @staticmethod def add_row_to_db(self, row): session = self.Session() try: @@ -103,7 +118,7 @@ class managementDB: LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) finally: session.close() - + def select_with_filter(self, model, **filters): session = self.Session() try: diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index d10e9dffd..c63b42cbf 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -55,12 +55,12 @@ class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): collector_to_insert = CollectorModel() collector_to_insert.collector_id = request.collector_id.collector_id.uuid collector_to_insert.kpi_id = request.kpi_id.kpi_id.uuid - collector_to_insert.collector = "DESC 1" + # collector_to_insert.collector_decription= request.collector collector_to_insert.sampling_duration_s = request.duration_s collector_to_insert.sampling_interval_s = request.interval_s collector_to_insert.start_timestamp = time.time() collector_to_insert.end_timestamp = time.time() - self.managementDBobj.add_row_to_db(collector_to_insert) + managementDB.add_row_to_db(collector_to_insert) except Exception as e: LOGGER.info("Unable to create collectorModel class object. {:}".format(e)) diff --git a/src/telemetry/frontend/tests/Messages.py b/src/telemetry/frontend/tests/Messages.py index 48668f7bf..6dc1dffa9 100644 --- a/src/telemetry/frontend/tests/Messages.py +++ b/src/telemetry/frontend/tests/Messages.py @@ -17,6 +17,8 @@ import random from common.proto import telemetry_frontend_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType + +# ----------------------- "2nd" Iteration -------------------------------- def create_collector_id(): _collector_id = telemetry_frontend_pb2.CollectorId() _collector_id.collector_id.uuid = uuid.uuid4() @@ -31,16 +33,18 @@ def create_collector_request(): _create_collector_request = telemetry_frontend_pb2.Collector() _create_collector_request.collector_id.collector_id.uuid = str(uuid.uuid4()) _create_collector_request.kpi_id.kpi_id.uuid = "165d20c5-a446-42fa-812f-e2b7ed283c6f" + # _create_collector_request.collector = "collector description" _create_collector_request.duration_s = float(random.randint(8, 16)) _create_collector_request.interval_s = float(random.randint(2, 4)) return _create_collector_request def create_collector_filter(): _create_collector_filter = telemetry_frontend_pb2.CollectorFilter() - new_kpi_id = _create_collector_filter.kpi_id.add() - new_kpi_id.kpi_id.uuid = "165d20c5-a446-42fa-812f-e2b7ed283c6f" + new_kpi_id = _create_collector_filter.kpi_id.add() + new_kpi_id.kpi_id.uuid = "165d20c5-a446-42fa-812f-e2b7ed283c6f" return _create_collector_filter +# ----------------------- "First" Iteration -------------------------------- # def create_collector_request_a(): # _create_collector_request_a = telemetry_frontend_pb2.Collector() # _create_collector_request_a.collector_id.collector_id.uuid = "-1" diff --git a/src/telemetry/frontend/tests/test_frontend.py b/src/telemetry/frontend/tests/test_frontend.py index 7d050349b..e33545dcc 100644 --- a/src/telemetry/frontend/tests/test_frontend.py +++ b/src/telemetry/frontend/tests/test_frontend.py @@ -32,6 +32,8 @@ from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendC from 
telemetry.frontend.service.TelemetryFrontendService import TelemetryFrontendService from telemetry.frontend.service.TelemetryFrontendServiceServicerImpl import TelemetryFrontendServiceServicerImpl from telemetry.frontend.tests.Messages import ( create_collector_request, create_collector_filter) +from telemetry.database.managementDB import managementDB +from telemetry.database.TelemetryEngine import TelemetryEngine from device.client.DeviceClient import DeviceClient from device.service.DeviceService import DeviceService @@ -166,6 +168,12 @@ def telemetryFrontend_client( # Tests Implementation of Telemetry Frontend ########################### +def test_verify_db_and_table(): + LOGGER.info(' >>> test_verify_database_and_tables START: <<< ') + _engine = TelemetryEngine.get_engine() + managementDB.create_database(_engine) + managementDB.create_tables(_engine) + def test_StartCollector(telemetryFrontend_client): LOGGER.info(' >>> test_StartCollector START: <<< ') response = telemetryFrontend_client.StartCollector(create_collector_request()) -- GitLab From da36643781237614e4bfd4db4668b91af8577b85 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Mon, 10 Jun 2024 17:08:58 +0000 Subject: [PATCH 152/205] KPI manager - Flow tested successfully --- src/kpi_manager/service/KPI_configs.json | 8 + .../service/KpiManagerServiceServicerImpl.py | 73 ++------ src/kpi_manager/service/database/Kpi_DB.py | 5 +- src/kpi_manager/tests/test_kpi_manager.py | 51 +++--- src/kpi_manager/tests/test_messages.py | 158 +++++++++--------- 5 files changed, 135 insertions(+), 160 deletions(-) create mode 100644 src/kpi_manager/service/KPI_configs.json diff --git a/src/kpi_manager/service/KPI_configs.json b/src/kpi_manager/service/KPI_configs.json new file mode 100644 index 000000000..ba73bc41a --- /dev/null +++ b/src/kpi_manager/service/KPI_configs.json @@ -0,0 +1,8 @@ +{ + "KPIs": + [ + "node_network_receive_packets_total", + "node_network_receive_bytes_total", + "node_network_transmit_bytes_total" + ] +} \ No newline at end of file diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index d099f8a5e..4b2e9fc3f 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -37,10 +37,6 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info('Init KpiManagerService') self.Kpi_DBobj = Kpi_DB() - @staticmethod - def create_database_if_not_exist(engine: sqlalchemy.engine.Engine) -> None: - if not sqlalchemy_utils.database_exists(engine.url): - sqlalchemy_utils.create_database(engine.url) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiId: # type: ignore @@ -82,12 +78,13 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): response.slice_id.slice_uuid.uuid = row.slice_id response.endpoint_id.endpoint_uuid.uuid = row.endpoint_id response.connection_id.connection_uuid.uuid = row.connection_id + response.link_id.link_uuid.uuid = row.link_id return response except Exception as e: LOGGER.info('Unable to search kpi id. 
{:}'.format(e)) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext + def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore ) -> Empty: # type: ignore LOGGER.info("Received gRPC message object: {:}".format(request)) try: @@ -106,8 +103,13 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): # LOGGER.info("Recevied requested Object: {:}".format(request)) # re-structre the filter. create dynamic filter filter_to_apply = dict() - filter_to_apply['device_id'] = request.device_id[0].device_uuid.uuid filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0] + filter_to_apply['device_id'] = request.device_id[0].device_uuid.uuid + filter_to_apply['endpoint_id'] = request.endpoint_id[0].endpoint_uuid.uuid + filter_to_apply['service_id'] = request.service_id[0].service_uuid.uuid + filter_to_apply['slice_id'] = request.slice_id[0].slice_uuid.uuid + filter_to_apply['connection_id'] = request.connection_id[0].connection_uuid.uuid + filter_to_apply['link_id'] = request.link_id[0].link_uuid.uuid try: rows = self.Kpi_DBobj.select_with_filter(KpiModel, **filter_to_apply) except Exception as e: @@ -116,58 +118,15 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): if len(rows) != 0: for row in rows: kpiDescriptor_obj = KpiDescriptor() - kpiDescriptor_obj.kpi_id.kpi_id.uuid = row.kpi_id - # kpiDescriptor_obj.kpi_description = row.kpi_description - + kpiDescriptor_obj.kpi_id.kpi_id.uuid = row.kpi_id + kpiDescriptor_obj.kpi_description = row.kpi_description + kpiDescriptor_obj.kpi_sample_type = row.kpi_sample_type + kpiDescriptor_obj.service_id.service_uuid.uuid = row.service_id + kpiDescriptor_obj.device_id.device_uuid.uuid = row.device_id + kpiDescriptor_obj.slice_id.slice_uuid.uuid = row.slice_id + kpiDescriptor_obj.endpoint_id.endpoint_uuid.uuid = row.endpoint_id + kpiDescriptor_obj.connection_id.connection_uuid.uuid = row.connection_id response.kpi_descriptor_list.append(kpiDescriptor_obj) return response except Exception as e: LOGGER.info('Unable to process response {:}'.format(e)) - - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - # def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> Empty: # type: ignore - # kpi_id = int(request.kpi_id.uuid) - # kpi = self.management_db.get_KPI(kpi_id) - # if kpi: - # self.management_db.delete_KPI(kpi_id) - # else: - # LOGGER.info('DeleteKpi error: KpiID({:s}): not found in database'.format(str(kpi_id))) - # return Empty() - - # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - # def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext) -> KpiDescriptor: # type: ignore - # kpi_id = request.kpi_id.uuid - # kpi_db = self.management_db.get_KPI(int(kpi_id)) - # kpiDescriptor = KpiDescriptor() - # if kpi_db is None: - # LOGGER.info('GetKpiDescriptor error: KpiID({:s}): not found in database'.format(str(kpi_id))) - # else: - # kpiDescriptor.kpi_description = kpi_db[1] - # kpiDescriptor.kpi_sample_type = kpi_db[2] - # kpiDescriptor.device_id.device_uuid.uuid = str(kpi_db[3]) - # kpiDescriptor.endpoint_id.endpoint_uuid.uuid = str(kpi_db[4]) - # kpiDescriptor.service_id.service_uuid.uuid = str(kpi_db[5]) - # kpiDescriptor.slice_id.slice_uuid.uuid = str(kpi_db[6]) - # kpiDescriptor.connection_id.connection_uuid.uuid = str(kpi_db[7]) - # kpiDescriptor.link_id.link_uuid.uuid = str(kpi_db[8]) - # return kpiDescriptor - - # 
@safe_and_metered_rpc_method(METRICS_POOL, LOGGER) # def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext) -> KpiDescriptorList: # type: ignore - # kpi_descriptor_list = KpiDescriptorList() - # data = self.management_db.get_KPIS() - # LOGGER.debug(f"data: {data}") - # for item in data: - # kpi_descriptor = KpiDescriptor() - # kpi_descriptor.kpi_id.kpi_id.uuid = str(item[0]) - # kpi_descriptor.kpi_description = item[1] - # kpi_descriptor.kpi_sample_type = item[2] - # kpi_descriptor.device_id.device_uuid.uuid = str(item[3]) - # kpi_descriptor.endpoint_id.endpoint_uuid.uuid = str(item[4]) - # kpi_descriptor.service_id.service_uuid.uuid = str(item[5]) - # kpi_descriptor.slice_id.slice_uuid.uuid = str(item[6]) - # kpi_descriptor.connection_id.connection_uuid.uuid = str(item[7]) - # kpi_descriptor.link_id.link_uuid.uuid = str(item[8]) - # kpi_descriptor_list.kpi_descriptor_list.append(kpi_descriptor) - # return kpi_descriptor_list \ No newline at end of file diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py index 45c9ff7ed..df03deba4 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_manager/service/database/Kpi_DB.py @@ -70,7 +70,10 @@ class Kpi_DB: return True except Exception as e: session.rollback() - LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") + if "psycopg2.errors.UniqueViolation" in str(e): + LOGGER.warning(f"Unique key violation: {row.__class__.__name__} table. {str(e)}") + else: + LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") return False finally: session.close() diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index f6d8460d9..977c85fb8 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -43,10 +43,8 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache from device.service.DeviceService import DeviceService from device.client.DeviceClient import DeviceClient -from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, \ - create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request, \ - create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a, \ - create_kpi_descriptor_request_a +from kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_id_request, \ + create_kpi_filter_request_a, create_kpi_descriptor_request_a # from monitoring.service.MonitoringService import MonitoringService from kpi_manager.service.KpiManagerService import KpiManagerService # from monitoring.client.MonitoringClient import MonitoringClient @@ -215,33 +213,34 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # ---------- 2nd Iteration Tests ----------------- def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") - _descriptors = ["node_network_receive_packets_total", - "node_network_receive_bytes_total", - "node_network_transmit_bytes_total"] + with open("kpi_manager/service/KPI_configs.json", 'r') as file: + data = json.load(file) + _descriptors = data.get('KPIs', []) for _descritor_name in _descriptors: response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descritor_name)) LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiId) -# def 
test_GetKpiDescriptor(kpi_manager_client): -# LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") -# response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) -# LOGGER.info("Response gRPC message object: {:}".format(response)) -# assert isinstance(response, KpiDescriptor) - -# def test_DeleteKpiDescriptor(kpi_manager_client): -# LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") -# response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) -# del_response = kpi_manager_client.DeleteKpiDescriptor(response) -# kpi_manager_client.GetKpiDescriptor(response) -# LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) -# assert isinstance(response, KpiId) - -# def test_SelectKpiDescriptor(kpi_manager_client): -# LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") -# response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) -# LOGGER.info("Response gRPC message object: {:}".format(response)) -# assert isinstance(response, KpiDescriptorList) +def test_GetKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") + response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) + LOGGER.info("Response gRPC message object: {:}".format(response)) + assert isinstance(response, KpiDescriptor) + +def test_DeleteKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") + response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + del_response = kpi_manager_client.DeleteKpiDescriptor(response) + kpi_manager_client.GetKpiDescriptor(response) + LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) + assert isinstance(del_response, Empty) + +def test_SelectKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") + kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a()) + response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) + LOGGER.info("Response gRPC message object: {:}".format(response)) + assert isinstance(response, KpiDescriptorList) # ------------- INITIAL TESTs ---------------- # Test case that makes use of client fixture to test server's CreateKpi method diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index bc4c5b9d1..93e2d6472 100755 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -24,7 +24,7 @@ def create_kpi_id_request(): _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" return _kpi_id -def create_kpi_descriptor_request_a(descriptor_name: str): +def create_kpi_descriptor_request_a(descriptor_name: str = "Test_name"): _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_kpi_request.kpi_description = descriptor_name @@ -51,89 +51,95 @@ def create_kpi_descriptor_request(): return _create_kpi_request def create_kpi_filter_request_a(): - _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() - _create_kpi_filter_request.kpi_sample_type.append(102) + _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() + _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + + device_id_obj = DeviceId() + endpoint_id_obj = EndPointId() + service_id_obj = ServiceId() + slice_id_obj = SliceId() + connection_id_obj = ConnectionId() + link_id_obj = LinkId() + + 
device_id_obj.device_uuid.uuid = "DEV1" + endpoint_id_obj.endpoint_uuid.uuid = "END1" + service_id_obj.service_uuid.uuid = "SERV1" + slice_id_obj.slice_uuid.uuid = "SLC1" + connection_id_obj.connection_uuid.uuid = "CON1" + link_id_obj.link_uuid.uuid = "LNK1" - device_id_obj = DeviceId() - device_id_obj.device_uuid.uuid = "SERV3" _create_kpi_filter_request.device_id.append(device_id_obj) - - # new_device_id = _create_kpi_filter_request.device_id.add() - # new_device_id.device_uuid.uuid = 'DEV3' - # new_service_id = _create_kpi_filter_request.service_id.add() - # new_service_id.service_uuid.uuid = 'SERV1' - # new_slice_id = _create_kpi_filter_request.slice_id.add() - # new_slice_id.slice_uuid.uuid = 'SLC1' - # new_endpoint_id = _create_kpi_filter_request.endpoint_id.add() - # new_endpoint_id.endpoint_uuid.uuid = 'END1' - # new_connection_id = _create_kpi_filter_request.connection_id.add() - # new_connection_id.connection_uuid.uuid = 'CON1' + _create_kpi_filter_request.endpoint_id.append(endpoint_id_obj) + _create_kpi_filter_request.service_id.append(service_id_obj) + _create_kpi_filter_request.slice_id.append(slice_id_obj) + _create_kpi_filter_request.connection_id.append(connection_id_obj) + _create_kpi_filter_request.link_id.append(link_id_obj) return _create_kpi_filter_request # -------------------- Initial Test messages ------------------------------------- -def create_kpi_request(kpi_id_str): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) - _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) - return _create_kpi_request +# def create_kpi_request(kpi_id_str): +# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) +# return _create_kpi_request -def create_kpi_request_b(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request = str(uuid.uuid4()) - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member - return _create_kpi_request +# def create_kpi_request_b(): +# _create_kpi_request = 
kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request = str(uuid.uuid4()) +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member +# return _create_kpi_request -def create_kpi_request_c(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member - return _create_kpi_request +# def create_kpi_request_c(): +# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member +# return _create_kpi_request -def create_kpi_request_d(): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_description = 'KPI Description Test' - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member - return _create_kpi_request +# def create_kpi_request_d(): +# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: 
disable=maybe-no-member
+#     _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4'   # pylint: disable=maybe-no-member
+#     return _create_kpi_request
+
+# def kpi_descriptor_list():
+#     _kpi_descriptor_list = kpi_manager_pb2.KpiDescriptorList()
+#     return _kpi_descriptor_list
+
+# def create_kpi_filter_request():
+#     _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter()
+#     _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED)
+#     new_device_id = _create_kpi_filter_request.device_id.add()
+#     new_device_id.device_uuid.uuid = 'DEV1'
+#     new_service_id = _create_kpi_filter_request.service_id.add()
+#     new_service_id.service_uuid.uuid = 'SERV1'
+#     new_slice_id = _create_kpi_filter_request.slice_id.add()
+#     new_slice_id.slice_uuid.uuid = 'SLC1'
+#     new_endpoint_id = _create_kpi_filter_request.endpoint_id.add()
+#     new_endpoint_id.endpoint_uuid.uuid = 'END1'
+#     new_connection_id = _create_kpi_filter_request.connection_id.add()
+#     new_connection_id.connection_uuid.uuid = 'CON1'

-def kpi_descriptor_list():
-    _kpi_descriptor_list = kpi_manager_pb2.KpiDescriptorList()
-    return _kpi_descriptor_list
-
-def create_kpi_filter_request():
-    _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter()
-    _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED)
-    new_device_id = _create_kpi_filter_request.device_id.add()
-    new_device_id.device_uuid.uuid = 'DEV1'
-    new_service_id = _create_kpi_filter_request.service_id.add()
-    new_service_id.service_uuid.uuid = 'SERV1'
-    new_slice_id = _create_kpi_filter_request.slice_id.add()
-    new_slice_id.slice_uuid.uuid = 'SLC1'
-    new_endpoint_id = _create_kpi_filter_request.endpoint_id.add()
-    new_endpoint_id.endpoint_uuid.uuid = 'END1'
-    new_connection_id = _create_kpi_filter_request.connection_id.add()
-    new_connection_id.connection_uuid.uuid = 'CON1'
-
-    return _create_kpi_filter_request
\ No newline at end of file
+# return _create_kpi_filter_request
\ No newline at end of file
-- 
GitLab


From 6b2078ef920acdf016b47e316cfb87c835aecec2 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 10 Jun 2024 17:38:51 +0000
Subject: [PATCH 153/205] readme in progress

---
 src/kpi_manager/README.md       | 22 ++++++++++++++
 src/kpi_manager/requirements.in | 41 ++++++++++++++++++++++++-
 2 files changed, 62 insertions(+), 1 deletion(-)
 create mode 100644 src/kpi_manager/README.md

diff --git a/src/kpi_manager/README.md b/src/kpi_manager/README.md
new file mode 100644
index 000000000..131bf1efd
--- /dev/null
+++ b/src/kpi_manager/README.md
@@ -0,0 +1,22 @@
+# How to locally run and test KPI Manager service
+
+### Pre-requisites
+The following requirements should be fulfilled before the execution of this module.
+
+1. verify that the kpi_manager.proto file exists and the gRPC files are generated successfully.
+2. a virtual environment exists with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/requirements.in) installed successfully.
+3. verify the creation of the required database and table.
+[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiDBtests.py) is the Python file that lists the functions to create the tables and the database.
+[KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiEngine.py) contains the DB string; update the string as per your deployment.
+
+### Messages format templates
+["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_messages.py) is the Python file that lists the basic message formats used during the testing.
+
+### Test File
+["KPI manager test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_kpi_manager.py) is the Python file that lists the different tests conducted during the experiment.
+
+### Flow of execution
+1. Call the "" and "" functions; this will create the required database and table if they don't exist.
+`
+GRPC_HEALTH_PROBE_VERSION=v0.2.0
+`
diff --git a/src/kpi_manager/requirements.in b/src/kpi_manager/requirements.in
index a6183b57e..d96e4b1b8 100644
--- a/src/kpi_manager/requirements.in
+++ b/src/kpi_manager/requirements.in
@@ -14,11 +14,50 @@
 anytree==2.8.0
 APScheduler==3.10.1
+attrs==23.2.0
+certifi==2024.2.2
+charset-normalizer==2.0.12
+colorama==0.4.6
+confluent-kafka==2.3.0
+coverage==6.3
+future-fstrings==1.2.0
+greenlet==3.0.3
+grpcio==1.47.5
+grpcio-health-checking==1.47.5
+grpcio-tools==1.47.5
+grpclib==0.4.4
+h2==4.1.0
+hpack==4.0.0
+hyperframe==6.0.1
+idna==3.7
 influx-line-protocol==0.1.4
+iniconfig==2.0.0
+kafka-python==2.0.2
+multidict==6.0.5
+networkx==3.3
+packaging==24.0
+pluggy==1.5.0
+prettytable==3.5.0
+prometheus-client==0.13.0
+protobuf==3.20.3
 psycopg2-binary==2.9.3
+py==1.11.0
+py-cpuinfo==9.0.0
+pytest==6.2.5
+pytest-benchmark==3.4.1
+pytest-depends==1.0.1
 python-dateutil==2.8.2
 python-json-logger==2.0.2
+pytz==2024.1
 questdb==1.0.1
 requests==2.27.1
+six==1.16.0
+SQLAlchemy==1.4.52
+sqlalchemy-cockroachdb==1.4.4
+SQLAlchemy-Utils==0.38.3
+toml==0.10.2
+typing_extensions==4.12.0
+tzlocal==5.2
+urllib3==1.26.18
+wcwidth==0.2.13
 xmltodict==0.12.0
-# grpc_health_probe==0.2.0 #getting error on this library
\ No newline at end of file
-- 
GitLab


From de38c5d0d9c8b9969dac52ca17ecceeaf6fdffe8 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 11 Jun 2024 10:16:25 +0000
Subject: [PATCH 154/205] KpiManager Service README.md file completed

---
 src/kpi_manager/README.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/kpi_manager/README.md b/src/kpi_manager/README.md
index 131bf1efd..fdfdf7f54 100644
--- a/src/kpi_manager/README.md
+++ b/src/kpi_manager/README.md
@@ -12,11 +12,11 @@ The following requirements should be fulfilled before the execution of this mod
 ### Messages format templates
 ["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_messages.py) is the Python file that lists the basic message formats used during the testing.
 
-### Test File
+### Test file
 ["KPI manager test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_kpi_manager.py) is the Python file that lists the different tests conducted during the experiment.
 
 ### Flow of execution
-1. Call the "" and "" functions; this will create the required database and table if they don't exist.
-`
-GRPC_HEALTH_PROBE_VERSION=v0.2.0
-`
+1. Call the `create_database()` and `create_tables()` functions from `Kpi_DB` class to create the required database and table if they don't exist.
+2. Call the gRPC method `SetKpiDescriptor(KpiDescriptor)->KpiId` to add the KpiDescriptor in `Kpi` DB. `KpiDescriptor` and `KpiId` are both pre-defined gRPC message types.
+3. Call `GetKpiDescriptor(KpiId)->KpiDescriptor` to read the `KpiDescriptor` from DB and `DeleteKpiDescriptor(KpiId)` to delete the `KpiDescriptor` from DB.
+4. Call `SelectKpiDescriptor(KpiDescriptorFilter)->KpiDescriptorList` to get all `KpiDescriptor` objects that match the filter criteria. `KpiDescriptorFilter` and `KpiDescriptorList` are pre-defined gRPC message types.
\ No newline at end of file
-- 
GitLab


From 4aeda2988da1f672a102405232d4daac066b1953 Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 11 Jun 2024 10:21:12 +0000
Subject: [PATCH 155/205] Minor change in KpiManager service README.md file

---
 src/kpi_manager/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/kpi_manager/README.md b/src/kpi_manager/README.md
index fdfdf7f54..a97950c4b 100644
--- a/src/kpi_manager/README.md
+++ b/src/kpi_manager/README.md
@@ -3,7 +3,7 @@
 ### Pre-requisites
 The following requirements should be fulfilled before the execution of this module.
 
-1. verify that the kpi_manager.proto file exists and the gRPC files are generated successfully.
+1. verify that the [kpi_manager.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_manager.proto) file exists and the gRPC files are generated successfully.
 2. a virtual environment exists with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/requirements.in) installed successfully.
 3. verify the creation of the required database and table.
 [KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiDBtests.py) is the Python file that lists the functions to create the tables and the database.
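The flow documented above maps one-to-one onto the gRPC client. Below is a minimal usage sketch; it assumes a running KpiManagerService, the helper constructors from `kpi_manager/tests/test_messages.py`, and a no-argument client constructor that resolves host and port from the service settings (that constructor signature is an assumption):

```python
# Minimal sketch of the README flow; helper names come from test_messages.py.
from kpi_manager.client.KpiManagerClient import KpiManagerClient
from kpi_manager.tests.test_messages import (
    create_kpi_descriptor_request_a, create_kpi_filter_request_a)

client = KpiManagerClient()  # assumed to resolve host/port from service settings

kpi_id = client.SetKpiDescriptor(                      # step 2: register a descriptor
    create_kpi_descriptor_request_a("node_network_receive_packets_total"))
descriptor = client.GetKpiDescriptor(kpi_id)           # step 3: read it back
matches = client.SelectKpiDescriptor(                  # step 4: filtered listing
    create_kpi_filter_request_a())
client.DeleteKpiDescriptor(kpi_id)                     # step 3: delete it
```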
-- 
GitLab


From d2ff0a3aa28e1ab346a7a5dbfaf1ae8b673fed3f Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Tue, 11 Jun 2024 11:02:06 +0000
Subject: [PATCH 156/205] analytics_frontend.proto added

---
 proto/analytics_frontend.proto | 49 ++++++++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+)
 create mode 100644 proto/analytics_frontend.proto

diff --git a/proto/analytics_frontend.proto b/proto/analytics_frontend.proto
new file mode 100644
index 000000000..6af761ae5
--- /dev/null
+++ b/proto/analytics_frontend.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+package device;
+
+import "context.proto";
+import "kpi_manager.proto";
+import "kpi_sample_types.proto";
+
+service AnalyticsFrontendService {
+  rpc StartAnalyzer  (Analyzer      ) returns (AnalyzerId   ) {}
+  rpc StopAnalyzer   (AnalyzerId    ) returns (context.Empty) {}
+  rpc SelectAnalyzers(AnalyzerFilter) returns (AnalyzerList ) {}
+}
+
+message AnalyzerId {
+  context.Uuid analyzer_id = 1;
+}
+
+enum AnalyzerMode {
+  ANALYZERMODE_BATCH    = 0;
+  ANALYZERMODE_STRAMING = 1;
+}
+
+message Analyzer {
+  repeated kpi_manager.KpiId kpi_id = 1; // The KPI Ids to be processed by the analyzer
+  AnalyzerMode mode                 = 2; // Operation mode of the analyzer
+  float batch_min_duration_s        = 3; // In batch mode, min duration to collect before executing batch
+  float batch_max_duration_s        = 4; // In batch mode, max duration collected to execute the batch
+  uint  batch_min_size              = 5; // In batch mode, min number of samples to collect before executing batch
+  uint  batch_max_size              = 6; // In batch mode, max number of samples collected to execute the batch
+}
+
+message AnalyzerFilter {
+  // Analyzer that fulfill the filter are those that match ALL the following fields.
+  // An empty list means: any value is accepted.
+  // All fields empty means: list all Analyzers
+  repeated AnalyzerId analyzer_id = 1;
+  repeated kpi_manager.KpiId kpi_id = 2;
+  repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3;
+  repeated context.DeviceId device_id = 4;
+  repeated context.EndPointId endpoint_id = 5;
+  repeated context.ServiceId service_id = 6;
+  repeated context.SliceId slice_id = 7;
+  repeated context.ConnectionId connection_id = 8;
+  repeated context.LinkId link_id = 9;
+}
+
+message AnalyzerList {
+  repeated Analyzer analyzer_list = 1;
+}
-- 
GitLab

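To make the batch-mode knobs of the `Analyzer` message concrete, here is a hypothetical snippet building a request against the generated Python stubs. The `analytics_frontend_pb2` module name follows the project's `<proto>_pb2` convention and is an assumption; note also that the `uint` fields above only compile once widened to `uint64`, which a later patch in this series does:

```python
# Hypothetical use of the generated stubs; module name is assumed.
from common.proto import analytics_frontend_pb2 as af_pb2

analyzer = af_pb2.Analyzer()
analyzer.kpi_id.add().kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978"
analyzer.mode = af_pb2.ANALYZERMODE_BATCH
analyzer.batch_min_duration_s = 10.0   # collect for at least 10 s ...
analyzer.batch_max_duration_s = 60.0   # ... but execute the batch within 60 s
analyzer.batch_min_size = 100          # run only once at least 100 samples ...
analyzer.batch_max_size = 1000         # ... and at most 1000 samples are gathered
```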
@@ -10,13 +10,16 @@ The following requirements should be fulfilled before the execuation of this mod [KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiEngine.py) contains the DB string, update the string as per your deployment. ### Messages format templates -["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_messages.py) python file enlist the basic messages format used during the testing. +["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_messages.py) python file enlist the basic gRPC messages format used during the testing. ### Test file ["KPI manager test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_kpi_manager.py) python file enlist the different tests conducted during the experiment. -### Flow of execution +### Flow of execution (Kpi Maanager Service functions) 1. Call the `create_database()` and `create_tables()` functions from `Kpi_DB` class to create the required database and table if they don't exist. 2. Call the gRPC method `SetKpiDescriptor(KpiDescriptor)->KpiId` to add the KpiDescriptor in `Kpi` DB. `KpiDescriptor` and `KpiId` are both pre-defined gRPC message types. 3. Call `GetKpiDescriptor(KpiId)->KpiDescriptor` to read the `KpiDescriptor` from DB and `DeleteKpiDescriptor(KpiId)` to delete the `KpiDescriptor` from DB. -4. Call `SelectKpiDescriptor(KpiDescriptorFilter)->KpiDescriptorList` to get all `KpiDescriptor` objects that matches the filter criteria. `KpiDescriptorFilter` and `KpiDescriptorList` are pre-defined gRPC message types. \ No newline at end of file +4. Call `SelectKpiDescriptor(KpiDescriptorFilter)->KpiDescriptorList` to get all `KpiDescriptor` objects that matches the filter criteria. `KpiDescriptorFilter` and `KpiDescriptorList` are pre-defined gRPC message types. + +## For KPI composer and KPI writer +The functionalities of KPI composer and writer is heavily dependent upon Telemetery service. Therfore, these services has other pre-requsites that are mention [here](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/requirements.in). \ No newline at end of file diff --git a/src/telemetry/README.md b/src/telemetry/README.md new file mode 100644 index 000000000..da43bd471 --- /dev/null +++ b/src/telemetry/README.md @@ -0,0 +1,10 @@ +# How to locally run and test Telemetry service + +### Pre-requisets +The following requirements should be fulfilled before the execuation of Telemetry service. + +1. verify that [telmetry_frontend.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/telemetry_frontend.proto) file exists and grpcs file are generated sucessfully. +2. virtual enviornment exist with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/telemetry_virenv.txt) are installed sucessfully. +3. verify the creation of required database and table. +[DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/database/tests/managementDBtests.py) python file enlist the functions to create tables and database. 
+[KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiEngine.py) contains the DB string, update the string as per your deployment. -- GitLab From b17cb7f852c032bae109a97e02d5ea1a78364b63 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 9 Jul 2024 09:37:43 +0000 Subject: [PATCH 158/205] some changes required to do the tests. --- src/kpi_manager/service/database/Kpi_DB.py | 2 +- .../{service => tests}/KPI_configs.json | 0 src/kpi_manager/tests/test_kpi_composer.py | 8 ++-- src/kpi_manager/tests/test_kpi_manager.py | 42 +++++++++---------- .../service/TelemetryBackendService.py | 2 +- .../backend/tests/testTelemetryBackend.py | 18 ++++---- 6 files changed, 36 insertions(+), 36 deletions(-) rename src/kpi_manager/{service => tests}/KPI_configs.json (100%) diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py index df03deba4..eafa38aad 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_manager/service/database/Kpi_DB.py @@ -31,7 +31,7 @@ class Kpi_DB: return False self.db_name = DB_NAME # self.drop_database(self.db_engine) # added to test - self.create_database(self.db_engine) # to add database + # self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) @staticmethod diff --git a/src/kpi_manager/service/KPI_configs.json b/src/kpi_manager/tests/KPI_configs.json similarity index 100% rename from src/kpi_manager/service/KPI_configs.json rename to src/kpi_manager/tests/KPI_configs.json diff --git a/src/kpi_manager/tests/test_kpi_composer.py b/src/kpi_manager/tests/test_kpi_composer.py index 6b96f4fc2..787ca6676 100644 --- a/src/kpi_manager/tests/test_kpi_composer.py +++ b/src/kpi_manager/tests/test_kpi_composer.py @@ -22,10 +22,10 @@ def test_compose_kpi(): LOGGER.info(' >>> test_compose_kpi START <<< ') KpiValueComposer.compose_kpi() -def test_request_kpi_descriptor_from_db(): - LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') - KpiValueComposer.request_kpi_descriptor_from_db() +# def test_request_kpi_descriptor_from_db(): +# LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') +# KpiValueComposer.request_kpi_descriptor_from_db() # def test_delete_kpi_by_id(): -# LOGGER.info(' >>> test_request_kpi_descriptor_from_db START <<< ') +# LOGGER.info(' >>> test_test_delete_kpi_by_id START <<< ') # KpiValueComposer.delete_kpi_by_id() \ No newline at end of file diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index 977c85fb8..680847005 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -213,7 +213,7 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # ---------- 2nd Iteration Tests ----------------- def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") - with open("kpi_manager/service/KPI_configs.json", 'r') as file: + with open("kpi_manager/tests/KPI_configs.json", 'r') as file: data = json.load(file) _descriptors = data.get('KPIs', []) for _descritor_name in _descriptors: @@ -221,26 +221,26 @@ def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiId) -def test_GetKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") - response = 
kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) - LOGGER.info("Response gRPC message object: {:}".format(response)) - assert isinstance(response, KpiDescriptor) - -def test_DeleteKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") - response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - del_response = kpi_manager_client.DeleteKpiDescriptor(response) - kpi_manager_client.GetKpiDescriptor(response) - LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) - assert isinstance(del_response, Empty) - -def test_SelectKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") - kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a()) - response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) - LOGGER.info("Response gRPC message object: {:}".format(response)) - assert isinstance(response, KpiDescriptorList) +# def test_GetKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") +# response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert isinstance(response, KpiDescriptor) + +# def test_DeleteKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") +# response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) +# del_response = kpi_manager_client.DeleteKpiDescriptor(response) +# kpi_manager_client.GetKpiDescriptor(response) +# LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) +# assert isinstance(del_response, Empty) + +# def test_SelectKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") +# kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a()) +# response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a()) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert isinstance(response, KpiDescriptorList) # ------------- INITIAL TESTs ---------------- # Test case that makes use of client fixture to test server's CreateKpi method diff --git a/src/telemetry/backend/service/TelemetryBackendService.py b/src/telemetry/backend/service/TelemetryBackendService.py index ad0132e47..ef57ea8e8 100755 --- a/src/telemetry/backend/service/TelemetryBackendService.py +++ b/src/telemetry/backend/service/TelemetryBackendService.py @@ -152,7 +152,7 @@ class TelemetryBackendService: if topic not in topic_metadata.topics: # If the topic does not exist, create a new topic print(f"Topic '{topic}' does not exist. Creating...") - LOGGER.info("Topic {:} does not exist. Creating...") + LOGGER.warning("Topic {:} does not exist. 
Creating...".format(topic)) new_topic = NewTopic(topic, num_partitions=1, replication_factor=1) ADMIN_KAFKA_CLIENT.create_topics([new_topic]) except KafkaException as e: diff --git a/src/telemetry/backend/tests/testTelemetryBackend.py b/src/telemetry/backend/tests/testTelemetryBackend.py index f8abc08cf..5f0697b72 100644 --- a/src/telemetry/backend/tests/testTelemetryBackend.py +++ b/src/telemetry/backend/tests/testTelemetryBackend.py @@ -36,18 +36,18 @@ def test_verify_kafka_topics(): LOGGER.debug(str(response)) assert isinstance(response, bool) -def test_run_kafka_listener(): - LOGGER.info('test_receive_kafka_request requesting') - TelemetryBackendServiceObj = TelemetryBackendService() - response = TelemetryBackendServiceObj.run_kafka_listener() - LOGGER.debug(str(response)) - assert isinstance(response, bool) +# def test_run_kafka_listener(): +# LOGGER.info('test_receive_kafka_request requesting') +# TelemetryBackendServiceObj = TelemetryBackendService() +# response = TelemetryBackendServiceObj.run_kafka_listener() +# LOGGER.debug(str(response)) +# assert isinstance(response, bool) # def test_fetch_node_exporter_metrics(): # LOGGER.info(' >>> test_fetch_node_exporter_metrics START <<< ') # TelemetryBackendService.fetch_single_node_exporter_metric() -# def test_stream_node_export_metrics_to_raw_topic(): -# LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ') -# threading.Thread(target=TelemetryBackendService.stream_node_export_metrics_to_raw_topic, args=()).start() +def test_stream_node_export_metrics_to_raw_topic(): + LOGGER.info(' >>> test_stream_node_export_metrics_to_raw_topic START <<< ') + threading.Thread(target=TelemetryBackendService.stream_node_export_metrics_to_raw_topic, args=()).start() -- GitLab From 38b1e333645e51d3e89fda813e46dc9b77081d7f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 9 Jul 2024 12:23:41 +0000 Subject: [PATCH 159/205] KPI Manager "Add Descriptor" and "Delete Descriptor" working fine. 
--- proto/analytics_frontend.proto | 16 +++--- proto/device.proto | 6 +- ...kpi_manager.proto => kpi_management.proto} | 3 +- proto/monitoring.proto | 14 ++--- proto/optical_attack_detector.proto | 4 +- proto/policy_condition.proto | 4 +- proto/telemetry_frontend.proto | 22 +++----- scripts/run_tests_locally-kpi-DB.sh | 2 +- scripts/run_tests_locally-kpi-manager.sh | 2 +- .../Dockerfile | 10 ++-- src/{kpi_manager => kpi_management}/README.md | 18 +++--- .../__init__.py | 0 .../kpi_manager}/__init__.py | 0 .../kpi_manager/client/KpiManagerClient.py | 4 +- .../kpi_manager/client}/__init__.py | 0 .../kpi_manager}/database/__init__.py | 0 .../database/service}/KpiEngine.py | 0 .../kpi_manager/database/service}/KpiModel.py | 35 +++++++++--- .../kpi_manager/database/service}/Kpi_DB.py | 6 +- .../kpi_manager/database/service}/__init__.py | 0 .../kpi_manager/database/tests}/KpiDBtests.py | 7 +-- .../kpi_manager/service/KpiManagerService.py | 6 +- .../service/KpiManagerServiceServicerImpl.py | 29 +++++----- .../kpi_manager/service/__init__.py | 14 +++++ .../kpi_manager/tests/test_kpi_manager.py | 56 ++++++++++++------- .../kpi_manager/tests/test_messages.py | 30 ++++++++++ src/kpi_management/kpi_value_api/__init__.py | 14 +++++ .../kpi_value_api/client/__init__.py | 14 +++++ .../kpi_value_api/service/__init__.py | 14 +++++ .../kpi_value_writer/__init__.py | 14 +++++ .../service/KpiValueComposer.py | 4 +- .../kpi_value_writer}/service/KpiWriter.py | 0 .../kpi_value_writer/service/__init__.py | 14 +++++ .../requirements.in | 0 src/kpi_management/service/__init__.py | 14 +++++ .../service/__main__.py | 0 .../service/database/__init__.py | 14 +++++ .../service/database/__main__.py | 0 .../tests/KPI_configs.json | 0 src/kpi_management/tests/__init__.py | 14 +++++ .../tests/test_kpi_composer.py | 0 .../tests/test_kpi_writer.py | 0 .../tests/test_messages.py | 0 src/monitoring/client/MonitoringClient.py | 2 +- 44 files changed, 293 insertions(+), 113 deletions(-) rename proto/{kpi_manager.proto => kpi_management.proto} (99%) rename src/{kpi_manager => kpi_management}/Dockerfile (91%) rename src/{kpi_manager => kpi_management}/README.md (57%) rename src/{kpi_manager => kpi_management}/__init__.py (100%) rename src/{kpi_manager/client => kpi_management/kpi_manager}/__init__.py (100%) rename src/{ => kpi_management}/kpi_manager/client/KpiManagerClient.py (95%) rename src/{kpi_manager/service => kpi_management/kpi_manager/client}/__init__.py (100%) rename src/{kpi_manager/service => kpi_management/kpi_manager}/database/__init__.py (100%) rename src/{kpi_manager/service/database => kpi_management/kpi_manager/database/service}/KpiEngine.py (100%) rename src/{kpi_manager/service/database => kpi_management/kpi_manager/database/service}/KpiModel.py (56%) rename src/{kpi_manager/service/database => kpi_management/kpi_manager/database/service}/Kpi_DB.py (96%) rename src/{kpi_manager/tests => kpi_management/kpi_manager/database/service}/__init__.py (100%) rename src/{kpi_manager/service/database => kpi_management/kpi_manager/database/tests}/KpiDBtests.py (83%) rename src/{ => kpi_management}/kpi_manager/service/KpiManagerService.py (78%) rename src/{ => kpi_management}/kpi_manager/service/KpiManagerServiceServicerImpl.py (84%) create mode 100644 src/kpi_management/kpi_manager/service/__init__.py rename src/{ => kpi_management}/kpi_manager/tests/test_kpi_manager.py (83%) create mode 100644 src/kpi_management/kpi_manager/tests/test_messages.py create mode 100644 src/kpi_management/kpi_value_api/__init__.py create mode 
100644 src/kpi_management/kpi_value_api/client/__init__.py create mode 100644 src/kpi_management/kpi_value_api/service/__init__.py create mode 100644 src/kpi_management/kpi_value_writer/__init__.py rename src/{kpi_manager => kpi_management/kpi_value_writer}/service/KpiValueComposer.py (97%) rename src/{kpi_manager => kpi_management/kpi_value_writer}/service/KpiWriter.py (100%) create mode 100644 src/kpi_management/kpi_value_writer/service/__init__.py rename src/{kpi_manager => kpi_management}/requirements.in (100%) create mode 100644 src/kpi_management/service/__init__.py rename src/{kpi_manager => kpi_management}/service/__main__.py (100%) create mode 100644 src/kpi_management/service/database/__init__.py rename src/{kpi_manager => kpi_management}/service/database/__main__.py (100%) rename src/{kpi_manager => kpi_management}/tests/KPI_configs.json (100%) create mode 100644 src/kpi_management/tests/__init__.py rename src/{kpi_manager => kpi_management}/tests/test_kpi_composer.py (100%) rename src/{kpi_manager => kpi_management}/tests/test_kpi_writer.py (100%) rename src/{kpi_manager => kpi_management}/tests/test_messages.py (100%) diff --git a/proto/analytics_frontend.proto b/proto/analytics_frontend.proto index 6af761ae5..070d0b957 100644 --- a/proto/analytics_frontend.proto +++ b/proto/analytics_frontend.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package device; import "context.proto"; -import "kpi_manager.proto"; +import "kpi_management.proto"; import "kpi_sample_types.proto"; service AnalyticsFrontendService { @@ -21,12 +21,12 @@ enum AnalyzerMode { } message Analyzer { - repeated kpi_manager.KpiId kpi_id = 1; // The KPI Ids to be processed by the analyzer - AnalyzerMode mode = 2; // Operation mode of the analyzer - float batch_min_duration_s = 3; // In batch mode, min duration to collect before executing batch - float batch_max_duration_s = 4; // In batch mode, max duration collected to execute the batch - uint batch_min_size = 5; // In batch mode, min number of samples to collect before executing batch - uint batch_max_size = 6; // In batch mode, max number of samples collected to execute the batch + repeated kpi_management.KpiId kpi_id = 1; // The KPI Ids to be processed by the analyzer + AnalyzerMode mode = 2; // Operation mode of the analyzer + float batch_min_duration_s = 3; // In batch mode, min duration to collect before executing batch + float batch_max_duration_s = 4; // In batch mode, max duration collected to execute the batch + uint64 batch_min_size = 5; // In batch mode, min number of samples to collect before executing batch + uint64 batch_max_size = 6; // In batch mode, max number of samples collected to execute the batch } message AnalyzerFilter { @@ -34,7 +34,7 @@ message AnalyzerFilter { // An empty list means: any value is accepted. 
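  // (e.g., populating only kpi_sample_type matches analyzers of those sample types on any device, endpoint, or KPI)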
// All fields empty means: list all Analyzers repeated AnalyzerId analyzer_id = 1; - repeated kpi_manager.KpiId kpi_id = 2; + repeated kpi_management.KpiId kpi_id = 2; repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; repeated context.DeviceId device_id = 4; repeated context.EndPointId endpoint_id = 5; diff --git a/proto/device.proto b/proto/device.proto index 98cca8ce9..519bdeae9 100644 --- a/proto/device.proto +++ b/proto/device.proto @@ -17,7 +17,7 @@ package device; import "context.proto"; //import "monitoring.proto"; -import "kpi_manager.proto"; +import "kpi_management.proto"; service DeviceService { rpc AddDevice (context.Device ) returns (context.DeviceId ) {} @@ -28,8 +28,8 @@ service DeviceService { } message MonitoringSettings { - kpi_manager.KpiId kpi_id = 1; - kpi_manager.KpiDescriptor kpi_descriptor = 2; + kpi_management.KpiId kpi_id = 1; + kpi_management.KpiDescriptor kpi_descriptor = 2; float sampling_duration_s = 3; float sampling_interval_s = 4; } diff --git a/proto/kpi_manager.proto b/proto/kpi_management.proto similarity index 99% rename from proto/kpi_manager.proto rename to proto/kpi_management.proto index ad48eb84f..a2a8f9d72 100644 --- a/proto/kpi_manager.proto +++ b/proto/kpi_management.proto @@ -13,7 +13,7 @@ // limitations under the License. syntax = "proto3"; -package kpi_manager; +package kpi_management; import "context.proto"; import "kpi_sample_types.proto"; @@ -25,7 +25,6 @@ service KpiManagerService{ rpc SelectKpiDescriptor (KpiDescriptorFilter) returns (KpiDescriptorList ) {} // Stable and final } - message KpiId { context.Uuid kpi_id = 1; } diff --git a/proto/monitoring.proto b/proto/monitoring.proto index 2706988aa..e316fbdc1 100755 --- a/proto/monitoring.proto +++ b/proto/monitoring.proto @@ -16,7 +16,7 @@ syntax = "proto3"; package monitoring; import "context.proto"; -import "kpi_manager.proto"; +import "kpi_management.proto"; //import "kpi_sample_types.proto"; service MonitoringService { @@ -43,14 +43,14 @@ service MonitoringService { message MonitorKpiRequest { - kpi_manager.KpiId kpi_id = 1; + kpi_management.KpiId kpi_id = 1; float monitoring_window_s = 2; float sampling_rate_s = 3; // Pending add field to reflect Available Device Protocols } message KpiQuery { - repeated kpi_manager.KpiId kpi_ids = 1; + repeated kpi_management.KpiId kpi_ids = 1; float monitoring_window_s = 2; uint32 last_n_samples = 3; // used when you want something like "get the last N many samples context.Timestamp start_timestamp = 4; // used when you want something like "get the samples since X date/time" @@ -64,7 +64,7 @@ message RawKpi { // cell } message RawKpiList { // column - kpi_manager.KpiId kpi_id = 1; + kpi_management.KpiId kpi_id = 1; repeated RawKpi raw_kpis = 2; } @@ -75,7 +75,7 @@ message RawKpiTable { // table message Kpi { - kpi_manager.KpiId kpi_id = 1; + kpi_management.KpiId kpi_id = 1; context.Timestamp timestamp = 2; KpiValue kpi_value = 3; } @@ -109,7 +109,7 @@ message KpiList { message SubsDescriptor{ SubscriptionID subs_id = 1; - kpi_manager.KpiId kpi_id = 2; + kpi_management.KpiId kpi_id = 2; float sampling_duration_s = 3; float sampling_interval_s = 4; context.Timestamp start_timestamp = 5; // used when you want something like "get the samples since X date/time" @@ -134,7 +134,7 @@ message AlarmDescriptor { AlarmID alarm_id = 1; string alarm_description = 2; string name = 3; - kpi_manager.KpiId kpi_id = 4; + kpi_management.KpiId kpi_id = 4; KpiValueRange kpi_value_range = 5; context.Timestamp timestamp = 6; } diff --git 
a/proto/optical_attack_detector.proto b/proto/optical_attack_detector.proto index 0d3ed58de..61f93c299 100644 --- a/proto/optical_attack_detector.proto +++ b/proto/optical_attack_detector.proto @@ -18,7 +18,7 @@ package optical_attack_detector; import "context.proto"; //import "monitoring.proto"; -import "kpi_manager.proto"; +import "kpi_management.proto"; service OpticalAttackDetectorService { @@ -29,5 +29,5 @@ service OpticalAttackDetectorService { message DetectionRequest { context.ServiceId service_id = 1; - kpi_manager.KpiId kpi_id = 2; + kpi_management.KpiId kpi_id = 2; } diff --git a/proto/policy_condition.proto b/proto/policy_condition.proto index c0af929ef..8f1211208 100644 --- a/proto/policy_condition.proto +++ b/proto/policy_condition.proto @@ -16,11 +16,11 @@ syntax = "proto3"; package policy; import "monitoring.proto"; -import "kpi_manager.proto"; +import "kpi_management.proto"; // Condition message PolicyRuleCondition { - kpi_manager.KpiId kpiId = 1; + kpi_management.KpiId kpiId = 1; NumericalOperator numericalOperator = 2; monitoring.KpiValue kpiValue = 3; } diff --git a/proto/telemetry_frontend.proto b/proto/telemetry_frontend.proto index 48bfd7a0e..ffc851a4c 100644 --- a/proto/telemetry_frontend.proto +++ b/proto/telemetry_frontend.proto @@ -2,8 +2,7 @@ syntax = "proto3"; package device; import "context.proto"; -import "kpi_manager.proto"; -import "kpi_sample_types.proto"; +import "kpi_management.proto"; service TelemetryFrontendService { rpc StartCollector (Collector ) returns (CollectorId ) {} @@ -16,25 +15,18 @@ message CollectorId { } message Collector { - CollectorId collector_id = 1; // The Collector ID - kpi_manager.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples - float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely - float interval_s = 4; // Interval between collected samples + CollectorId collector_id = 1; // The Collector ID + kpi_management.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples + float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely + float interval_s = 4; // Interval between collected samples } message CollectorFilter { // Collector that fulfill the filter are those that match ALL the following fields. // An empty list means: any value is accepted. 
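  // (e.g., populating only kpi_id selects every collector associated with that KPI, regardless of collector_id)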
// All fields empty means: list all Collectors - repeated CollectorId collector_id = 1; - repeated kpi_manager.KpiId kpi_id = 2; - // repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; - // repeated context.DeviceId device_id = 4; - // repeated context.EndPointId endpoint_id = 5; - // repeated context.ServiceId service_id = 6; - // repeated context.SliceId slice_id = 7; - // repeated context.ConnectionId connection_id = 8; - // repeated context.LinkId link_id = 9; + repeated CollectorId collector_id = 1; + repeated kpi_management.KpiId kpi_id = 2; } message CollectorList { diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh index aa9767620..75d922061 100755 --- a/scripts/run_tests_locally-kpi-DB.sh +++ b/scripts/run_tests_locally-kpi-DB.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/service/database/KpiDBtests.py \ No newline at end of file + kpi_management/kpi_manager/database/tests/KpiDBtests.py \ No newline at end of file diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh index be69980e0..17c0c02ac 100755 --- a/scripts/run_tests_locally-kpi-manager.sh +++ b/scripts/run_tests_locally-kpi-manager.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/tests/test_kpi_manager.py \ No newline at end of file + kpi_management/kpi_manager/tests/test_kpi_manager.py \ No newline at end of file diff --git a/src/kpi_manager/Dockerfile b/src/kpi_management/Dockerfile similarity index 91% rename from src/kpi_manager/Dockerfile rename to src/kpi_management/Dockerfile index d3d962b9f..0369fc0c8 100644 --- a/src/kpi_manager/Dockerfile +++ b/src/kpi_management/Dockerfile @@ -54,9 +54,9 @@ RUN rm *.proto RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \; # Create component sub-folders, get specific Python packages -RUN mkdir -p /var/teraflow/kpi_manager -WORKDIR /var/teraflow/kpi_manager -COPY src/kpi_manager/requirements.in requirements.in +RUN mkdir -p /var/teraflow/kpi_management +WORKDIR /var/teraflow/kpi_management +COPY src/kpi_management/requirements.in requirements.in RUN pip-compile --quiet --output-file=requirements.txt requirements.in RUN python3 -m pip install -r requirements.txt @@ -65,7 +65,7 @@ WORKDIR /var/teraflow COPY src/context/. context/ COPY src/device/. device/ COPY src/monitoring/. monitoring/ -COPY src/kpi_manager/. kpi_manager/ +COPY src/kpi_management/. kpi_management/ # Start the service -ENTRYPOINT ["python", "-m", "kpi_manager.service"] +ENTRYPOINT ["python", "-m", "kpi_management.service"] diff --git a/src/kpi_manager/README.md b/src/kpi_management/README.md similarity index 57% rename from src/kpi_manager/README.md rename to src/kpi_management/README.md index 88c52bb4f..b73f0e8af 100644 --- a/src/kpi_manager/README.md +++ b/src/kpi_management/README.md @@ -1,19 +1,21 @@ -# How to locally run and test KPI Manager service +# How to locally run and test KPI management service + +## ----- Update Required (Files structure has been changed) ----- ### Pre-requisets -The following requirements should be fulfilled before the execuation of KPI Manager service. +The following requirements should be fulfilled before the execuation of KPI management service. -1. 
verify that [kpi_manager.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_manager.proto) file exists and grpcs file are generated sucessfully. -2. virtual enviornment exist with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/requirements.in) are installed sucessfully. +1. verify that [kpi_management.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_management.proto) file exists and grpcs file are generated sucessfully. +2. virtual enviornment exist with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/requirements.in) are installed sucessfully. 3. verify the creation of required database and table. -[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiDBtests.py) python file enlist the functions to create tables and database. -[KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/service/database/KpiEngine.py) contains the DB string, update the string as per your deployment. +[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/service/database/KpiDBtests.py) python file enlist the functions to create tables and database. +[KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/service/database/KpiEngine.py) contains the DB string, update the string as per your deployment. ### Messages format templates -["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_messages.py) python file enlist the basic gRPC messages format used during the testing. +["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/tests/test_messages.py) python file enlist the basic gRPC messages format used during the testing. ### Test file -["KPI manager test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_manager/tests/test_kpi_manager.py) python file enlist the different tests conducted during the experiment. +["KPI management test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/tests/test_kpi_management.py) python file enlist the different tests conducted during the experiment. ### Flow of execution (Kpi Maanager Service functions) 1. Call the `create_database()` and `create_tables()` functions from `Kpi_DB` class to create the required database and table if they don't exist. 
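For reference, these are the import paths implied by the renames that follow; the first two imports are taken verbatim from the KpiManagerClient diff below, and the last two follow the new directory layout shown in the renames:

```python
from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList
from common.proto.kpi_management_pb2_grpc import KpiManagerServiceStub
from kpi_management.kpi_manager.client.KpiManagerClient import KpiManagerClient
from kpi_management.kpi_manager.database.service.Kpi_DB import Kpi_DB
```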
diff --git a/src/kpi_manager/__init__.py b/src/kpi_management/__init__.py similarity index 100% rename from src/kpi_manager/__init__.py rename to src/kpi_management/__init__.py diff --git a/src/kpi_manager/client/__init__.py b/src/kpi_management/kpi_manager/__init__.py similarity index 100% rename from src/kpi_manager/client/__init__.py rename to src/kpi_management/kpi_manager/__init__.py diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_management/kpi_manager/client/KpiManagerClient.py similarity index 95% rename from src/kpi_manager/client/KpiManagerClient.py rename to src/kpi_management/kpi_manager/client/KpiManagerClient.py index 30b1720fb..140381d3a 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_management/kpi_manager/client/KpiManagerClient.py @@ -19,8 +19,8 @@ from common.Settings import get_service_host, get_service_port_grpc from common.proto.context_pb2 import Empty from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.client.RetryDecorator import retry, delay_exponential -from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_management_pb2_grpc import KpiManagerServiceStub +from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList LOGGER = logging.getLogger(__name__) MAX_RETRIES = 10 diff --git a/src/kpi_manager/service/__init__.py b/src/kpi_management/kpi_manager/client/__init__.py similarity index 100% rename from src/kpi_manager/service/__init__.py rename to src/kpi_management/kpi_manager/client/__init__.py diff --git a/src/kpi_manager/service/database/__init__.py b/src/kpi_management/kpi_manager/database/__init__.py similarity index 100% rename from src/kpi_manager/service/database/__init__.py rename to src/kpi_management/kpi_manager/database/__init__.py diff --git a/src/kpi_manager/service/database/KpiEngine.py b/src/kpi_management/kpi_manager/database/service/KpiEngine.py similarity index 100% rename from src/kpi_manager/service/database/KpiEngine.py rename to src/kpi_management/kpi_manager/database/service/KpiEngine.py diff --git a/src/kpi_manager/service/database/KpiModel.py b/src/kpi_management/kpi_manager/database/service/KpiModel.py similarity index 56% rename from src/kpi_manager/service/database/KpiModel.py rename to src/kpi_management/kpi_manager/database/service/KpiModel.py index 5bfc5525b..846f738f9 100644 --- a/src/kpi_manager/service/database/KpiModel.py +++ b/src/kpi_management/kpi_manager/database/service/KpiModel.py @@ -31,14 +31,14 @@ class Kpi(Base): __tablename__ = 'kpi' kpi_id = Column(UUID(as_uuid=False), primary_key=True) - kpi_description = Column(Text, unique=True) - kpi_sample_type = Column(Integer) - device_id = Column(String) - endpoint_id = Column(String) - service_id = Column(String) - slice_id = Column(String) - connection_id = Column(String) - link_id = Column(String) + kpi_description = Column(Text, nullable=False) + kpi_sample_type = Column(Integer, nullable=False) + device_id = Column(String, nullable=False) + endpoint_id = Column(String, nullable=False) + service_id = Column(String, nullable=False) + slice_id = Column(String, nullable=False) + connection_id = Column(String, nullable=False) + link_id = Column(String, nullable=False) # helps in logging the information def __repr__(self): @@ -47,3 +47,22 @@ class Kpi(Base): f"endpoint_id='{self.endpoint_id}', service_id='{self.service_id}', " 
f"slice_id='{self.slice_id}', connection_id='{self.connection_id}', " f"link_id='{self.link_id}')>") + + @classmethod + def create_row_from_kpiDescriptor(cls, request): + """ + Create an instance of Kpi from a request object. + Args: request: The request object containing the data. + Returns: An instance of Kpi initialized with data from the request. + """ + return cls( + kpi_id=request.kpi_id.kpi_id.uuid, + kpi_description=request.kpi_description, + kpi_sample_type=request.kpi_sample_type, + device_id=request.device_id.device_uuid.uuid, + endpoint_id=request.endpoint_id.endpoint_uuid.uuid, + service_id=request.service_id.service_uuid.uuid, + slice_id=request.slice_id.slice_uuid.uuid, + connection_id=request.connection_id.connection_uuid.uuid, + link_id=request.link_id.link_uuid.uuid + ) \ No newline at end of file diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_management/kpi_manager/database/service/Kpi_DB.py similarity index 96% rename from src/kpi_manager/service/database/Kpi_DB.py rename to src/kpi_management/kpi_manager/database/service/Kpi_DB.py index eafa38aad..847e4b70a 100644 --- a/src/kpi_manager/service/database/Kpi_DB.py +++ b/src/kpi_management/kpi_manager/database/service/Kpi_DB.py @@ -17,8 +17,8 @@ import sqlalchemy import sqlalchemy_utils from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base -from kpi_manager.service.database.KpiEngine import KpiEngine -from kpi_manager.service.database.KpiModel import Kpi +from kpi_management.kpi_manager.database.service.KpiEngine import KpiEngine +from kpi_management.kpi_manager.database.service.KpiModel import Kpi LOGGER = logging.getLogger(__name__) DB_NAME = "kpi" @@ -31,7 +31,7 @@ class Kpi_DB: return False self.db_name = DB_NAME # self.drop_database(self.db_engine) # added to test - # self.create_database(self.db_engine) # to add database + # self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) @staticmethod diff --git a/src/kpi_manager/tests/__init__.py b/src/kpi_management/kpi_manager/database/service/__init__.py similarity index 100% rename from src/kpi_manager/tests/__init__.py rename to src/kpi_management/kpi_manager/database/service/__init__.py diff --git a/src/kpi_manager/service/database/KpiDBtests.py b/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py similarity index 83% rename from src/kpi_manager/service/database/KpiDBtests.py rename to src/kpi_management/kpi_manager/database/tests/KpiDBtests.py index 022a7633d..24313e138 100644 --- a/src/kpi_manager/service/database/KpiDBtests.py +++ b/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py @@ -14,15 +14,10 @@ import logging -from kpi_manager.service.database.Kpi_DB import Kpi_DB +from kpi_management.kpi_manager.database.service.Kpi_DB import Kpi_DB LOGGER = logging.getLogger(__name__) - -def test_create_db_object(): - LOGGER.info('>>> test_create_db_object : START<<< ') - kpiDBobj = Kpi_DB() - def test_verify_Tables(): LOGGER.info('>>> test_verify_Tables : START <<< ') kpiDBobj = Kpi_DB() diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_management/kpi_manager/service/KpiManagerService.py similarity index 78% rename from src/kpi_manager/service/KpiManagerService.py rename to src/kpi_management/kpi_manager/service/KpiManagerService.py index dbbcec2cf..d4a8a8104 100755 --- a/src/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_management/kpi_manager/service/KpiManagerService.py @@ -14,11 +14,9 @@ from common.Constants import 
ServiceNameEnum from common.Settings import get_service_port_grpc -# from common.proto.monitoring_pb2_grpc import add_MonitoringServiceServicer_to_server -from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server +from common.proto.kpi_management_pb2_grpc import add_KpiManagerServiceServicer_to_server from common.tools.service.GenericGrpcService import GenericGrpcService -from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl -# from monitoring.service.MonitoringServiceServicerImpl import MonitoringServiceServicerImpl +from kpi_management.kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl from monitoring.service.NameMapping import NameMapping class KpiManagerService(GenericGrpcService): diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py similarity index 84% rename from src/kpi_manager/service/KpiManagerServiceServicerImpl.py rename to src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py index 4b2e9fc3f..3d96c98dd 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -17,13 +17,13 @@ import logging, grpc import sqlalchemy, sqlalchemy_utils from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty -from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_management_pb2_grpc import KpiManagerServiceServicer +from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping # from monitoring.service import ManagementDBTools -from kpi_manager.service.database.Kpi_DB import Kpi_DB -from kpi_manager.service.database.KpiModel import Kpi as KpiModel +from kpi_management.kpi_manager.database.service.Kpi_DB import Kpi_DB +from kpi_management.kpi_manager.database.service.KpiModel import Kpi as KpiModel # from telemetry.database.TelemetryModel import Kpi as KpiModel from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId @@ -44,16 +44,17 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): response = KpiId() LOGGER.info("Received gRPC message object: {:}".format(request)) try: - kpi_to_insert = KpiModel() - kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid - kpi_to_insert.kpi_description = request.kpi_description - kpi_to_insert.kpi_sample_type = request.kpi_sample_type - kpi_to_insert.device_id = request.device_id.device_uuid.uuid - kpi_to_insert.endpoint_id = request.endpoint_id.endpoint_uuid.uuid - kpi_to_insert.service_id = request.service_id.service_uuid.uuid - kpi_to_insert.slice_id = request.slice_id.slice_uuid.uuid - kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid - kpi_to_insert.link_id = request.link_id.link_uuid.uuid + # kpi_to_insert = KpiModel() + # kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid + # kpi_to_insert.kpi_description = request.kpi_description + # kpi_to_insert.kpi_sample_type = request.kpi_sample_type + # kpi_to_insert.device_id = request.device_id.device_uuid.uuid + # kpi_to_insert.endpoint_id = request.endpoint_id.endpoint_uuid.uuid + # kpi_to_insert.service_id = 
request.service_id.service_uuid.uuid + # kpi_to_insert.slice_id = request.slice_id.slice_uuid.uuid + # kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid + # kpi_to_insert.link_id = request.link_id.link_uuid.uuid + kpi_to_insert = KpiModel.create_row_from_kpiDescriptor(request) if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)): response.kpi_id.uuid = request.kpi_id.kpi_id.uuid # LOGGER.info("Added Row: {:}".format(response)) diff --git a/src/kpi_management/kpi_manager/service/__init__.py b/src/kpi_management/kpi_manager/service/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_manager/service/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_management/kpi_manager/tests/test_kpi_manager.py similarity index 83% rename from src/kpi_manager/tests/test_kpi_manager.py rename to src/kpi_management/kpi_manager/tests/test_kpi_manager.py index 680847005..ca5f6af6f 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_management/kpi_manager/tests/test_kpi_manager.py @@ -23,17 +23,17 @@ from typing import Union from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, DeviceOperationalStatusEnum, EventTypeEnum, DeviceEvent, Device, Empty, Topology, TopologyId from common.Constants import ServiceNameEnum -from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME, ServiceNameEnum +# from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME, ServiceNameEnum from common.Settings import ( ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) from common.tests.MockServicerImpl_Context import MockServicerImpl_Context from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server -from common.proto.kpi_sample_types_pb2 import KpiSampleType -from common.tools.object_factory.Context import json_context, json_context_id -from common.tools.object_factory.Topology import json_topology, json_topology_id +# from common.proto.kpi_sample_types_pb2 import KpiSampleType +# from common.tools.object_factory.Context import json_context, json_context_id +# from common.tools.object_factory.Topology import json_topology, json_topology_id # from common.proto.monitoring_pb2 import KpiId, KpiDescriptor, SubsDescriptor, SubsList, AlarmID, \ # AlarmDescriptor, AlarmList, KpiDescriptorList, SubsResponse, AlarmResponse, RawKpiTable #, Kpi, KpiList -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from common.tools.service.GenericGrpcService import GenericGrpcService from context.client.ContextClient import ContextClient @@ -43,17 +43,16 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache from 
device.service.DeviceService import DeviceService from device.client.DeviceClient import DeviceClient -from kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_id_request, \ - create_kpi_filter_request_a, create_kpi_descriptor_request_a +from kpi_management.kpi_manager.tests.test_messages import create_kpi_descriptor_request # from monitoring.service.MonitoringService import MonitoringService -from kpi_manager.service.KpiManagerService import KpiManagerService +from kpi_management.kpi_manager.service.KpiManagerService import KpiManagerService # from monitoring.client.MonitoringClient import MonitoringClient -from kpi_manager.client.KpiManagerClient import KpiManagerClient +from kpi_management.kpi_manager.client.KpiManagerClient import KpiManagerClient -from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl +from kpi_management.kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl -from monitoring.service.ManagementDBTools import ManagementDB -from monitoring.service.MetricsDBTools import MetricsDB +# from monitoring.service.ManagementDBTools import ManagementDB +# from monitoring.service.MetricsDBTools import MetricsDB from monitoring.service.NameMapping import NameMapping os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' @@ -210,17 +209,36 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # Tests Implementation of Kpi Manager ########################### -# ---------- 2nd Iteration Tests ----------------- +# ---------- 3rd Iteration Tests ---------------- def test_SetKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") - with open("kpi_manager/tests/KPI_configs.json", 'r') as file: - data = json.load(file) - _descriptors = data.get('KPIs', []) - for _descritor_name in _descriptors: - response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descritor_name)) - LOGGER.info("Response gRPC message object: {:}".format(response)) + response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiId) +def test_DeleteKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") + # adding KPI + response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + # deleting KPI + del_response = kpi_manager_client.DeleteKpiDescriptor(response) + # verifing KPI + kpi_manager_client.GetKpiDescriptor(response) + LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) + assert isinstance(del_response, Empty) + + +# ---------- 2nd Iteration Tests ----------------- +# def test_SetKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") +# with open("kpi_manager/tests/KPI_configs.json", 'r') as file: +# data = json.load(file) +# _descriptors = data.get('KPIs', []) +# for _descritor_name in _descriptors: +# response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descritor_name)) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert isinstance(response, KpiId) + # def test_GetKpiDescriptor(kpi_manager_client): # LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") # response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request()) diff --git a/src/kpi_management/kpi_manager/tests/test_messages.py b/src/kpi_management/kpi_manager/tests/test_messages.py 
new file mode 100644 index 000000000..7c2933b7a --- /dev/null +++ b/src/kpi_management/kpi_manager/tests/test_messages.py @@ -0,0 +1,30 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid +from common.proto import kpi_management_pb2 +from common.proto.kpi_sample_types_pb2 import KpiSampleType + +def create_kpi_descriptor_request(descriptor_name: str = "Test_name"): + _create_kpi_request = kpi_management_pb2.KpiDescriptor() + _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_request.kpi_description = descriptor_name + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member + _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member + return _create_kpi_request \ No newline at end of file diff --git a/src/kpi_management/kpi_value_api/__init__.py b/src/kpi_management/kpi_value_api/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_value_api/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_management/kpi_value_api/client/__init__.py b/src/kpi_management/kpi_value_api/client/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_value_api/client/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_management/kpi_value_api/service/__init__.py b/src/kpi_management/kpi_value_api/service/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_value_api/service/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_management/kpi_value_writer/__init__.py b/src/kpi_management/kpi_value_writer/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_value_writer/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_management/kpi_value_writer/service/KpiValueComposer.py similarity index 97% rename from src/kpi_manager/service/KpiValueComposer.py rename to src/kpi_management/kpi_value_writer/service/KpiValueComposer.py index bb2b6ebf3..61e558121 100644 --- a/src/kpi_manager/service/KpiValueComposer.py +++ b/src/kpi_management/kpi_value_writer/service/KpiValueComposer.py @@ -20,8 +20,8 @@ import threading from confluent_kafka import KafkaError from confluent_kafka import Producer as KafkaProducer from confluent_kafka import Consumer as KafkaConsumer -from kpi_manager.service.database.Kpi_DB import Kpi_DB -from kpi_manager.service.database.KpiModel import Kpi as KpiModel +from kpi_management.service.database.Kpi_DB import Kpi_DB +from kpi_management.service.database.KpiModel import Kpi as KpiModel LOGGER = logging.getLogger(__name__) # KAFKA_SERVER_IP = '10.152.183.175:30092' diff --git a/src/kpi_manager/service/KpiWriter.py b/src/kpi_management/kpi_value_writer/service/KpiWriter.py similarity index 100% rename from src/kpi_manager/service/KpiWriter.py rename to src/kpi_management/kpi_value_writer/service/KpiWriter.py diff --git a/src/kpi_management/kpi_value_writer/service/__init__.py b/src/kpi_management/kpi_value_writer/service/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/kpi_value_writer/service/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/requirements.in b/src/kpi_management/requirements.in similarity index 100% rename from src/kpi_manager/requirements.in rename to src/kpi_management/requirements.in diff --git a/src/kpi_management/service/__init__.py b/src/kpi_management/service/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/service/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_management/service/__main__.py similarity index 100% rename from src/kpi_manager/service/__main__.py rename to src/kpi_management/service/__main__.py diff --git a/src/kpi_management/service/database/__init__.py b/src/kpi_management/service/database/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/service/database/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/service/database/__main__.py b/src/kpi_management/service/database/__main__.py similarity index 100% rename from src/kpi_manager/service/database/__main__.py rename to src/kpi_management/service/database/__main__.py diff --git a/src/kpi_manager/tests/KPI_configs.json b/src/kpi_management/tests/KPI_configs.json similarity index 100% rename from src/kpi_manager/tests/KPI_configs.json rename to src/kpi_management/tests/KPI_configs.json diff --git a/src/kpi_management/tests/__init__.py b/src/kpi_management/tests/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/kpi_management/tests/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/kpi_manager/tests/test_kpi_composer.py b/src/kpi_management/tests/test_kpi_composer.py similarity index 100% rename from src/kpi_manager/tests/test_kpi_composer.py rename to src/kpi_management/tests/test_kpi_composer.py diff --git a/src/kpi_manager/tests/test_kpi_writer.py b/src/kpi_management/tests/test_kpi_writer.py similarity index 100% rename from src/kpi_manager/tests/test_kpi_writer.py rename to src/kpi_management/tests/test_kpi_writer.py diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_management/tests/test_messages.py similarity index 100% rename from src/kpi_manager/tests/test_messages.py rename to src/kpi_management/tests/test_messages.py diff --git a/src/monitoring/client/MonitoringClient.py b/src/monitoring/client/MonitoringClient.py index 493e96ca8..0486b8083 100644 --- a/src/monitoring/client/MonitoringClient.py +++ b/src/monitoring/client/MonitoringClient.py @@ -20,7 +20,7 @@ from common.Settings import get_service_host, get_service_port_grpc from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorList from common.proto.monitoring_pb2 import Kpi, MonitorKpiRequest, \ KpiQuery, KpiList, SubsDescriptor, SubscriptionID, SubsList, \ SubsResponse, AlarmDescriptor, AlarmID, AlarmList, AlarmResponse, AlarmSubscription, RawKpiTable -- GitLab From dd371d2d33d1fe009482d3d588b2ee06082c46d3 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 9 Jul 2024 16:36:23 +0000 Subject: [PATCH 160/205] KPI gRPC NBI Completely working with Tests --- .../kpi_manager/database/service/KpiModel.py | 61 ++++++++++++------- .../service/KpiManagerServiceServicerImpl.py | 33 +--------- .../kpi_manager/tests/test_kpi_manager.py | 28 +++++++-- .../kpi_manager/tests/test_messages.py | 35 ++++++++++- 4 files changed, 98 insertions(+), 59 deletions(-) diff --git a/src/kpi_management/kpi_manager/database/service/KpiModel.py b/src/kpi_management/kpi_manager/database/service/KpiModel.py index 846f738f9..78276f59e 100644 --- a/src/kpi_management/kpi_manager/database/service/KpiModel.py +++ b/src/kpi_management/kpi_manager/database/service/KpiModel.py @@ -18,7 +18,7 @@ from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey # from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import registry from sqlalchemy.orm import sessionmaker, relationship - +from common.proto.kpi_management_pb2 import KpiDescriptor logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) @@ -31,14 +31,14 @@ class Kpi(Base): __tablename__ = 'kpi' kpi_id = Column(UUID(as_uuid=False), primary_key=True) - kpi_description = Column(Text, nullable=False) - kpi_sample_type = Column(Integer, nullable=False) - device_id = Column(String, nullable=False) - endpoint_id = Column(String, nullable=False) - service_id = Column(String, nullable=False) - slice_id = Column(String, 
nullable=False) - connection_id = Column(String, nullable=False) - link_id = Column(String, nullable=False) + kpi_description = Column(Text , nullable=False) + kpi_sample_type = Column(Integer , nullable=False) + device_id = Column(String , nullable=False) + endpoint_id = Column(String , nullable=False) + service_id = Column(String , nullable=False) + slice_id = Column(String , nullable=False) + connection_id = Column(String , nullable=False) + link_id = Column(String , nullable=False) # helps in logging the information def __repr__(self): @@ -49,20 +49,39 @@ class Kpi(Base): f"link_id='{self.link_id}')>") @classmethod - def create_row_from_kpiDescriptor(cls, request): + def convert_KpiDescriptor_to_row(cls, request): """ Create an instance of Kpi from a request object. - Args: request: The request object containing the data. + Args: request: The request object containing the data. Returns: An instance of Kpi initialized with data from the request. """ return cls( - kpi_id=request.kpi_id.kpi_id.uuid, - kpi_description=request.kpi_description, - kpi_sample_type=request.kpi_sample_type, - device_id=request.device_id.device_uuid.uuid, - endpoint_id=request.endpoint_id.endpoint_uuid.uuid, - service_id=request.service_id.service_uuid.uuid, - slice_id=request.slice_id.slice_uuid.uuid, - connection_id=request.connection_id.connection_uuid.uuid, - link_id=request.link_id.link_uuid.uuid - ) \ No newline at end of file + kpi_id = request.kpi_id.kpi_id.uuid, + kpi_description = request.kpi_description, + kpi_sample_type = request.kpi_sample_type, + device_id = request.device_id.device_uuid.uuid, + endpoint_id = request.endpoint_id.endpoint_uuid.uuid, + service_id = request.service_id.service_uuid.uuid, + slice_id = request.slice_id.slice_uuid.uuid, + connection_id = request.connection_id.connection_uuid.uuid, + link_id = request.link_id.link_uuid.uuid + ) + + @classmethod + def convert_row_to_KpiDescriptor(cls, row): + """ + Create and return a KpiDescriptor representation of a Kpi instance. + Args: row: The Kpi instance (row) containing the data.
+ Returns: KpiDescriptor object + """ + response = KpiDescriptor() + response.kpi_id.kpi_id.uuid = row.kpi_id + response.kpi_description = row.kpi_description + response.kpi_sample_type = row.kpi_sample_type + response.service_id.service_uuid.uuid = row.service_id + response.device_id.device_uuid.uuid = row.device_id + response.slice_id.slice_uuid.uuid = row.slice_id + response.endpoint_id.endpoint_uuid.uuid = row.endpoint_id + response.connection_id.connection_uuid.uuid = row.connection_id + response.link_id.link_uuid.uuid = row.link_id + return response \ No newline at end of file diff --git a/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py index 3d96c98dd..3a3052182 100644 --- a/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -37,24 +37,13 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): LOGGER.info('Init KpiManagerService') self.Kpi_DBobj = Kpi_DB() - @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiId: # type: ignore response = KpiId() LOGGER.info("Received gRPC message object: {:}".format(request)) try: - # kpi_to_insert = KpiModel() - # kpi_to_insert.kpi_id = request.kpi_id.kpi_id.uuid - # kpi_to_insert.kpi_description = request.kpi_description - # kpi_to_insert.kpi_sample_type = request.kpi_sample_type - # kpi_to_insert.device_id = request.device_id.device_uuid.uuid - # kpi_to_insert.endpoint_id = request.endpoint_id.endpoint_uuid.uuid - # kpi_to_insert.service_id = request.service_id.service_uuid.uuid - # kpi_to_insert.slice_id = request.slice_id.slice_uuid.uuid - # kpi_to_insert.connection_id = request.connection_id.connection_uuid.uuid - # kpi_to_insert.link_id = request.link_id.link_uuid.uuid - kpi_to_insert = KpiModel.create_row_from_kpiDescriptor(request) + kpi_to_insert = KpiModel.convert_KpiDescriptor_to_row(request) if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)): response.kpi_id.uuid = request.kpi_id.kpi_id.uuid # LOGGER.info("Added Row: {:}".format(response)) @@ -71,15 +60,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): kpi_id_to_search = request.kpi_id.uuid row = self.Kpi_DBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) if row is not None: - response.kpi_id.kpi_id.uuid = row.kpi_id - response.kpi_description = row.kpi_description - response.kpi_sample_type = row.kpi_sample_type - response.service_id.service_uuid.uuid = row.service_id - response.device_id.device_uuid.uuid = row.device_id - response.slice_id.slice_uuid.uuid = row.slice_id - response.endpoint_id.endpoint_uuid.uuid = row.endpoint_id - response.connection_id.connection_uuid.uuid = row.connection_id - response.link_id.link_uuid.uuid = row.link_id + response = KpiModel.convert_row_to_KpiDescriptor(row) return response except Exception as e: LOGGER.info('Unable to search kpi id. 
{:}'.format(e)) @@ -118,15 +99,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): try: if len(rows) != 0: for row in rows: - kpiDescriptor_obj = KpiDescriptor() - kpiDescriptor_obj.kpi_id.kpi_id.uuid = row.kpi_id - kpiDescriptor_obj.kpi_description = row.kpi_description - kpiDescriptor_obj.kpi_sample_type = row.kpi_sample_type - kpiDescriptor_obj.service_id.service_uuid.uuid = row.service_id - kpiDescriptor_obj.device_id.device_uuid.uuid = row.device_id - kpiDescriptor_obj.slice_id.slice_uuid.uuid = row.slice_id - kpiDescriptor_obj.endpoint_id.endpoint_uuid.uuid = row.endpoint_id - kpiDescriptor_obj.connection_id.connection_uuid.uuid = row.connection_id + kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row) response.kpi_descriptor_list.append(kpiDescriptor_obj) return response except Exception as e: diff --git a/src/kpi_management/kpi_manager/tests/test_kpi_manager.py b/src/kpi_management/kpi_manager/tests/test_kpi_manager.py index ca5f6af6f..531efc20c 100755 --- a/src/kpi_management/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_management/kpi_manager/tests/test_kpi_manager.py @@ -43,7 +43,7 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache from device.service.DeviceService import DeviceService from device.client.DeviceClient import DeviceClient -from kpi_management.kpi_manager.tests.test_messages import create_kpi_descriptor_request +from kpi_management.kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_filter_request # from monitoring.service.MonitoringService import MonitoringService from kpi_management.kpi_manager.service.KpiManagerService import KpiManagerService # from monitoring.client.MonitoringClient import MonitoringClient @@ -219,14 +219,32 @@ def test_SetKpiDescriptor(kpi_manager_client): def test_DeleteKpiDescriptor(kpi_manager_client): LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") # adding KPI - response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) # deleting KPI - del_response = kpi_manager_client.DeleteKpiDescriptor(response) - # verifing KPI - kpi_manager_client.GetKpiDescriptor(response) + del_response = kpi_manager_client.DeleteKpiDescriptor(response_id) + # select KPI + kpi_manager_client.GetKpiDescriptor(response_id) LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) assert isinstance(del_response, Empty) +def test_GetKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") + # adding KPI + response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + # get KPI + response = kpi_manager_client.GetKpiDescriptor(response_id) + LOGGER.info("Response gRPC message object: {:}".format(response)) + assert isinstance(response, KpiDescriptor) + +def test_SelectKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") + # adding KPI + kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + # select KPI(s) + response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) + LOGGER.info("Response gRPC message object: {:}".format(response)) + assert isinstance(response, KpiDescriptorList) + # ---------- 2nd Iteration Tests ----------------- # def test_SetKpiDescriptor(kpi_manager_client): diff --git a/src/kpi_management/kpi_manager/tests/test_messages.py b/src/kpi_management/kpi_manager/tests/test_messages.py index 
7c2933b7a..e1cb4ddf6 100644 --- a/src/kpi_management/kpi_manager/tests/test_messages.py +++ b/src/kpi_management/kpi_manager/tests/test_messages.py @@ -15,16 +15,45 @@ import uuid from common.proto import kpi_management_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType +from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ + ConnectionId, EndPointId def create_kpi_descriptor_request(descriptor_name: str = "Test_name"): _create_kpi_request = kpi_management_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_kpi_request.kpi_description = descriptor_name _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member + _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member - return _create_kpi_request \ No newline at end of file + return _create_kpi_request + +def create_kpi_filter_request(): + _create_kpi_filter_request = kpi_management_pb2.KpiDescriptorFilter() + _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + + device_id_obj = DeviceId() + service_id_obj = ServiceId() + slice_id_obj = SliceId() + endpoint_id_obj = EndPointId() + connection_id_obj = ConnectionId() + link_id_obj = LinkId() + + device_id_obj.device_uuid.uuid = "DEV2" + service_id_obj.service_uuid.uuid = "SERV2" + slice_id_obj.slice_uuid.uuid = "SLC1" + endpoint_id_obj.endpoint_uuid.uuid = "END1" + connection_id_obj.connection_uuid.uuid = "CON1" + link_id_obj.link_uuid.uuid = "LNK1" + + _create_kpi_filter_request.device_id.append(device_id_obj) + _create_kpi_filter_request.service_id.append(service_id_obj) + _create_kpi_filter_request.slice_id.append(slice_id_obj) + _create_kpi_filter_request.endpoint_id.append(endpoint_id_obj) + _create_kpi_filter_request.connection_id.append(connection_id_obj) + _create_kpi_filter_request.link_id.append(link_id_obj) + + return _create_kpi_filter_request \ No newline at end of file -- GitLab From 2cb264adf50b942bf8c6441fcc5837c5e3bf83e9 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 9 Jul 2024 16:47:15 +0000 Subject: [PATCH 161/205] Slight changes to verify KPI_DB test --- .../kpi_manager/database/service/Kpi_DB.py | 16 +++++++--------- .../kpi_manager/database/tests/KpiDBtests.py | 3 +++ 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/kpi_management/kpi_manager/database/service/Kpi_DB.py b/src/kpi_management/kpi_manager/database/service/Kpi_DB.py index 847e4b70a..21027fa7b 100644 --- a/src/kpi_management/kpi_manager/database/service/Kpi_DB.py +++ b/src/kpi_management/kpi_manager/database/service/Kpi_DB.py @@ -34,16 +34,14 @@ class Kpi_DB: # self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) - @staticmethod - def create_database(engine : sqlalchemy.engine.Engine) -> None: - if not 
sqlalchemy_utils.database_exists(engine.url): - LOGGER.info("Database created. {:}".format(engine.url)) - sqlalchemy_utils.create_database(engine.url) + def create_database(self) -> None: + if not sqlalchemy_utils.database_exists(self.db_engine.url): + LOGGER.info("Database created. {:}".format(self.db_engine.url)) + sqlalchemy_utils.create_database(self.db_engine.url) - @staticmethod - def drop_database(engine : sqlalchemy.engine.Engine) -> None: - if sqlalchemy_utils.database_exists(engine.url): - sqlalchemy_utils.drop_database(engine.url) + def drop_database(self) -> None: + if sqlalchemy_utils.database_exists(self.db_engine.url): + sqlalchemy_utils.drop_database(self.db_engine.url) def create_tables(self): try: diff --git a/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py b/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py index 24313e138..402dac8e0 100644 --- a/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py +++ b/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py @@ -21,5 +21,8 @@ LOGGER = logging.getLogger(__name__) def test_verify_Tables(): LOGGER.info('>>> test_verify_Tables : START <<< ') kpiDBobj = Kpi_DB() + kpiDBobj.drop_database() + kpiDBobj.verify_tables() + kpiDBobj.create_database() kpiDBobj.create_tables() kpiDBobj.verify_tables() -- GitLab From 8323fadf78adb6c5972422694b6f4b4cf54d46b1 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 9 Jul 2024 16:55:55 +0000 Subject: [PATCH 162/205] update KPI Manager README file. --- src/kpi_management/kpi_manager/README.md | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 src/kpi_management/kpi_manager/README.md diff --git a/src/kpi_management/kpi_manager/README.md b/src/kpi_management/kpi_manager/README.md new file mode 100644 index 000000000..68885dd13 --- /dev/null +++ b/src/kpi_management/kpi_manager/README.md @@ -0,0 +1,28 @@ +# How to locally run and test KPI manager micro-service + +### Pre-requisites +The following requirements should be fulfilled before the execution of the KPI management service. + +1. Verify that the [kpi_management.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_management.proto) file exists and the gRPC files are generated successfully. +2. A virtual environment exists with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/requirements.in) installed successfully. +3. Verify the creation of the required database and table. +[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py) python file lists the functions to create the database and tables, and +[KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/service/database/KpiEngine.py) contains the DB connection string; update it as per your deployment. + +### Message format templates +The ["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/tests/test_messages.py) python file lists the basic gRPC message formats used during testing. + +### Test file +The ["KPI management test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/tests/test_kpi_manager.py) python file lists the different tests conducted during the experiment.
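+
+### Quick usage sketch
+A minimal, illustrative snippet (not part of the test suite) showing how the client and the message templates above fit together; it assumes a reachable KPI Manager instance and that `KpiManagerClient()` resolves its host/port from the usual TFS environment variables:
+
+```python
+from kpi_management.kpi_manager.client.KpiManagerClient import KpiManagerClient
+from kpi_management.kpi_manager.tests.test_messages import create_kpi_descriptor_request
+
+client = KpiManagerClient()                                         # host/port from environment (assumption)
+kpi_id = client.SetKpiDescriptor(create_kpi_descriptor_request())   # register a KPI descriptor, returns its KpiId
+descriptor = client.GetKpiDescriptor(kpi_id)                        # read the descriptor back
+client.DeleteKpiDescriptor(kpi_id)                                  # clean up
+```
+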
+### Flow of execution (KPI Manager Service functions) +1. Call the `create_database()` and `create_tables()` functions from the `Kpi_DB` class to create the required database and table if they don't exist. Call `verify_tables` to verify the existence of the KPI table. + +2. Call the gRPC method `SetKpiDescriptor(KpiDescriptor)->KpiId` to add a KpiDescriptor to the `Kpi` DB. `KpiDescriptor` and `KpiId` are both pre-defined gRPC message types. + +3. Call `GetKpiDescriptor(KpiId)->KpiDescriptor` to read the `KpiDescriptor` from the DB and `DeleteKpiDescriptor(KpiId)` to delete the `KpiDescriptor` from the DB. + +4. Call `SelectKpiDescriptor(KpiDescriptorFilter)->KpiDescriptorList` to get all `KpiDescriptor` objects that match the filter criteria. `KpiDescriptorFilter` and `KpiDescriptorList` are pre-defined gRPC message types. + +## For KPI composer and KPI writer +The functionality of the KPI composer and writer is heavily dependent upon the Telemetry service. Therefore, these services have other prerequisites that are mentioned [here](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/requirements.in). \ No newline at end of file -- GitLab From 2f89f95ba6fd14c84b21f5cdd4015d10568a9ce6 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 10 Jul 2024 13:13:18 +0000 Subject: [PATCH 163/205] After re-structuring of KPI Manager --- scripts/run_tests_locally-kpi-DB.sh | 2 +- scripts/run_tests_locally-kpi-manager.sh | 2 +- .../kpi_manager/service/__init__.py | 14 ---- src/kpi_manager/Dockerfile | 70 +++++++++++++++++++ .../kpi_manager/README.md | 1 + .../kpi_manager/__init__.py | 0 .../kpi_manager/client/KpiManagerClient.py | 0 .../kpi_manager/client/__init__.py | 0 .../database}/KpiEngine.py | 2 +- .../database}/KpiModel.py | 0 .../database}/Kpi_DB.py | 4 +- .../kpi_manager/database/__init__.py | 0 .../kpi_manager/database/tests/KpiDBtests.py | 2 +- src/kpi_manager/requirements.in | 63 +++++++++++++++++ .../kpi_manager/service/KpiManagerService.py | 2 +- .../service/KpiManagerServiceServicerImpl.py | 4 +- .../service/__init__.py | 0 .../kpi_manager/tests/test_kpi_manager.py | 30 ++------ .../kpi_manager/tests/test_messages.py | 0 19 files changed, 150 insertions(+), 46 deletions(-) delete mode 100644 src/kpi_management/kpi_manager/service/__init__.py create mode 100644 src/kpi_manager/Dockerfile rename src/{kpi_management => }/kpi_manager/README.md (98%) rename src/{kpi_management => }/kpi_manager/__init__.py (100%) rename src/{kpi_management => }/kpi_manager/client/KpiManagerClient.py (100%) rename src/{kpi_management => }/kpi_manager/client/__init__.py (100%) rename src/{kpi_management/kpi_manager/database/service => kpi_manager/database}/KpiEngine.py (97%) rename src/{kpi_management/kpi_manager/database/service => kpi_manager/database}/KpiModel.py (100%) rename src/{kpi_management/kpi_manager/database/service => kpi_manager/database}/Kpi_DB.py (97%) rename src/{kpi_management => }/kpi_manager/database/__init__.py (100%) rename src/{kpi_management => }/kpi_manager/database/tests/KpiDBtests.py (92%) create mode 100644 src/kpi_manager/requirements.in rename src/{kpi_management => }/kpi_manager/service/KpiManagerService.py (92%) rename src/{kpi_management => }/kpi_manager/service/KpiManagerServiceServicerImpl.py (97%) rename src/{kpi_management/kpi_manager/database => kpi_manager}/service/__init__.py (100%) rename src/{kpi_management => }/kpi_manager/tests/test_kpi_manager.py (88%) rename src/{kpi_management => }/kpi_manager/tests/test_messages.py (100%) diff --git
a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh index 75d922061..895f986cc 100755 --- a/scripts/run_tests_locally-kpi-DB.sh +++ b/scripts/run_tests_locally-kpi-DB.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_management/kpi_manager/database/tests/KpiDBtests.py \ No newline at end of file + kpi_manager/database/tests/KpiDBtests.py \ No newline at end of file diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh index 17c0c02ac..be69980e0 100755 --- a/scripts/run_tests_locally-kpi-manager.sh +++ b/scripts/run_tests_locally-kpi-manager.sh @@ -25,4 +25,4 @@ cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_management/kpi_manager/tests/test_kpi_manager.py \ No newline at end of file + kpi_manager/tests/test_kpi_manager.py \ No newline at end of file diff --git a/src/kpi_management/kpi_manager/service/__init__.py b/src/kpi_management/kpi_manager/service/__init__.py deleted file mode 100644 index 1549d9811..000000000 --- a/src/kpi_management/kpi_manager/service/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/src/kpi_manager/Dockerfile b/src/kpi_manager/Dockerfile new file mode 100644 index 000000000..9454f5932 --- /dev/null +++ b/src/kpi_manager/Dockerfile @@ -0,0 +1,70 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
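+
+# A typical local build might look like the following sketch (the image name and
+# tag are assumptions; the repository root must be used as the build context,
+# since the COPY directives below reference common_requirements.in, proto/ and src/):
+#   docker build -t tfs/kpi_manager:dev -f src/kpi_manager/Dockerfile .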
+ +FROM python:3.9-slim + +# Install dependencies +RUN apt-get --yes --quiet --quiet update && \ + apt-get --yes --quiet --quiet install wget g++ git && \ + rm -rf /var/lib/apt/lists/* + +# Set Python to show logs as they occur +ENV PYTHONUNBUFFERED=0 + +# Download the gRPC health probe +RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \ + wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ + chmod +x /bin/grpc_health_probe + +# Get generic Python packages +RUN python3 -m pip install --upgrade pip +RUN python3 -m pip install --upgrade setuptools wheel +RUN python3 -m pip install --upgrade pip-tools + +# Get common Python packages +# Note: this step enables sharing the previous Docker build steps among all the Python components +WORKDIR /var/teraflow +COPY common_requirements.in common_requirements.in +RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in +RUN python3 -m pip install -r common_requirements.txt + +# Add common files into working directory +WORKDIR /var/teraflow/common +COPY src/common/. ./ +RUN rm -rf proto + +# Create proto sub-folder, copy .proto files, and generate Python code +RUN mkdir -p /var/teraflow/common/proto +WORKDIR /var/teraflow/common/proto +RUN touch __init__.py +COPY proto/*.proto ./ +RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto +RUN rm *.proto +RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \; + +# Create component sub-folders, get specific Python packages +RUN mkdir -p /var/teraflow/kpi_manager +WORKDIR /var/teraflow/kpi_manager +COPY src/kpi_manager/requirements.in requirements.in +RUN pip-compile --quiet --output-file=requirements.txt requirements.in +RUN python3 -m pip install -r requirements.txt + +# Add component files into working directory +WORKDIR /var/teraflow +COPY src/context/. context/ +COPY src/device/. device/ +COPY src/kpi_manager/. kpi_manager/ + +# Start the service +ENTRYPOINT ["python", "-m", "kpi_manager.service"] diff --git a/src/kpi_management/kpi_manager/README.md b/src/kpi_manager/README.md similarity index 98% rename from src/kpi_management/kpi_manager/README.md rename to src/kpi_manager/README.md index 68885dd13..72ba6e559 100644 --- a/src/kpi_management/kpi_manager/README.md +++ b/src/kpi_manager/README.md @@ -1,5 +1,6 @@ # How to locally run and test KPI manager micro-service +## --- File links need to be updated. --- ### Pre-requisites The following requirements should be fulfilled before the execution of the KPI management service.
diff --git a/src/kpi_management/kpi_manager/__init__.py b/src/kpi_manager/__init__.py similarity index 100% rename from src/kpi_management/kpi_manager/__init__.py rename to src/kpi_manager/__init__.py diff --git a/src/kpi_management/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py similarity index 100% rename from src/kpi_management/kpi_manager/client/KpiManagerClient.py rename to src/kpi_manager/client/KpiManagerClient.py diff --git a/src/kpi_management/kpi_manager/client/__init__.py b/src/kpi_manager/client/__init__.py similarity index 100% rename from src/kpi_management/kpi_manager/client/__init__.py rename to src/kpi_manager/client/__init__.py diff --git a/src/kpi_management/kpi_manager/database/service/KpiEngine.py b/src/kpi_manager/database/KpiEngine.py similarity index 97% rename from src/kpi_management/kpi_manager/database/service/KpiEngine.py rename to src/kpi_manager/database/KpiEngine.py index 041784ff4..7fe31946b 100644 --- a/src/kpi_management/kpi_manager/database/service/KpiEngine.py +++ b/src/kpi_manager/database/KpiEngine.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, sqlalchemy, sqlalchemy_utils +import logging, sqlalchemy # from common.Settings import get_setting LOGGER = logging.getLogger(__name__) diff --git a/src/kpi_management/kpi_manager/database/service/KpiModel.py b/src/kpi_manager/database/KpiModel.py similarity index 100% rename from src/kpi_management/kpi_manager/database/service/KpiModel.py rename to src/kpi_manager/database/KpiModel.py diff --git a/src/kpi_management/kpi_manager/database/service/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py similarity index 97% rename from src/kpi_management/kpi_manager/database/service/Kpi_DB.py rename to src/kpi_manager/database/Kpi_DB.py index 21027fa7b..a414609bb 100644 --- a/src/kpi_management/kpi_manager/database/service/Kpi_DB.py +++ b/src/kpi_manager/database/Kpi_DB.py @@ -17,8 +17,8 @@ import sqlalchemy import sqlalchemy_utils from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base -from kpi_management.kpi_manager.database.service.KpiEngine import KpiEngine -from kpi_management.kpi_manager.database.service.KpiModel import Kpi +from kpi_manager.database.KpiEngine import KpiEngine +from kpi_manager.database.KpiModel import Kpi LOGGER = logging.getLogger(__name__) DB_NAME = "kpi" diff --git a/src/kpi_management/kpi_manager/database/__init__.py b/src/kpi_manager/database/__init__.py similarity index 100% rename from src/kpi_management/kpi_manager/database/__init__.py rename to src/kpi_manager/database/__init__.py diff --git a/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py b/src/kpi_manager/database/tests/KpiDBtests.py similarity index 92% rename from src/kpi_management/kpi_manager/database/tests/KpiDBtests.py rename to src/kpi_manager/database/tests/KpiDBtests.py index 402dac8e0..f75f05c14 100644 --- a/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py +++ b/src/kpi_manager/database/tests/KpiDBtests.py @@ -14,7 +14,7 @@ import logging -from kpi_management.kpi_manager.database.service.Kpi_DB import Kpi_DB +from kpi_manager.database.Kpi_DB import Kpi_DB LOGGER = logging.getLogger(__name__) diff --git a/src/kpi_manager/requirements.in b/src/kpi_manager/requirements.in new file mode 100644 index 000000000..d96e4b1b8 --- /dev/null +++ b/src/kpi_manager/requirements.in @@ -0,0 +1,63 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG 
(https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +anytree==2.8.0 +APScheduler==3.10.1 +attrs==23.2.0 +certifi==2024.2.2 +charset-normalizer==2.0.12 +colorama==0.4.6 +confluent-kafka==2.3.0 +coverage==6.3 +future-fstrings==1.2.0 +greenlet==3.0.3 +grpcio==1.47.5 +grpcio-health-checking==1.47.5 +grpcio-tools==1.47.5 +grpclib==0.4.4 +h2==4.1.0 +hpack==4.0.0 +hyperframe==6.0.1 +idna==3.7 +influx-line-protocol==0.1.4 +iniconfig==2.0.0 +kafka-python==2.0.2 +multidict==6.0.5 +networkx==3.3 +packaging==24.0 +pluggy==1.5.0 +prettytable==3.5.0 +prometheus-client==0.13.0 +protobuf==3.20.3 +psycopg2-binary==2.9.3 +py==1.11.0 +py-cpuinfo==9.0.0 +pytest==6.2.5 +pytest-benchmark==3.4.1 +pytest-depends==1.0.1 +python-dateutil==2.8.2 +python-json-logger==2.0.2 +pytz==2024.1 +questdb==1.0.1 +requests==2.27.1 +six==1.16.0 +SQLAlchemy==1.4.52 +sqlalchemy-cockroachdb==1.4.4 +SQLAlchemy-Utils==0.38.3 +toml==0.10.2 +typing_extensions==4.12.0 +tzlocal==5.2 +urllib3==1.26.18 +wcwidth==0.2.13 +xmltodict==0.12.0 diff --git a/src/kpi_management/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py similarity index 92% rename from src/kpi_management/kpi_manager/service/KpiManagerService.py rename to src/kpi_manager/service/KpiManagerService.py index d4a8a8104..ecc7105a7 100755 --- a/src/kpi_management/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_manager/service/KpiManagerService.py @@ -16,7 +16,7 @@ from common.Constants import ServiceNameEnum from common.Settings import get_service_port_grpc from common.proto.kpi_management_pb2_grpc import add_KpiManagerServiceServicer_to_server from common.tools.service.GenericGrpcService import GenericGrpcService -from kpi_management.kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl +from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl from monitoring.service.NameMapping import NameMapping class KpiManagerService(GenericGrpcService): diff --git a/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py similarity index 97% rename from src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py rename to src/kpi_manager/service/KpiManagerServiceServicerImpl.py index 3a3052182..cf13c0526 100644 --- a/src/kpi_management/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -22,8 +22,8 @@ from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorF from monitoring.service.NameMapping import NameMapping # from monitoring.service import ManagementDBTools -from kpi_management.kpi_manager.database.service.Kpi_DB import Kpi_DB -from kpi_management.kpi_manager.database.service.KpiModel import Kpi as KpiModel +from kpi_manager.database.Kpi_DB import Kpi_DB +from kpi_manager.database.KpiModel import Kpi as KpiModel # from telemetry.database.TelemetryModel import Kpi as KpiModel from 
common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId diff --git a/src/kpi_management/kpi_manager/database/service/__init__.py b/src/kpi_manager/service/__init__.py similarity index 100% rename from src/kpi_management/kpi_manager/database/service/__init__.py rename to src/kpi_manager/service/__init__.py diff --git a/src/kpi_management/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py similarity index 88% rename from src/kpi_management/kpi_manager/tests/test_kpi_manager.py rename to src/kpi_manager/tests/test_kpi_manager.py index 531efc20c..ccb7f16c2 100755 --- a/src/kpi_management/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -13,46 +13,30 @@ # limitations under the License. -# import sys -# sys.path.append('.') import os, pytest -import logging, json +import logging from typing import Union -# from apscheduler.schedulers.background import BackgroundScheduler - -from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, DeviceOperationalStatusEnum, EventTypeEnum, DeviceEvent, Device, Empty, Topology, TopologyId +from common.proto.context_pb2 import Empty from common.Constants import ServiceNameEnum -# from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME, ServiceNameEnum -from common.Settings import ( +from common.Settings import ( ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) from common.tests.MockServicerImpl_Context import MockServicerImpl_Context from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server -# from common.proto.kpi_sample_types_pb2 import KpiSampleType -# from common.tools.object_factory.Context import json_context, json_context_id -# from common.tools.object_factory.Topology import json_topology, json_topology_id -# from common.proto.monitoring_pb2 import KpiId, KpiDescriptor, SubsDescriptor, SubsList, AlarmID, \ -# AlarmDescriptor, AlarmList, KpiDescriptorList, SubsResponse, AlarmResponse, RawKpiTable #, Kpi, KpiList + from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from common.tools.service.GenericGrpcService import GenericGrpcService from context.client.ContextClient import ContextClient - from device.service.driver_api.DriverFactory import DriverFactory from device.service.driver_api.DriverInstanceCache import DriverInstanceCache from device.service.DeviceService import DeviceService from device.client.DeviceClient import DeviceClient -from kpi_management.kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_filter_request -# from monitoring.service.MonitoringService import MonitoringService -from kpi_management.kpi_manager.service.KpiManagerService import KpiManagerService -# from monitoring.client.MonitoringClient import MonitoringClient -from kpi_management.kpi_manager.client.KpiManagerClient import KpiManagerClient - -from kpi_management.kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl +from kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_filter_request +from kpi_manager.service.KpiManagerService import KpiManagerService +from kpi_manager.client.KpiManagerClient import KpiManagerClient -# from monitoring.service.ManagementDBTools import ManagementDB -# from monitoring.service.MetricsDBTools import MetricsDB from monitoring.service.NameMapping import NameMapping 
os.environ['DEVICE_EMULATED_ONLY'] = 'TRUE' diff --git a/src/kpi_management/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py similarity index 100% rename from src/kpi_management/kpi_manager/tests/test_messages.py rename to src/kpi_manager/tests/test_messages.py -- GitLab From d0c3b72833af2cbd7982f1832d950740a4ebbef4 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 10 Jul 2024 13:33:56 +0000 Subject: [PATCH 164/205] main add to Kpi Manager --- src/kpi_manager/service/__main__.py | 107 ++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 src/kpi_manager/service/__main__.py diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py new file mode 100644 index 000000000..9f0e53246 --- /dev/null +++ b/src/kpi_manager/service/__main__.py @@ -0,0 +1,107 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, signal, sys, threading, time +from prometheus_client import start_http_server +from common.Constants import ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port, + wait_for_environment_variables) +from common.proto import monitoring_pb2 +from monitoring.service.EventTools import EventsDeviceCollector # import updated +from monitoring.service.NameMapping import NameMapping # import updated +# from .MonitoringService import MonitoringService +from .KpiManagerService import KpiManagerService + +terminate = threading.Event() +LOGGER = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def start_kpi_manager(name_mapping : NameMapping): + LOGGER.info('Start Monitoring...',) + + events_collector = EventsDeviceCollector(name_mapping) + events_collector.start() + + # TODO: redesign this method to be more clear and clean + + # Iterate while terminate is not set + while not terminate.is_set(): + list_new_kpi_ids = events_collector.listen_events() + + # Monitor Kpis + if bool(list_new_kpi_ids): + for kpi_id in list_new_kpi_ids: + # Create Monitor Kpi Requests + monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() + monitor_kpi_request.kpi_id.CopyFrom(kpi_id) + monitor_kpi_request.monitoring_window_s = 86400 + monitor_kpi_request.sampling_rate_s = 10 + events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) + + time.sleep(0.5) # let other tasks run; do not overload CPU + else: + # Terminate is set, looping terminates + LOGGER.warning("Stopping execution...") + + events_collector.start() + +def main(): + global LOGGER # pylint: disable=global-statement + + log_level = get_log_level() + logging.basicConfig(level=log_level) + LOGGER = logging.getLogger(__name__) + + wait_for_environment_variables([ + get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + get_env_var_name(ServiceNameEnum.CONTEXT, 
ENVVAR_SUFIX_SERVICE_PORT_GRPC),
+        get_env_var_name(ServiceNameEnum.DEVICE,  ENVVAR_SUFIX_SERVICE_HOST     ),
+        get_env_var_name(ServiceNameEnum.DEVICE,  ENVVAR_SUFIX_SERVICE_PORT_GRPC),
+    ])
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    LOGGER.info('Starting...')
+
+    # Start metrics server
+    metrics_port = get_metrics_port()
+    start_http_server(metrics_port)
+
+    name_mapping = NameMapping()
+    # Starting monitoring service
+    # grpc_service = MonitoringService(name_mapping)
+    # grpc_service.start()
+    # start_monitoring(name_mapping)
+
+    grpc_service = KpiManagerService(name_mapping)
+    grpc_service.start()
+
+    start_kpi_manager(name_mapping)
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(timeout=1.0): pass
+
+    LOGGER.info('Terminating...')
+    grpc_service.stop()
+
+    LOGGER.info('Bye')
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
-- 
GitLab


From 58fd2c5acc582b8fb94365661c3ddd5a440c317c Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Thu, 11 Jul 2024 10:35:47 +0000
Subject: [PATCH 165/205] changes to make KPI manager in line with TFS formats

---
 manifests/kpi_managerservice.yaml             |  99 +++++++++++++
 scripts/run_tests_locally-kpi-manager.sh      |   2 +-
 src/kpi_manager/.gitlab-ci.yml                | 133 ++++++++++++++++++
 src/kpi_manager/Dockerfile                    |   2 -
 src/kpi_manager/__init__.py                   |   2 +-
 src/kpi_manager/client/KpiManagerClient.py    |  10 +-
 src/kpi_manager/client/__init__.py            |   4 +-
 src/kpi_manager/database/KpiEngine.py         |   2 +-
 src/kpi_manager/database/KpiModel.py          |   2 +-
 src/kpi_manager/database/Kpi_DB.py            |  72 ++++++----
 src/kpi_manager/database/__init__.py          |   2 +-
 src/kpi_manager/service/KpiManagerService.py  |   2 +-
 .../service/KpiManagerServiceServicerImpl.py  |  66 ++++-----
 src/kpi_manager/service/__init__.py           |   2 +-
 src/kpi_manager/service/__main__.py           |   3 +-
 .../KpiDBtests.py => tests/test_kpi_db.py}    |   8 +-
 src/kpi_manager/tests/test_kpi_manager.py     |   2 +-
 src/kpi_manager/tests/test_messages.py        |   2 +-
 18 files changed, 326 insertions(+), 89 deletions(-)
 create mode 100644 manifests/kpi_managerservice.yaml
 create mode 100644 src/kpi_manager/.gitlab-ci.yml
 rename src/kpi_manager/{database/tests/KpiDBtests.py => tests/test_kpi_db.py} (81%)

diff --git a/manifests/kpi_managerservice.yaml b/manifests/kpi_managerservice.yaml
new file mode 100644
index 000000000..45ee77895
--- /dev/null
+++ b/manifests/kpi_managerservice.yaml
@@ -0,0 +1,99 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: kpi_managerservice +spec: + selector: + matchLabels: + app: kpi_managerservice + #replicas: 1 + template: + metadata: + annotations: + config.linkerd.io/skip-outbound-ports: "4222" + labels: + app: kpi_managerservice + spec: + terminationGracePeriodSeconds: 5 + containers: + - name: server + image: labs.etsi.org:5050/tfs/controller/context:latest + imagePullPolicy: Always + ports: + - containerPort: 7071 + - containerPort: 9192 + env: + - name: LOG_LEVEL + value: "INFO" + envFrom: + - secretRef: + name: crdb-data + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:7071"] + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:7071"] + resources: + requests: + cpu: 250m + memory: 128Mi + limits: + cpu: 1000m + memory: 1024Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: kpi_managerservice + labels: + app: kpi_managerservice +spec: + type: ClusterIP + selector: + app: kpi_managerservice + ports: + - name: grpc + protocol: TCP + port: 7071 + targetPort: 7071 + - name: metrics + protocol: TCP + port: 9192 + targetPort: 9192 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: kpi_managerservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: kpi_managerservice + minReplicas: 1 + maxReplicas: 20 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh index be69980e0..742a52685 100755 --- a/scripts/run_tests_locally-kpi-manager.sh +++ b/scripts/run_tests_locally-kpi-manager.sh @@ -24,5 +24,5 @@ cd $PROJECTDIR/src # python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ +python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ kpi_manager/tests/test_kpi_manager.py \ No newline at end of file diff --git a/src/kpi_manager/.gitlab-ci.yml b/src/kpi_manager/.gitlab-ci.yml new file mode 100644 index 000000000..ffd4e38ff --- /dev/null +++ b/src/kpi_manager/.gitlab-ci.yml @@ -0,0 +1,133 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Build, tag, and push the Docker image to the GitLab Docker registry +build kpi_manager: + variables: + IMAGE_NAME: 'kpi_manager' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: build + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + script: + - docker buildx build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile . 
+ - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + after_script: + - docker images --filter="dangling=true" --quiet | xargs -r docker rmi + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + +# Apply unit test to the component +unit_test context: + variables: + IMAGE_NAME: 'kpi_manager' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: unit_test + needs: + - build context + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi + - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi + - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi + - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi + - docker container prune -f + script: + - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker pull "cockroachdb/cockroach:latest-v22.2" + - docker volume create crdb + - > + docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080 + --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123 + --volume "crdb:/cockroach/cockroach-data" + cockroachdb/cockroach:latest-v22.2 start-single-node + - echo "Waiting for initialization..." + - while ! 
docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done + - docker logs crdb + - docker ps -a + - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $CRDB_ADDRESS + - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $NATS_ADDRESS + - > + docker run --name $IMAGE_NAME -d -p 1010:1010 + --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require" + --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG + - docker ps -a + - sleep 5 + - docker logs $IMAGE_NAME + - > + docker exec -i $IMAGE_NAME bash -c + "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py" + - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" + coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' + after_script: + - docker volume rm -f crdb + - docker network rm teraflowbridge + - docker volume prune --force + - docker image prune --force + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - src/$IMAGE_NAME/tests/Dockerfile + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + artifacts: + when: always + reports: + junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml + +## Deployment of the service in Kubernetes Cluster +#deploy context: +# variables: +# IMAGE_NAME: 'context' # name of the microservice +# IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) +# stage: deploy +# needs: +# - unit test context +# # - integ_test execute +# script: +# - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml' +# - kubectl version +# - kubectl get all +# - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml" +# - kubectl get all +# # environment: +# # name: test +# # url: https://example.com +# # kubernetes: +# # namespace: test +# rules: +# - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# when: manual +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' +# when: manual diff --git a/src/kpi_manager/Dockerfile b/src/kpi_manager/Dockerfile index 9454f5932..4d74030e7 100644 --- a/src/kpi_manager/Dockerfile +++ b/src/kpi_manager/Dockerfile @@ -62,8 +62,6 @@ RUN python3 -m pip install -r requirements.txt # Add component files into working directory WORKDIR /var/teraflow -COPY src/context/. context/ -COPY src/device/. device/ COPY src/kpi_manager/. 
kpi_manager/ # Start the service diff --git a/src/kpi_manager/__init__.py b/src/kpi_manager/__init__.py index 1549d9811..3ee6f7071 100644 --- a/src/kpi_manager/__init__.py +++ b/src/kpi_manager/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index 140381d3a..cd1e98c05 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -58,7 +58,7 @@ class KpiManagerClient: def DeleteKpiDescriptor(self,request : KpiId) -> Empty: LOGGER.debug('DeleteKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request))) response = self.stub.DeleteKpiDescriptor(request) - LOGGER.info('DeleteKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) + LOGGER.debug('DeleteKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) return response @RETRY_DECORATOR @@ -69,8 +69,8 @@ class KpiManagerClient: return response @RETRY_DECORATOR - def SelectKpiDescriptor(self, request : KpiDescriptorFilter) -> KpiDescriptorList: - LOGGER.debug('SelectKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request))) - response = self.stub.SelectKpiDescriptor(request) + def SelectKpiDescriptor(self, filter : KpiDescriptorFilter) -> KpiDescriptorList: + LOGGER.debug('SelectKpiDescriptor: {:s}'.format(grpc_message_to_json_string(filter))) + response = self.stub.SelectKpiDescriptor(filter) LOGGER.debug('SelectKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response))) return response \ No newline at end of file diff --git a/src/kpi_manager/client/__init__.py b/src/kpi_manager/client/__init__.py index 1549d9811..48f7d354a 100644 --- a/src/kpi_manager/client/__init__.py +++ b/src/kpi_manager/client/__init__.py @@ -1,5 +1,5 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at diff --git a/src/kpi_manager/database/KpiEngine.py b/src/kpi_manager/database/KpiEngine.py index 7fe31946b..620ac9796 100644 --- a/src/kpi_manager/database/KpiEngine.py +++ b/src/kpi_manager/database/KpiEngine.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
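[Note between diffs] Taken together, the client changes above fix the surface a caller sees: DeleteKpiDescriptor logs its result at DEBUG like the other RPCs, and SelectKpiDescriptor now forwards the filter message directly to the stub. A minimal usage sketch, not part of any patch in this series; it assumes the no-argument constructor resolves host and port from the standard TFS settings and that the client mirrors the four RPCs implemented by the servicer below:

    from kpi_manager.client.KpiManagerClient import KpiManagerClient
    from kpi_manager.tests.test_messages import create_kpi_descriptor_request

    client = KpiManagerClient()                    # host/port from TFS settings (assumed default wiring)
    descriptor = create_kpi_descriptor_request()   # helper from the tests moved earlier in this series
    kpi_id = client.SetKpiDescriptor(descriptor)   # stores the descriptor, returns its KpiId
    retrieved = client.GetKpiDescriptor(kpi_id)    # reads the same descriptor back
    client.DeleteKpiDescriptor(kpi_id)             # returns context.Empty
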
diff --git a/src/kpi_manager/database/KpiModel.py b/src/kpi_manager/database/KpiModel.py index 78276f59e..e0f4c47a8 100644 --- a/src/kpi_manager/database/KpiModel.py +++ b/src/kpi_manager/database/KpiModel.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py index a414609bb..2e8eeeb8c 100644 --- a/src/kpi_manager/database/Kpi_DB.py +++ b/src/kpi_manager/database/Kpi_DB.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,17 +13,18 @@ # limitations under the License. import logging, time -import sqlalchemy +from typing import List, Tuple +from sqlalchemy import select, and_ import sqlalchemy_utils from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.declarative import declarative_base from kpi_manager.database.KpiEngine import KpiEngine -from kpi_manager.database.KpiModel import Kpi +from kpi_manager.database.KpiModel import Kpi as KpiModel LOGGER = logging.getLogger(__name__) DB_NAME = "kpi" -class Kpi_DB: +class KpiDB: def __init__(self): self.db_engine = KpiEngine.get_engine() if self.db_engine is None: @@ -36,7 +37,7 @@ class Kpi_DB: def create_database(self) -> None: if not sqlalchemy_utils.database_exists(self.db_engine.url): - LOGGER.info("Database created. {:}".format(self.db_engine.url)) + LOGGER.debug("Database created. {:}".format(self.db_engine.url)) sqlalchemy_utils.create_database(self.db_engine.url) def drop_database(self) -> None: @@ -45,31 +46,31 @@ class Kpi_DB: def create_tables(self): try: - Kpi.metadata.create_all(self.db_engine) # type: ignore - LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name)) + KpiModel.metadata.create_all(self.db_engine) # type: ignore + LOGGER.debug("Tables created in the DB Name: {:}".format(self.db_name)) except Exception as e: - LOGGER.info("Tables cannot be created in the kpi database. {:s}".format(str(e))) + LOGGER.debug("Tables cannot be created in the kpi database. {:s}".format(str(e))) def verify_tables(self): try: with self.db_engine.connect() as connection: result = connection.execute("SHOW TABLES;") tables = result.fetchall() # type: ignore - LOGGER.info("Tables verified: {:}".format(tables)) + LOGGER.debug("Tables verified: {:}".format(tables)) except Exception as e: - LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) + LOGGER.debug("Unable to fetch Table names. {:s}".format(str(e))) def add_row_to_db(self, row): session = self.Session() try: session.add(row) session.commit() - LOGGER.info(f"Row inserted into {row.__class__.__name__} table.") + LOGGER.debug(f"Row inserted into {row.__class__.__name__} table.") return True except Exception as e: session.rollback() if "psycopg2.errors.UniqueViolation" in str(e): - LOGGER.warning(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") + LOGGER.debug(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") else: LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. 
{str(e)}") return False @@ -81,14 +82,14 @@ class Kpi_DB: try: entity = session.query(model).filter_by(**{col_name: id_to_search}).first() if entity: - # LOGGER.info(f"{model.__name__} ID found: {str(entity)}") + # LOGGER.debug(f"{model.__name__} ID found: {str(entity)}") return entity else: - LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}") + LOGGER.debug(f"{model.__name__} ID not found: {str(id_to_search)}") return None except Exception as e: session.rollback() - LOGGER.info(f"Failed to retrieve {model.__name__} ID. {str(e)}") + LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}") raise finally: session.close() @@ -100,9 +101,9 @@ class Kpi_DB: if record: session.delete(record) session.commit() - LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) + LOGGER.debug("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) else: - LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search) + LOGGER.debug("%s with %s %s not found", model.__name__, col_name, id_to_search) return None except Exception as e: session.rollback() @@ -110,20 +111,43 @@ class Kpi_DB: finally: session.close() - def select_with_filter(self, model, **filters): + def select_with_filter(self, model, filter_object): session = self.Session() try: - query = session.query(model) - for column, value in filters.items(): - query = query.filter(getattr(model, column) == value) # type: ignore + query = session.query(KpiModel) + # Apply filters based on the filter_object + if filter_object.kpi_id: + query = query.filter(KpiModel.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id])) + + if filter_object.kpi_sample_type: + query = query.filter(KpiModel.kpi_sample_type.in_(filter_object.kpi_sample_type)) + + if filter_object.device_id: + query = query.filter(KpiModel.device_id.in_([d.device_uuid.uuid for d in filter_object.device_id])) + + if filter_object.endpoint_id: + query = query.filter(KpiModel.endpoint_id.in_([e.endpoint_uuid.uuid for e in filter_object.endpoint_id])) + + if filter_object.service_id: + query = query.filter(KpiModel.service_id.in_([s.service_uuid.uuid for s in filter_object.service_id])) + + if filter_object.slice_id: + query = query.filter(KpiModel.slice_id.in_([s.slice_uuid.uuid for s in filter_object.slice_id])) + + if filter_object.connection_id: + query = query.filter(KpiModel.connection_id.in_([c.connection_uuid.uuid for c in filter_object.connection_id])) + + if filter_object.link_id: + query = query.filter(KpiModel.link_id.in_([l.link_uuid.uuid for l in filter_object.link_id])) result = query.all() + if result: - LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}") # - Results: {result} + LOGGER.debug(f"Fetched filtered rows from {model.__name__} table with filters: {filter_object}") # - Results: {result} else: - LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}") + LOGGER.debug(f"No matching row found in {model.__name__} table with filters: {filter_object}") return result except Exception as e: - LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filters} ::: {e}") + LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}") return [] finally: session.close() \ No newline at end of file diff --git a/src/kpi_manager/database/__init__.py b/src/kpi_manager/database/__init__.py index 1549d9811..3ee6f7071 100644 --- a/src/kpi_manager/database/__init__.py 
+++ b/src/kpi_manager/database/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py index ecc7105a7..d3eed7944 100755 --- a/src/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_manager/service/KpiManagerService.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index cf13c0526..d88d6d8eb 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,93 +14,77 @@ import logging, grpc -import sqlalchemy, sqlalchemy_utils +from typing import List, Set +from sqlalchemy.sql.expression import BinaryExpression from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty from common.proto.kpi_management_pb2_grpc import KpiManagerServiceServicer from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping -# from monitoring.service import ManagementDBTools - -from kpi_manager.database.Kpi_DB import Kpi_DB +from kpi_manager.database.Kpi_DB import KpiDB from kpi_manager.database.KpiModel import Kpi as KpiModel -# from telemetry.database.TelemetryModel import Kpi as KpiModel -from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ - ConnectionId, EndPointId LOGGER = logging.getLogger(__name__) - -METRICS_POOL = MetricsPool('Monitoring', 'KpiManager') +METRICS_POOL = MetricsPool('KpiManager', 'NBIgRPC') class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): def __init__(self, name_mapping : NameMapping): - LOGGER.info('Init KpiManagerService') - self.Kpi_DBobj = Kpi_DB() + LOGGER.debug('Init KpiManagerService') + self.kpi_db_obj = KpiDB() @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiId: # type: ignore response = KpiId() - LOGGER.info("Received gRPC message object: {:}".format(request)) + LOGGER.debug("Received gRPC message object: {:}".format(request)) try: kpi_to_insert = KpiModel.convert_KpiDescriptor_to_row(request) - if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)): + if(self.kpi_db_obj.add_row_to_db(kpi_to_insert)): response.kpi_id.uuid = request.kpi_id.kpi_id.uuid - # LOGGER.info("Added Row: {:}".format(response)) + # LOGGER.debug("Added Row: {:}".format(response)) return response except Exception as e: - LOGGER.info("Unable to create KpiModel class object. 
{:}".format(e)) + LOGGER.debug("Unable to create KpiModel class object. {:}".format(e)) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiDescriptor: # type: ignore response = KpiDescriptor() - LOGGER.info("Received gRPC message object: {:}".format(request)) + LOGGER.debug("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid - row = self.Kpi_DBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + row = self.kpi_db_obj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) if row is not None: response = KpiModel.convert_row_to_KpiDescriptor(row) return response except Exception as e: - LOGGER.info('Unable to search kpi id. {:}'.format(e)) + LOGGER.debug('Unable to search kpi id. {:}'.format(e)) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore ) -> Empty: # type: ignore - LOGGER.info("Received gRPC message object: {:}".format(request)) + LOGGER.debug("Received gRPC message object: {:}".format(request)) try: kpi_id_to_search = request.kpi_id.uuid - self.Kpi_DBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) + self.kpi_db_obj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search) except Exception as e: - LOGGER.info('Unable to search kpi id. {:}'.format(e)) + LOGGER.debug('Unable to search kpi id. {:}'.format(e)) finally: return Empty() @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) - def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore + def SelectKpiDescriptor(self, filter: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore ) -> KpiDescriptorList: # type: ignore - LOGGER.info("Received gRPC message object: {:}".format(request)) + LOGGER.debug("Received gRPC message object: {:}".format(filter)) response = KpiDescriptorList() - # LOGGER.info("Recevied requested Object: {:}".format(request)) - # re-structre the filter. create dynamic filter - filter_to_apply = dict() - filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0] - filter_to_apply['device_id'] = request.device_id[0].device_uuid.uuid - filter_to_apply['endpoint_id'] = request.endpoint_id[0].endpoint_uuid.uuid - filter_to_apply['service_id'] = request.service_id[0].service_uuid.uuid - filter_to_apply['slice_id'] = request.slice_id[0].slice_uuid.uuid - filter_to_apply['connection_id'] = request.connection_id[0].connection_uuid.uuid - filter_to_apply['link_id'] = request.link_id[0].link_uuid.uuid try: - rows = self.Kpi_DBobj.select_with_filter(KpiModel, **filter_to_apply) + rows = self.kpi_db_obj.select_with_filter(KpiModel, filter) except Exception as e: - LOGGER.info('Unable to apply filter on kpi descriptor. {:}'.format(e)) + LOGGER.debug('Unable to apply filter on kpi descriptor. 
{:}'.format(e)) try: - if len(rows) != 0: - for row in rows: - kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row) - response.kpi_descriptor_list.append(kpiDescriptor_obj) + for row in rows: + kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row) + response.kpi_descriptor_list.append(kpiDescriptor_obj) return response except Exception as e: - LOGGER.info('Unable to process response {:}'.format(e)) + LOGGER.debug('Unable to process filter response {:}'.format(e)) diff --git a/src/kpi_manager/service/__init__.py b/src/kpi_manager/service/__init__.py index 1549d9811..3ee6f7071 100644 --- a/src/kpi_manager/service/__init__.py +++ b/src/kpi_manager/service/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py index 9f0e53246..9085bc468 100644 --- a/src/kpi_manager/service/__main__.py +++ b/src/kpi_manager/service/__main__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,7 +21,6 @@ from common.Settings import ( from common.proto import monitoring_pb2 from monitoring.service.EventTools import EventsDeviceCollector # import updated from monitoring.service.NameMapping import NameMapping # import updated -# from .MonitoringService import MonitoringService from .KpiManagerService import KpiManagerService terminate = threading.Event() diff --git a/src/kpi_manager/database/tests/KpiDBtests.py b/src/kpi_manager/tests/test_kpi_db.py similarity index 81% rename from src/kpi_manager/database/tests/KpiDBtests.py rename to src/kpi_manager/tests/test_kpi_db.py index f75f05c14..e961c12ba 100644 --- a/src/kpi_manager/database/tests/KpiDBtests.py +++ b/src/kpi_manager/tests/test_kpi_db.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,13 +14,13 @@ import logging -from kpi_manager.database.Kpi_DB import Kpi_DB +from kpi_manager.database.Kpi_DB import KpiDB LOGGER = logging.getLogger(__name__) -def test_verify_Tables(): +def test_verify_databases_and_Tables(): LOGGER.info('>>> test_verify_Tables : START <<< ') - kpiDBobj = Kpi_DB() + kpiDBobj = KpiDB() kpiDBobj.drop_database() kpiDBobj.verify_tables() kpiDBobj.create_database() diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index ccb7f16c2..fb77eb1a8 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
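[Note between diffs] The rewritten select_with_filter treats every empty repeated field of KpiDescriptorFilter as "no constraint", so a caller only populates the dimensions it wants to match on. A hedged sketch of building such a filter (module path as of this patch; PATCH 167 below renames it to kpi_manager_pb2, and the device UUID here is purely illustrative):

    from common.proto.kpi_management_pb2 import KpiDescriptorFilter
    from common.proto.kpi_sample_types_pb2 import KpiSampleType

    flt = KpiDescriptorFilter()
    flt.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED)
    device = flt.device_id.add()              # repeated context.DeviceId
    device.device_uuid.uuid = 'DEV1'          # hypothetical device UUID
    # endpoint_id, service_id, slice_id, connection_id, link_id left empty: match everything
    reply = client.SelectKpiDescriptor(flt)   # 'client' as in the sketch after the client diff above
    for desc in reply.kpi_descriptor_list:
        print(desc.kpi_id.kpi_id.uuid, desc.kpi_description)
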
diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index e1cb4ddf6..2d4a121fd 100644 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. -- GitLab From 8bd84c7c1a5753482f9d98d0af508f4eaba6a536 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Jul 2024 12:05:36 +0000 Subject: [PATCH 166/205] Exception added --- scripts/run_tests_locally-kpi-DB.sh | 4 ++-- src/kpi_manager/database/KpiModel.py | 4 +--- src/kpi_manager/database/Kpi_DB.py | 21 +++++++++++---------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh index 895f986cc..e46df4657 100755 --- a/scripts/run_tests_locally-kpi-DB.sh +++ b/scripts/run_tests_locally-kpi-DB.sh @@ -24,5 +24,5 @@ cd $PROJECTDIR/src # python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/database/tests/KpiDBtests.py \ No newline at end of file +python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ + kpi_manager/tests/test_kpi_db.py \ No newline at end of file diff --git a/src/kpi_manager/database/KpiModel.py b/src/kpi_manager/database/KpiModel.py index e0f4c47a8..74620c33d 100644 --- a/src/kpi_manager/database/KpiModel.py +++ b/src/kpi_manager/database/KpiModel.py @@ -14,10 +14,8 @@ import logging from sqlalchemy.dialects.postgresql import UUID -from sqlalchemy import Column, Integer, String, Float, Text, ForeignKey -# from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, Integer, String, Float, Text from sqlalchemy.orm import registry -from sqlalchemy.orm import sessionmaker, relationship from common.proto.kpi_management_pb2 import KpiDescriptor logging.basicConfig(level=logging.INFO) diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py index 2e8eeeb8c..6ab2c52f6 100644 --- a/src/kpi_manager/database/Kpi_DB.py +++ b/src/kpi_manager/database/Kpi_DB.py @@ -12,14 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging, time -from typing import List, Tuple -from sqlalchemy import select, and_ +import logging import sqlalchemy_utils from sqlalchemy.orm import sessionmaker -from sqlalchemy.ext.declarative import declarative_base from kpi_manager.database.KpiEngine import KpiEngine from kpi_manager.database.KpiModel import Kpi as KpiModel +from common.method_wrappers.ServiceExceptions import ( + AlreadyExistsException, OperationFailedException) LOGGER = logging.getLogger(__name__) DB_NAME = "kpi" @@ -31,8 +30,6 @@ class KpiDB: LOGGER.error('Unable to get SQLAlchemy DB Engine...') return False self.db_name = DB_NAME - # self.drop_database(self.db_engine) # added to test - # self.create_database(self.db_engine) # to add database self.Session = sessionmaker(bind=self.db_engine) def create_database(self) -> None: @@ -50,6 +47,7 @@ class KpiDB: LOGGER.debug("Tables created in the DB Name: {:}".format(self.db_name)) except Exception as e: LOGGER.debug("Tables cannot be created in the kpi database. 
{:s}".format(str(e))) + raise OperationFailedException ("Tables can't be created", extra_details=["unable to create table {:}".format(e)]) def verify_tables(self): try: @@ -70,10 +68,12 @@ class KpiDB: except Exception as e: session.rollback() if "psycopg2.errors.UniqueViolation" in str(e): - LOGGER.debug(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") + LOGGER.error(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") + raise AlreadyExistsException(row.__class__.__name__, row, + extra_details=["Unique key voilation: {:}".format(e)] ) else: LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") - return False + raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) finally: session.close() @@ -90,7 +90,7 @@ class KpiDB: except Exception as e: session.rollback() LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}") - raise + raise OperationFailedException ("search by column id", extra_details=["unable to search row {:}".format(e)]) finally: session.close() @@ -108,6 +108,7 @@ class KpiDB: except Exception as e: session.rollback() LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) + raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) finally: session.close() @@ -148,6 +149,6 @@ class KpiDB: return result except Exception as e: LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}") - return [] + raise OperationFailedException ("Select by filter", extra_details=["unable to apply the filter {:}".format(e)]) finally: session.close() \ No newline at end of file -- GitLab From a9f232abd64acf8dc25b94b789651d8a61f3748d Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Jul 2024 12:19:32 +0000 Subject: [PATCH 167/205] change of proto file name from "Kpi_management" to "Kpi_manager" --- proto/analytics_frontend.proto | 6 +++--- proto/device.proto | 6 +++--- proto/{kpi_management.proto => kpi_manager.proto} | 2 +- proto/monitoring.proto | 14 +++++++------- proto/optical_attack_detector.proto | 4 ++-- proto/policy_condition.proto | 4 ++-- proto/telemetry_frontend.proto | 6 +++--- src/kpi_manager/client/KpiManagerClient.py | 4 ++-- src/kpi_manager/database/KpiModel.py | 2 +- src/kpi_manager/service/KpiManagerService.py | 2 +- .../service/KpiManagerServiceServicerImpl.py | 4 ++-- src/kpi_manager/tests/test_kpi_manager.py | 2 +- src/kpi_manager/tests/test_messages.py | 6 +++--- src/monitoring/client/MonitoringClient.py | 2 +- 14 files changed, 32 insertions(+), 32 deletions(-) rename proto/{kpi_management.proto => kpi_manager.proto} (99%) diff --git a/proto/analytics_frontend.proto b/proto/analytics_frontend.proto index 070d0b957..c37acceaa 100644 --- a/proto/analytics_frontend.proto +++ b/proto/analytics_frontend.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package device; import "context.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; import "kpi_sample_types.proto"; service AnalyticsFrontendService { @@ -21,7 +21,7 @@ enum AnalyzerMode { } message Analyzer { - repeated kpi_management.KpiId kpi_id = 1; // The KPI Ids to be processed by the analyzer + repeated kpi_manager.KpiId kpi_id = 1; // The KPI Ids to be processed by the analyzer AnalyzerMode mode = 2; // Operation mode of the analyzer float batch_min_duration_s = 3; // In batch mode, min duration to collect before executing batch float 
batch_max_duration_s = 4; // In batch mode, max duration collected to execute the batch @@ -34,7 +34,7 @@ message AnalyzerFilter { // An empty list means: any value is accepted. // All fields empty means: list all Analyzers repeated AnalyzerId analyzer_id = 1; - repeated kpi_management.KpiId kpi_id = 2; + repeated kpi_manager.KpiId kpi_id = 2; repeated kpi_sample_types.KpiSampleType kpi_sample_type = 3; repeated context.DeviceId device_id = 4; repeated context.EndPointId endpoint_id = 5; diff --git a/proto/device.proto b/proto/device.proto index 0ce159d5b..57780adae 100644 --- a/proto/device.proto +++ b/proto/device.proto @@ -17,7 +17,7 @@ package device; import "context.proto"; //import "monitoring.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; service DeviceService { rpc AddDevice (context.Device ) returns (context.DeviceId ) {} @@ -28,8 +28,8 @@ service DeviceService { } message MonitoringSettings { - kpi_management.KpiId kpi_id = 1; - kpi_management.KpiDescriptor kpi_descriptor = 2; + kpi_manager.KpiId kpi_id = 1; + kpi_manager.KpiDescriptor kpi_descriptor = 2; float sampling_duration_s = 3; float sampling_interval_s = 4; } diff --git a/proto/kpi_management.proto b/proto/kpi_manager.proto similarity index 99% rename from proto/kpi_management.proto rename to proto/kpi_manager.proto index a2a8f9d72..dbb464d73 100644 --- a/proto/kpi_management.proto +++ b/proto/kpi_manager.proto @@ -13,7 +13,7 @@ // limitations under the License. syntax = "proto3"; -package kpi_management; +package kpi_manager; import "context.proto"; import "kpi_sample_types.proto"; diff --git a/proto/monitoring.proto b/proto/monitoring.proto index 27a04d1d8..f240fc3ce 100755 --- a/proto/monitoring.proto +++ b/proto/monitoring.proto @@ -16,7 +16,7 @@ syntax = "proto3"; package monitoring; import "context.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; //import "kpi_sample_types.proto"; service MonitoringService { @@ -43,14 +43,14 @@ service MonitoringService { message MonitorKpiRequest { - kpi_management.KpiId kpi_id = 1; + kpi_manager.KpiId kpi_id = 1; float monitoring_window_s = 2; float sampling_rate_s = 3; // Pending add field to reflect Available Device Protocols } message KpiQuery { - repeated kpi_management.KpiId kpi_ids = 1; + repeated kpi_manager.KpiId kpi_ids = 1; float monitoring_window_s = 2; uint32 last_n_samples = 3; // used when you want something like "get the last N many samples context.Timestamp start_timestamp = 4; // used when you want something like "get the samples since X date/time" @@ -64,7 +64,7 @@ message RawKpi { // cell } message RawKpiList { // column - kpi_management.KpiId kpi_id = 1; + kpi_manager.KpiId kpi_id = 1; repeated RawKpi raw_kpis = 2; } @@ -75,7 +75,7 @@ message RawKpiTable { // table message Kpi { - kpi_management.KpiId kpi_id = 1; + kpi_manager.KpiId kpi_id = 1; context.Timestamp timestamp = 2; KpiValue kpi_value = 3; } @@ -109,7 +109,7 @@ message KpiList { message SubsDescriptor{ SubscriptionID subs_id = 1; - kpi_management.KpiId kpi_id = 2; + kpi_manager.KpiId kpi_id = 2; float sampling_duration_s = 3; float sampling_interval_s = 4; context.Timestamp start_timestamp = 5; // used when you want something like "get the samples since X date/time" @@ -134,7 +134,7 @@ message AlarmDescriptor { AlarmID alarm_id = 1; string alarm_description = 2; string name = 3; - kpi_management.KpiId kpi_id = 4; + kpi_manager.KpiId kpi_id = 4; KpiValueRange kpi_value_range = 5; context.Timestamp timestamp = 6; } diff --git 
a/proto/optical_attack_detector.proto b/proto/optical_attack_detector.proto index 88b499415..bf5cf4db1 100644 --- a/proto/optical_attack_detector.proto +++ b/proto/optical_attack_detector.proto @@ -18,7 +18,7 @@ package optical_attack_detector; import "context.proto"; //import "monitoring.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; service OpticalAttackDetectorService { @@ -29,5 +29,5 @@ service OpticalAttackDetectorService { message DetectionRequest { context.ServiceId service_id = 1; - kpi_management.KpiId kpi_id = 2; + kpi_manager.KpiId kpi_id = 2; } diff --git a/proto/policy_condition.proto b/proto/policy_condition.proto index e7fee2a6c..2904f4756 100644 --- a/proto/policy_condition.proto +++ b/proto/policy_condition.proto @@ -16,11 +16,11 @@ syntax = "proto3"; package policy; import "monitoring.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; // Condition message PolicyRuleCondition { - kpi_management.KpiId kpiId = 1; + kpi_manager.KpiId kpiId = 1; NumericalOperator numericalOperator = 2; monitoring.KpiValue kpiValue = 3; } diff --git a/proto/telemetry_frontend.proto b/proto/telemetry_frontend.proto index ffc851a4c..01fedd4f3 100644 --- a/proto/telemetry_frontend.proto +++ b/proto/telemetry_frontend.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package device; import "context.proto"; -import "kpi_management.proto"; +import "kpi_manager.proto"; service TelemetryFrontendService { rpc StartCollector (Collector ) returns (CollectorId ) {} @@ -16,7 +16,7 @@ message CollectorId { message Collector { CollectorId collector_id = 1; // The Collector ID - kpi_management.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples + kpi_manager.KpiId kpi_id = 2; // The KPI Id to be associated to the collected samples float duration_s = 3; // Terminate data collection after duration[seconds]; duration==0 means indefinitely float interval_s = 4; // Interval between collected samples } @@ -26,7 +26,7 @@ message CollectorFilter { // An empty list means: any value is accepted. 
// All fields empty means: list all Collectors repeated CollectorId collector_id = 1; - repeated kpi_management.KpiId kpi_id = 2; + repeated kpi_manager.KpiId kpi_id = 2; } message CollectorList { diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py index cd1e98c05..cfc3a0b1c 100755 --- a/src/kpi_manager/client/KpiManagerClient.py +++ b/src/kpi_manager/client/KpiManagerClient.py @@ -19,8 +19,8 @@ from common.Settings import get_service_host, get_service_port_grpc from common.proto.context_pb2 import Empty from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.client.RetryDecorator import retry, delay_exponential -from common.proto.kpi_management_pb2_grpc import KpiManagerServiceStub -from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList LOGGER = logging.getLogger(__name__) MAX_RETRIES = 10 diff --git a/src/kpi_manager/database/KpiModel.py b/src/kpi_manager/database/KpiModel.py index 74620c33d..b8794ef68 100644 --- a/src/kpi_manager/database/KpiModel.py +++ b/src/kpi_manager/database/KpiModel.py @@ -16,7 +16,7 @@ import logging from sqlalchemy.dialects.postgresql import UUID from sqlalchemy import Column, Integer, String, Float, Text from sqlalchemy.orm import registry -from common.proto.kpi_management_pb2 import KpiDescriptor +from common.proto.kpi_manager_pb2 import KpiDescriptor logging.basicConfig(level=logging.INFO) LOGGER = logging.getLogger(__name__) diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py index d3eed7944..3868a848f 100755 --- a/src/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_manager/service/KpiManagerService.py @@ -14,7 +14,7 @@ from common.Constants import ServiceNameEnum from common.Settings import get_service_port_grpc -from common.proto.kpi_management_pb2_grpc import add_KpiManagerServiceServicer_to_server +from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server from common.tools.service.GenericGrpcService import GenericGrpcService from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl from monitoring.service.NameMapping import NameMapping diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index d88d6d8eb..2fd4e6ac8 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -18,8 +18,8 @@ from typing import List, Set from sqlalchemy.sql.expression import BinaryExpression from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.proto.context_pb2 import Empty -from common.proto.kpi_management_pb2_grpc import KpiManagerServiceServicer -from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from monitoring.service.NameMapping import NameMapping from kpi_manager.database.Kpi_DB import KpiDB from kpi_manager.database.KpiModel import Kpi as KpiModel diff --git a/src/kpi_manager/tests/test_kpi_manager.py 
b/src/kpi_manager/tests/test_kpi_manager.py index fb77eb1a8..c746ee406 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -24,7 +24,7 @@ from common.Settings import ( from common.tests.MockServicerImpl_Context import MockServicerImpl_Context from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server -from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList from common.tools.service.GenericGrpcService import GenericGrpcService from context.client.ContextClient import ContextClient diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index 2d4a121fd..230cfabd0 100644 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -13,13 +13,13 @@ # limitations under the License. import uuid -from common.proto import kpi_management_pb2 +from common.proto import kpi_manager_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId def create_kpi_descriptor_request(descriptor_name: str = "Test_name"): - _create_kpi_request = kpi_management_pb2.KpiDescriptor() + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) _create_kpi_request.kpi_description = descriptor_name _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED @@ -32,7 +32,7 @@ def create_kpi_descriptor_request(descriptor_name: str = "Test_name"): return _create_kpi_request def create_kpi_filter_request(): - _create_kpi_filter_request = kpi_management_pb2.KpiDescriptorFilter() + _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) device_id_obj = DeviceId() diff --git a/src/monitoring/client/MonitoringClient.py b/src/monitoring/client/MonitoringClient.py index 5d529bb3c..b66a8061e 100644 --- a/src/monitoring/client/MonitoringClient.py +++ b/src/monitoring/client/MonitoringClient.py @@ -20,7 +20,7 @@ from common.Settings import get_service_host, get_service_port_grpc from common.tools.client.RetryDecorator import retry, delay_exponential from common.tools.grpc.Tools import grpc_message_to_json_string from common.proto.context_pb2 import Empty -from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorList +from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorList from common.proto.monitoring_pb2 import Kpi, MonitorKpiRequest, \ KpiQuery, KpiList, SubsDescriptor, SubscriptionID, SubsList, \ SubsResponse, AlarmDescriptor, AlarmID, AlarmList, AlarmResponse, AlarmSubscription, RawKpiTable -- GitLab From 9e41b81dfca929d9305047c906488766ef4111ca Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 11 Jul 2024 12:22:05 +0000 Subject: [PATCH 168/205] kpi_manager module name added in my_deploy.sh --- my_deploy.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/my_deploy.sh b/my_deploy.sh index 552aa6440..d7da19b86 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -23,7 +23,7 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator" # Uncomment to activate Monitoring -export TFS_COMPONENTS="${TFS_COMPONENTS} 
monitoring" +export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring kpi_manager" # Uncomment to activate BGP-LS Speaker #export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker" -- GitLab From 9365d528d2b403b1243de94b3b29f76b9ae9c777 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 12 Jul 2024 07:18:18 +0000 Subject: [PATCH 169/205] initial changes for KPI value writer and api --- proto/kpi_value.proto | 38 +++++ ....sh => run_tests_locally-kpi-value-API.sh} | 4 +- ... => run_tests_locally-kpi-value-writer.sh} | 4 +- .../kpi_value_writer/service/__init__.py | 14 -- src/kpi_management/requirements.in | 63 --------- src/kpi_management/service/__init__.py | 14 -- .../service/database/__main__.py | 107 -------------- src/kpi_management/tests/KPI_configs.json | 8 -- src/kpi_management/tests/__init__.py | 14 -- src/kpi_manager/tests/test_kpi_manager.py | 77 +++++----- src/kpi_manager/tests/test_messages.py | 13 ++ .../__init__.py | 0 .../client}/__init__.py | 0 .../service}/__init__.py | 0 src/kpi_value_writer/.gitlab-ci.yml | 133 ++++++++++++++++++ .../Dockerfile | 13 +- .../README.md | 24 ++-- .../service => kpi_value_writer}/__init__.py | 0 .../service/KpiValueComposer.py | 0 .../service/KpiValueWriter.py | 0 .../kpi_value_writer/service/KpiWriter.py | 0 .../service}/__init__.py | 0 .../service/__main__.py | 3 +- .../tests/test_kpi_composer.py | 0 .../tests/test_kpi_value_writer.py} | 5 + .../tests/test_kpi_writer.py | 0 .../tests/test_messages.py | 108 +++++++------- 27 files changed, 315 insertions(+), 327 deletions(-) create mode 100644 proto/kpi_value.proto rename scripts/{run_tests_locally-kpi-composer.sh => run_tests_locally-kpi-value-API.sh} (86%) rename scripts/{run_tests_locally-kpi-writer.sh => run_tests_locally-kpi-value-writer.sh} (85%) delete mode 100644 src/kpi_management/kpi_value_writer/service/__init__.py delete mode 100644 src/kpi_management/requirements.in delete mode 100644 src/kpi_management/service/__init__.py delete mode 100644 src/kpi_management/service/database/__main__.py delete mode 100644 src/kpi_management/tests/KPI_configs.json delete mode 100644 src/kpi_management/tests/__init__.py rename src/{kpi_management => kpi_value_api}/__init__.py (100%) rename src/{kpi_management/kpi_value_api => kpi_value_api/client}/__init__.py (100%) rename src/{kpi_management/kpi_value_api/client => kpi_value_api/service}/__init__.py (100%) create mode 100644 src/kpi_value_writer/.gitlab-ci.yml rename src/{kpi_management => kpi_value_writer}/Dockerfile (88%) rename src/{kpi_management => kpi_value_writer}/README.md (70%) rename src/{kpi_management/kpi_value_api/service => kpi_value_writer}/__init__.py (100%) rename src/{kpi_management => }/kpi_value_writer/service/KpiValueComposer.py (100%) create mode 100644 src/kpi_value_writer/service/KpiValueWriter.py rename src/{kpi_management => }/kpi_value_writer/service/KpiWriter.py (100%) rename src/{kpi_management/kpi_value_writer => kpi_value_writer/service}/__init__.py (100%) rename src/{kpi_management => kpi_value_writer}/service/__main__.py (96%) rename src/{kpi_management => kpi_value_writer}/tests/test_kpi_composer.py (100%) rename src/{kpi_management/service/database/__init__.py => kpi_value_writer/tests/test_kpi_value_writer.py} (75%) mode change 100644 => 100755 rename src/{kpi_management => kpi_value_writer}/tests/test_kpi_writer.py (100%) rename src/{kpi_management => kpi_value_writer}/tests/test_messages.py (64%) diff --git a/proto/kpi_value.proto b/proto/kpi_value.proto new file mode 100644 index 000000000..4f9f4edc5 --- 
/dev/null +++ b/proto/kpi_value.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; +package kpi_value_api; + +import "context.proto"; +import "kpi_manager.proto"; + +service KpiValueAPI { + rpc StoreKpiValues (KpiValueList) returns (context.Empty) {} + rpc SelectKpiValues (KpiValueFilter) returns (KpiValueList) {} +} + +message KpiValue { + kpi_manager.KpiId kpi_id = 1; + context.Timestamp timestamp = 2; + KpiValueType kpi_value_type = 3; +} + +message KpiValueList { + repeated KpiValue kpi_value_list = 1; +} + +message KpiValueType { + oneof value { + int32 int32Val = 1; + uint32 uint32Val = 2; + int64 int64Val = 3; + uint64 uint64Val = 4; + float floatVal = 5; + string stringVal = 6; + bool boolVal = 7; + } +} + +message KpiValueFilter { + repeated kpi_manager.KpiId kpi_id = 1; + repeated context.Timestamp start_timestamp = 2; + repeated context.Timestamp end_timestamp = 3; +} diff --git a/scripts/run_tests_locally-kpi-composer.sh b/scripts/run_tests_locally-kpi-value-API.sh similarity index 86% rename from scripts/run_tests_locally-kpi-composer.sh rename to scripts/run_tests_locally-kpi-value-API.sh index c61b25788..65a269ec6 100755 --- a/scripts/run_tests_locally-kpi-composer.sh +++ b/scripts/run_tests_locally-kpi-value-API.sh @@ -19,5 +19,5 @@ PROJECTDIR=`pwd` cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/tests/test_kpi_composer.py \ No newline at end of file +python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ + kpi_value_api/tests/test_kpi_value_api.py \ No newline at end of file diff --git a/scripts/run_tests_locally-kpi-writer.sh b/scripts/run_tests_locally-kpi-value-writer.sh similarity index 85% rename from scripts/run_tests_locally-kpi-writer.sh rename to scripts/run_tests_locally-kpi-value-writer.sh index 2bc2e5130..95cf396f6 100755 --- a/scripts/run_tests_locally-kpi-writer.sh +++ b/scripts/run_tests_locally-kpi-value-writer.sh @@ -19,5 +19,5 @@ PROJECTDIR=`pwd` cd $PROJECTDIR/src RCFILE=$PROJECTDIR/coverage/.coveragerc -python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ - kpi_manager/tests/test_kpi_writer.py \ No newline at end of file +python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ + kpi_value_writer/tests/test_kpi_value_writer.py \ No newline at end of file diff --git a/src/kpi_management/kpi_value_writer/service/__init__.py b/src/kpi_management/kpi_value_writer/service/__init__.py deleted file mode 100644 index 1549d9811..000000000 --- a/src/kpi_management/kpi_value_writer/service/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
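The new kpi_value.proto models each KPI sample as a KpiId plus a context.Timestamp and a KpiValueType, whose oneof guarantees exactly one typed payload per value. A minimal sketch of building and inspecting such a message, assuming the generated Python module follows the usual common.proto naming (kpi_value_pb2 here; a later patch in this series renames the file to kpi_value_api.proto):

```python
# Illustrative sketch only: constructing a KpiValue from the new proto.
# The module name kpi_value_pb2 is an assumption derived from the proto filename.
from common.proto.kpi_value_pb2 import KpiValue, KpiValueList

kpi_value = KpiValue()
kpi_value.kpi_id.kpi_id.uuid      = '123e4567-e89b-12d3-a456-426614174000'  # placeholder UUID
kpi_value.timestamp.timestamp     = 1720000000.0
kpi_value.kpi_value_type.floatVal = 30.5   # setting one oneof field clears the others

kpi_value_list = KpiValueList()
kpi_value_list.kpi_value_list.append(kpi_value)

# WhichOneof() reports which typed field of the oneof is currently populated
assert kpi_value.kpi_value_type.WhichOneof('value') == 'floatVal'
```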
- diff --git a/src/kpi_management/requirements.in b/src/kpi_management/requirements.in deleted file mode 100644 index d96e4b1b8..000000000 --- a/src/kpi_management/requirements.in +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -anytree==2.8.0 -APScheduler==3.10.1 -attrs==23.2.0 -certifi==2024.2.2 -charset-normalizer==2.0.12 -colorama==0.4.6 -confluent-kafka==2.3.0 -coverage==6.3 -future-fstrings==1.2.0 -greenlet==3.0.3 -grpcio==1.47.5 -grpcio-health-checking==1.47.5 -grpcio-tools==1.47.5 -grpclib==0.4.4 -h2==4.1.0 -hpack==4.0.0 -hyperframe==6.0.1 -idna==3.7 -influx-line-protocol==0.1.4 -iniconfig==2.0.0 -kafka-python==2.0.2 -multidict==6.0.5 -networkx==3.3 -packaging==24.0 -pluggy==1.5.0 -prettytable==3.5.0 -prometheus-client==0.13.0 -protobuf==3.20.3 -psycopg2-binary==2.9.3 -py==1.11.0 -py-cpuinfo==9.0.0 -pytest==6.2.5 -pytest-benchmark==3.4.1 -pytest-depends==1.0.1 -python-dateutil==2.8.2 -python-json-logger==2.0.2 -pytz==2024.1 -questdb==1.0.1 -requests==2.27.1 -six==1.16.0 -SQLAlchemy==1.4.52 -sqlalchemy-cockroachdb==1.4.4 -SQLAlchemy-Utils==0.38.3 -toml==0.10.2 -typing_extensions==4.12.0 -tzlocal==5.2 -urllib3==1.26.18 -wcwidth==0.2.13 -xmltodict==0.12.0 diff --git a/src/kpi_management/service/__init__.py b/src/kpi_management/service/__init__.py deleted file mode 100644 index 1549d9811..000000000 --- a/src/kpi_management/service/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/src/kpi_management/service/database/__main__.py b/src/kpi_management/service/database/__main__.py deleted file mode 100644 index 9f0e53246..000000000 --- a/src/kpi_management/service/database/__main__.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging, signal, sys, threading, time -from prometheus_client import start_http_server -from common.Constants import ServiceNameEnum -from common.Settings import ( - ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port, - wait_for_environment_variables) -from common.proto import monitoring_pb2 -from monitoring.service.EventTools import EventsDeviceCollector # import updated -from monitoring.service.NameMapping import NameMapping # import updated -# from .MonitoringService import MonitoringService -from .KpiManagerService import KpiManagerService - -terminate = threading.Event() -LOGGER = None - -def signal_handler(signal, frame): # pylint: disable=redefined-outer-name - LOGGER.warning('Terminate signal received') - terminate.set() - -def start_kpi_manager(name_mapping : NameMapping): - LOGGER.info('Start Monitoring...',) - - events_collector = EventsDeviceCollector(name_mapping) - events_collector.start() - - # TODO: redesign this method to be more clear and clean - - # Iterate while terminate is not set - while not terminate.is_set(): - list_new_kpi_ids = events_collector.listen_events() - - # Monitor Kpis - if bool(list_new_kpi_ids): - for kpi_id in list_new_kpi_ids: - # Create Monitor Kpi Requests - monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() - monitor_kpi_request.kpi_id.CopyFrom(kpi_id) - monitor_kpi_request.monitoring_window_s = 86400 - monitor_kpi_request.sampling_rate_s = 10 - events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) - - time.sleep(0.5) # let other tasks run; do not overload CPU - else: - # Terminate is set, looping terminates - LOGGER.warning("Stopping execution...") - - events_collector.start() - -def main(): - global LOGGER # pylint: disable=global-statement - - log_level = get_log_level() - logging.basicConfig(level=log_level) - LOGGER = logging.getLogger(__name__) - - wait_for_environment_variables([ - get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), - get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), - get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_HOST ), - get_env_var_name(ServiceNameEnum.DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC), - ]) - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - LOGGER.info('Starting...') - - # Start metrics server - metrics_port = get_metrics_port() - start_http_server(metrics_port) - - name_mapping = NameMapping() - # Starting monitoring service - # grpc_service = MonitoringService(name_mapping) - # grpc_service.start() - # start_monitoring(name_mapping) - - grpc_service = KpiManagerService(name_mapping) - grpc_service.start() - - start_kpi_manager(name_mapping) - - # Wait for Ctrl+C or termination signal - while not terminate.wait(timeout=1.0): pass - - LOGGER.info('Terminating...') - grpc_service.stop() - - LOGGER.info('Bye') - return 0 - -if __name__ == '__main__': - sys.exit(main()) diff --git a/src/kpi_management/tests/KPI_configs.json b/src/kpi_management/tests/KPI_configs.json deleted file mode 100644 index ba73bc41a..000000000 --- a/src/kpi_management/tests/KPI_configs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "KPIs": - [ - "node_network_receive_packets_total", - "node_network_receive_bytes_total", - "node_network_transmit_bytes_total" - ] -} \ No newline at end of file diff --git a/src/kpi_management/tests/__init__.py b/src/kpi_management/tests/__init__.py deleted file mode 100644 index 1549d9811..000000000 --- 
a/src/kpi_management/tests/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index c746ee406..b517c9568 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -33,7 +33,7 @@ from device.service.driver_api.DriverInstanceCache import DriverInstanceCache from device.service.DeviceService import DeviceService from device.client.DeviceClient import DeviceClient -from kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_filter_request +from kpi_manager.tests.test_messages import create_kpi_descriptor_request, create_kpi_filter_request, create_kpi_descriptor_request_a from kpi_manager.service.KpiManagerService import KpiManagerService from kpi_manager.client.KpiManagerClient import KpiManagerClient @@ -194,41 +194,48 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab ########################### # ---------- 3rd Iteration Tests ---------------- -def test_SetKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") - response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - LOGGER.info("Response gRPC message object: {:}".format(response)) - assert isinstance(response, KpiId) - -def test_DeleteKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") - # adding KPI - response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - # deleting KPI - del_response = kpi_manager_client.DeleteKpiDescriptor(response_id) - # select KPI - kpi_manager_client.GetKpiDescriptor(response_id) - LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) - assert isinstance(del_response, Empty) - -def test_GetKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") - # adding KPI - response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - # get KPI - response = kpi_manager_client.GetKpiDescriptor(response_id) - LOGGER.info("Response gRPC message object: {:}".format(response)) - assert isinstance(response, KpiDescriptor) - -def test_SelectKpiDescriptor(kpi_manager_client): - LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") - # adding KPI - kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) - # select KPI(s) - response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) - LOGGER.info("Response gRPC message object: {:}".format(response)) - assert isinstance(response, KpiDescriptorList) +# def test_SetKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ") +# response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert 
isinstance(response, KpiId) + +# def test_DeleteKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ") +# # adding KPI +# response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) +# # deleting KPI +# del_response = kpi_manager_client.DeleteKpiDescriptor(response_id) +# # select KPI +# kpi_manager_client.GetKpiDescriptor(response_id) +# LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) +# assert isinstance(del_response, Empty) +# def test_GetKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") +# # adding KPI +# response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) +# # get KPI +# response = kpi_manager_client.GetKpiDescriptor(response_id) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert isinstance(response, KpiDescriptor) + +# def test_SelectKpiDescriptor(kpi_manager_client): +# LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") +# # adding KPI +# kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) +# # select KPI(s) +# response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request()) +# LOGGER.info("Response gRPC message object: {:}".format(response)) +# assert isinstance(response, KpiDescriptorList) + +def test_set_list_of_KPIs(kpi_manager_client): + LOGGER.info(" >>> test_set_list_of_KPIs: START <<< ") + KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] + # adding KPI + for kpi in KPIs_TO_SEARCH: + kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(kpi)) + # ---------- 2nd Iteration Tests ----------------- # def test_SetKpiDescriptor(kpi_manager_client): diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py index 230cfabd0..6294d1969 100644 --- a/src/kpi_manager/tests/test_messages.py +++ b/src/kpi_manager/tests/test_messages.py @@ -31,6 +31,19 @@ def create_kpi_descriptor_request(descriptor_name: str = "Test_name"): _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member return _create_kpi_request +def create_kpi_descriptor_request_a(description: str = "Test Description"): + _create_kpi_request = kpi_manager_pb2.KpiDescriptor() + _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_request.kpi_description = description + _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED + _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member + _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member + _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member + _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member + _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member + _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member + return _create_kpi_request + def create_kpi_filter_request(): _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) diff --git a/src/kpi_management/__init__.py b/src/kpi_value_api/__init__.py similarity index 100% rename from src/kpi_management/__init__.py rename to src/kpi_value_api/__init__.py diff --git a/src/kpi_management/kpi_value_api/__init__.py 
b/src/kpi_value_api/client/__init__.py similarity index 100% rename from src/kpi_management/kpi_value_api/__init__.py rename to src/kpi_value_api/client/__init__.py diff --git a/src/kpi_management/kpi_value_api/client/__init__.py b/src/kpi_value_api/service/__init__.py similarity index 100% rename from src/kpi_management/kpi_value_api/client/__init__.py rename to src/kpi_value_api/service/__init__.py diff --git a/src/kpi_value_writer/.gitlab-ci.yml b/src/kpi_value_writer/.gitlab-ci.yml new file mode 100644 index 000000000..ffd4e38ff --- /dev/null +++ b/src/kpi_value_writer/.gitlab-ci.yml @@ -0,0 +1,133 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Build, tag, and push the Docker image to the GitLab Docker registry +build kpi_manager: + variables: + IMAGE_NAME: 'kpi_manager' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: build + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + script: + - docker buildx build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile . + - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + after_script: + - docker images --filter="dangling=true" --quiet | xargs -r docker rmi + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + +# Apply unit test to the component +unit_test kpi_manager: + variables: + IMAGE_NAME: 'kpi_manager' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: unit_test + needs: + - build kpi_manager + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi + - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi + - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi + - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi + - docker container prune -f + script: + - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker pull "cockroachdb/cockroach:latest-v22.2" + - docker volume create crdb + - > + docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080 
+ --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123 + --volume "crdb:/cockroach/cockroach-data" + cockroachdb/cockroach:latest-v22.2 start-single-node + - echo "Waiting for initialization..." + - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done + - docker logs crdb + - docker ps -a + - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $CRDB_ADDRESS + - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $NATS_ADDRESS + - > + docker run --name $IMAGE_NAME -d -p 1010:1010 + --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require" + --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG + - docker ps -a + - sleep 5 + - docker logs $IMAGE_NAME + - > + docker exec -i $IMAGE_NAME bash -c + "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py" + - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" + coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' + after_script: + - docker volume rm -f crdb + - docker network rm teraflowbridge + - docker volume prune --force + - docker image prune --force + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - src/$IMAGE_NAME/tests/Dockerfile + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + artifacts: + when: always + reports: + junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml + +## Deployment of the service in Kubernetes Cluster +#deploy context: +# variables: +# IMAGE_NAME: 'context' # name of the microservice +# IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) +# stage: deploy +# needs: +# - unit test context +# # - integ_test execute +# script: +# - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml' +# - kubectl version +# - kubectl get all +# - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml" +# - kubectl get all +# # environment: +# # name: test +# # url: https://example.com +# # kubernetes: +# # namespace: test +# rules: +# - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# when: manual +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' +# when: manual diff --git a/src/kpi_management/Dockerfile b/src/kpi_value_writer/Dockerfile similarity index 88% rename from src/kpi_management/Dockerfile rename to src/kpi_value_writer/Dockerfile index 0369fc0c8..4d74030e7 100644 --- a/src/kpi_management/Dockerfile +++ b/src/kpi_value_writer/Dockerfile @@ -54,18 +54,15 @@ RUN rm *.proto RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . 
\1_pb2/g' {} \; # Create component sub-folders, get specific Python packages -RUN mkdir -p /var/teraflow/kpi_management -WORKDIR /var/teraflow/kpi_management -COPY src/kpi_management/requirements.in requirements.in +RUN mkdir -p /var/teraflow/kpi_manager +WORKDIR /var/teraflow/kpi_manager +COPY src/kpi_manager/requirements.in requirements.in RUN pip-compile --quiet --output-file=requirements.txt requirements.in RUN python3 -m pip install -r requirements.txt # Add component files into working directory WORKDIR /var/teraflow -COPY src/context/. context/ -COPY src/device/. device/ -COPY src/monitoring/. monitoring/ -COPY src/kpi_management/. kpi_management/ +COPY src/kpi_manager/. kpi_manager/ # Start the service -ENTRYPOINT ["python", "-m", "kpi_management.service"] +ENTRYPOINT ["python", "-m", "kpi_manager.service"] diff --git a/src/kpi_management/README.md b/src/kpi_value_writer/README.md similarity index 70% rename from src/kpi_management/README.md rename to src/kpi_value_writer/README.md index b73f0e8af..72ba6e559 100644 --- a/src/kpi_management/README.md +++ b/src/kpi_value_writer/README.md @@ -1,27 +1,29 @@ -# How to locally run and test KPI management service - -## ----- Update Required (Files structure has been changed) ----- +# How to locally run and test KPI manager micro-service +## --- File links need to be updated. --- ### Prerequisites The following requirements should be fulfilled before the execution of the KPI management service. -1. verify that [kpi_management.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_management.proto) file exists and grpcs file are generated sucessfully. -2. virtual enviornment exist with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/requirements.in) are installed sucessfully. -3. verify the creation of required database and table. -[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/service/database/KpiDBtests.py) python file enlist the functions to create tables and database. +1. Verify that [kpi_management.proto](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/proto/kpi_management.proto) file exists and the gRPC files are generated successfully. +2. A virtual environment exists with all the required packages listed in ["requirements.in"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/requirements.in) installed successfully. +3. Verify the creation of the required database and table. +[KPI DB test](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/database/tests/KpiDBtests.py) Python file lists the functions to create the tables and database, and [KPI Engine](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/service/database/KpiEngine.py) contains the DB connection string; update the string as per your deployment. ### Messages format templates -["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/tests/test_messages.py) python file enlist the basic gRPC messages format used during the testing. 
+["Messages"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/tests/test_messages.py) python file enlist the basic gRPC messages format used during the testing. ### Test file -["KPI management test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/tests/test_kpi_management.py) python file enlist the different tests conducted during the experiment. +["KPI management test"](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/kpi_management/kpi_manager/tests/test_kpi_manager.py) python file enlist different tests conducted during the experiment. ### Flow of execution (Kpi Maanager Service functions) -1. Call the `create_database()` and `create_tables()` functions from `Kpi_DB` class to create the required database and table if they don't exist. +1. Call the `create_database()` and `create_tables()` functions from `Kpi_DB` class to create the required database and table if they don't exist. Call `verify_tables` to verify the existence of KPI table. + 2. Call the gRPC method `SetKpiDescriptor(KpiDescriptor)->KpiId` to add the KpiDescriptor in `Kpi` DB. `KpiDescriptor` and `KpiId` are both pre-defined gRPC message types. + 3. Call `GetKpiDescriptor(KpiId)->KpiDescriptor` to read the `KpiDescriptor` from DB and `DeleteKpiDescriptor(KpiId)` to delete the `KpiDescriptor` from DB. + 4. Call `SelectKpiDescriptor(KpiDescriptorFilter)->KpiDescriptorList` to get all `KpiDescriptor` objects that matches the filter criteria. `KpiDescriptorFilter` and `KpiDescriptorList` are pre-defined gRPC message types. ## For KPI composer and KPI writer -The functionalities of KPI composer and writer is heavily dependent upon Telemetery service. Therfore, these services has other pre-requsites that are mention [here](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/requirements.in). \ No newline at end of file +The functionalities of KPI composer and writer is heavily dependent upon Telemetery service. Therfore, these services has other pre-requsites that are mention [here](https://labs.etsi.org/rep/tfs/controller/-/blob/feat/71-cttc-separation-of-monitoring/src/telemetry/requirements.in). 
\ No newline at end of file diff --git a/src/kpi_management/kpi_value_api/service/__init__.py b/src/kpi_value_writer/__init__.py similarity index 100% rename from src/kpi_management/kpi_value_api/service/__init__.py rename to src/kpi_value_writer/__init__.py diff --git a/src/kpi_management/kpi_value_writer/service/KpiValueComposer.py b/src/kpi_value_writer/service/KpiValueComposer.py similarity index 100% rename from src/kpi_management/kpi_value_writer/service/KpiValueComposer.py rename to src/kpi_value_writer/service/KpiValueComposer.py diff --git a/src/kpi_value_writer/service/KpiValueWriter.py b/src/kpi_value_writer/service/KpiValueWriter.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/kpi_management/kpi_value_writer/service/KpiWriter.py b/src/kpi_value_writer/service/KpiWriter.py similarity index 100% rename from src/kpi_management/kpi_value_writer/service/KpiWriter.py rename to src/kpi_value_writer/service/KpiWriter.py diff --git a/src/kpi_management/kpi_value_writer/__init__.py b/src/kpi_value_writer/service/__init__.py similarity index 100% rename from src/kpi_management/kpi_value_writer/__init__.py rename to src/kpi_value_writer/service/__init__.py diff --git a/src/kpi_management/service/__main__.py b/src/kpi_value_writer/service/__main__.py similarity index 96% rename from src/kpi_management/service/__main__.py rename to src/kpi_value_writer/service/__main__.py index 9f0e53246..9085bc468 100644 --- a/src/kpi_management/service/__main__.py +++ b/src/kpi_value_writer/service/__main__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,7 +21,6 @@ from common.Settings import ( from common.proto import monitoring_pb2 from monitoring.service.EventTools import EventsDeviceCollector # import updated from monitoring.service.NameMapping import NameMapping # import updated -# from .MonitoringService import MonitoringService from .KpiManagerService import KpiManagerService terminate = threading.Event() diff --git a/src/kpi_management/tests/test_kpi_composer.py b/src/kpi_value_writer/tests/test_kpi_composer.py similarity index 100% rename from src/kpi_management/tests/test_kpi_composer.py rename to src/kpi_value_writer/tests/test_kpi_composer.py diff --git a/src/kpi_management/service/database/__init__.py b/src/kpi_value_writer/tests/test_kpi_value_writer.py old mode 100644 new mode 100755 similarity index 75% rename from src/kpi_management/service/database/__init__.py rename to src/kpi_value_writer/tests/test_kpi_value_writer.py index 1549d9811..7b3362667 --- a/src/kpi_management/service/database/__init__.py +++ b/src/kpi_value_writer/tests/test_kpi_value_writer.py @@ -12,3 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging +from kpi_manager.client.KpiManagerClient import KpiManagerClient +from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request + +LOGGER = logging.getLogger(__name__) diff --git a/src/kpi_management/tests/test_kpi_writer.py b/src/kpi_value_writer/tests/test_kpi_writer.py similarity index 100% rename from src/kpi_management/tests/test_kpi_writer.py rename to src/kpi_value_writer/tests/test_kpi_writer.py diff --git a/src/kpi_management/tests/test_messages.py b/src/kpi_value_writer/tests/test_messages.py similarity index 64% rename from src/kpi_management/tests/test_messages.py rename to src/kpi_value_writer/tests/test_messages.py index 93e2d6472..7e59499e9 100755 --- a/src/kpi_management/tests/test_messages.py +++ b/src/kpi_value_writer/tests/test_messages.py @@ -18,29 +18,11 @@ from common.proto.kpi_sample_types_pb2 import KpiSampleType from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ ConnectionId, EndPointId -# ---------------------- 2nd iteration Test Messages --------------------------------- -def create_kpi_id_request(): - _kpi_id = kpi_manager_pb2.KpiId() - _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" - return _kpi_id - -def create_kpi_descriptor_request_a(descriptor_name: str = "Test_name"): - _create_kpi_request = kpi_manager_pb2.KpiDescriptor() - _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) - _create_kpi_request.kpi_description = descriptor_name - _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED - _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member - _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member - _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member - _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member - _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member - _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member - return _create_kpi_request - -def create_kpi_descriptor_request(): +# ---------------------- 3rd iteration Test Messages --------------------------------- +def create_kpi_descriptor_request(description: str = "Test Description"): _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) - _create_kpi_request.kpi_description = 'KPI Description Test' + _create_kpi_request.kpi_description = description _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member @@ -50,32 +32,64 @@ def create_kpi_descriptor_request(): _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member return _create_kpi_request -def create_kpi_filter_request_a(): - _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() - _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) - - device_id_obj = DeviceId() - endpoint_id_obj = EndPointId() - service_id_obj = ServiceId() - slice_id_obj = SliceId() - connection_id_obj = ConnectionId() - link_id_obj = LinkId() - - device_id_obj.device_uuid.uuid = "DEV1" - endpoint_id_obj.endpoint_uuid.uuid = "END1" - service_id_obj.service_uuid.uuid = "SERV1" - 
slice_id_obj.slice_uuid.uuid = "SLC1" - connection_id_obj.connection_uuid.uuid = "CON1" - link_id_obj.link_uuid.uuid = "LNK1" - - _create_kpi_filter_request.device_id.append(device_id_obj) - _create_kpi_filter_request.endpoint_id.append(endpoint_id_obj) - _create_kpi_filter_request.service_id.append(service_id_obj) - _create_kpi_filter_request.slice_id.append(slice_id_obj) - _create_kpi_filter_request.connection_id.append(connection_id_obj) - _create_kpi_filter_request.link_id.append(link_id_obj) - - return _create_kpi_filter_request +# ---------------------- 2nd iteration Test Messages --------------------------------- +# def create_kpi_id_request(): +# _kpi_id = kpi_manager_pb2.KpiId() +# _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" +# return _kpi_id + +# def create_kpi_descriptor_request_a(descriptor_name: str = "Test_name"): +# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) +# _create_kpi_request.kpi_description = descriptor_name +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member +# _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member +# return _create_kpi_request + +# def create_kpi_descriptor_request(): +# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() +# _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) +# _create_kpi_request.kpi_description = 'KPI Description Test' +# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED +# _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member +# _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member +# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member +# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member +# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member +# _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member +# return _create_kpi_request + +# def create_kpi_filter_request_a(): +# _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() +# _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) + +# device_id_obj = DeviceId() +# endpoint_id_obj = EndPointId() +# service_id_obj = ServiceId() +# slice_id_obj = SliceId() +# connection_id_obj = ConnectionId() +# link_id_obj = LinkId() + +# device_id_obj.device_uuid.uuid = "DEV1" +# endpoint_id_obj.endpoint_uuid.uuid = "END1" +# service_id_obj.service_uuid.uuid = "SERV1" +# slice_id_obj.slice_uuid.uuid = "SLC1" +# connection_id_obj.connection_uuid.uuid = "CON1" +# link_id_obj.link_uuid.uuid = "LNK1" + +# _create_kpi_filter_request.device_id.append(device_id_obj) +# _create_kpi_filter_request.endpoint_id.append(endpoint_id_obj) +# _create_kpi_filter_request.service_id.append(service_id_obj) +# _create_kpi_filter_request.slice_id.append(slice_id_obj) +# 
_create_kpi_filter_request.connection_id.append(connection_id_obj) +# _create_kpi_filter_request.link_id.append(link_id_obj) + +# return _create_kpi_filter_request # -------------------- Initial Test messages ------------------------------------- -- GitLab From b3f13a9f5daea1598dad0cd9abdf544e8c97c32f Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Fri, 12 Jul 2024 17:24:20 +0000 Subject: [PATCH 170/205] Kpi value api in progress --- .../{kpi_value.proto => kpi_value_api.proto} | 2 +- src/common/Constants.py | 6 +- src/common/tools/kafka/Variables.py | 27 ++++++ src/kpi_manager/service/KpiManagerService.py | 5 +- src/kpi_manager/tests/test_kpi_manager.py | 4 +- src/kpi_value_api/__init__.py | 2 +- src/kpi_value_api/client/KpiValueApiClient.py | 63 ++++++++++++++ src/kpi_value_api/client/__init__.py | 2 +- .../service/KpiValueApiService.py | 31 +++++++ .../service/KpiValueApiServiceServicerImpl.py | 65 +++++++++++++++ src/kpi_value_api/service/NameMapping.py | 46 ++++++++++ src/kpi_value_api/service/__init__.py | 2 +- src/kpi_value_api/tests/messages.py | 35 ++++++++ src/kpi_value_api/tests/test_kpi_value_api.py | 83 +++++++++++++++++++ 14 files changed, 363 insertions(+), 10 deletions(-) rename proto/{kpi_value.proto => kpi_value_api.proto} (92%) create mode 100644 src/common/tools/kafka/Variables.py create mode 100644 src/kpi_value_api/client/KpiValueApiClient.py create mode 100644 src/kpi_value_api/service/KpiValueApiService.py create mode 100644 src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py create mode 100644 src/kpi_value_api/service/NameMapping.py create mode 100644 src/kpi_value_api/tests/messages.py create mode 100644 src/kpi_value_api/tests/test_kpi_value_api.py diff --git a/proto/kpi_value.proto b/proto/kpi_value_api.proto similarity index 92% rename from proto/kpi_value.proto rename to proto/kpi_value_api.proto index 4f9f4edc5..19069f547 100644 --- a/proto/kpi_value.proto +++ b/proto/kpi_value_api.proto @@ -4,7 +4,7 @@ package kpi_value_api; import "context.proto"; import "kpi_manager.proto"; -service KpiValueAPI { +service KpiValueAPIService { rpc StoreKpiValues (KpiValueList) returns (context.Empty) {} rpc SelectKpiValues (KpiValueFilter) returns (KpiValueList) {} } diff --git a/src/common/Constants.py b/src/common/Constants.py index 229bd15f1..c616fed09 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -44,6 +44,7 @@ class ServiceNameEnum(Enum): POLICY = 'policy' MONITORING = 'monitoring' KPIMANAGER = 'kpiManager' + KPIVALUEAPI = 'kpiValueApi' TELEMETRYFRONTEND = 'telemetryfrontend' DLT = 'dlt' NBI = 'nbi' @@ -77,8 +78,6 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.ZTP .value : 5050, ServiceNameEnum.POLICY .value : 6060, ServiceNameEnum.MONITORING .value : 7070, - ServiceNameEnum.KPIMANAGER .value : 7071, - ServiceNameEnum.TELEMETRYFRONTEND .value : 7072, ServiceNameEnum.DLT .value : 8080, ServiceNameEnum.NBI .value : 9090, ServiceNameEnum.L3_CAD .value : 10001, @@ -94,6 +93,9 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.E2EORCHESTRATOR .value : 10050, ServiceNameEnum.OPTICALCONTROLLER .value : 10060, ServiceNameEnum.BGPLS .value : 20030, + ServiceNameEnum.KPIMANAGER .value : 30010, + ServiceNameEnum.KPIVALUEAPI .value : 30020, + ServiceNameEnum.TELEMETRYFRONTEND .value : 30050, # Used for test and debugging only ServiceNameEnum.DLT_GATEWAY .value : 50051, diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py new file mode 100644 index 000000000..afe1ee67b --- /dev/null +++ 
b/src/common/tools/kafka/Variables.py @@ -0,0 +1,27 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum + +class KafkaConfig(Enum): + SERVER_IP = "127.0.0.1:9092" + +class KafkaTopic(Enum): + REQUEST = 'topic_request' + RESPONSE = 'topic_response' + RAW = 'topic_raw' + LABELED = 'topic_labeled' + VALUE = 'topic_value' + +# create all topics after the deployments (Telemetry and Analytics) \ No newline at end of file diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py index 3868a848f..434246a43 100755 --- a/src/kpi_manager/service/KpiManagerService.py +++ b/src/kpi_manager/service/KpiManagerService.py @@ -14,10 +14,11 @@ from common.Constants import ServiceNameEnum from common.Settings import get_service_port_grpc -from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server +from monitoring.service.NameMapping import NameMapping from common.tools.service.GenericGrpcService import GenericGrpcService +from common.proto.kpi_manager_pb2_grpc import add_KpiManagerServiceServicer_to_server from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl -from monitoring.service.NameMapping import NameMapping + class KpiManagerService(GenericGrpcService): def __init__(self, name_mapping : NameMapping, cls_name: str = __name__) -> None: diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index b517c9568..2f475cc0f 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -53,7 +53,7 @@ KPIMANAGER_SERVICE_PORT = MOCKSERVICE_PORT + get_service_port_grpc(ServiceNameEn os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) os.environ[get_env_var_name(ServiceNameEnum.KPIMANAGER, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(KPIMANAGER_SERVICE_PORT) -METRICSDB_HOSTNAME = os.environ.get('METRICSDB_HOSTNAME') +# METRICSDB_HOSTNAME = os.environ.get('METRICSDB_HOSTNAME') LOGGER = logging.getLogger(__name__) @@ -230,7 +230,7 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # assert isinstance(response, KpiDescriptorList) def test_set_list_of_KPIs(kpi_manager_client): - LOGGER.info(" >>> test_set_list_of_KPIs: START <<< ") + LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ") KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] # adding KPI for kpi in KPIs_TO_SEARCH: diff --git a/src/kpi_value_api/__init__.py b/src/kpi_value_api/__init__.py index 1549d9811..3ee6f7071 100644 --- a/src/kpi_value_api/__init__.py +++ b/src/kpi_value_api/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file 
except in compliance with the License. diff --git a/src/kpi_value_api/client/KpiValueApiClient.py b/src/kpi_value_api/client/KpiValueApiClient.py new file mode 100644 index 000000000..adf17da5d --- /dev/null +++ b/src/kpi_value_api/client/KpiValueApiClient.py @@ -0,0 +1,63 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc, logging + +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc +from common.tools.client.RetryDecorator import retry, delay_exponential +from common.tools.grpc.Tools import grpc_message_to_json_string + +from common.proto.context_pb2 import Empty +from common.proto.kpi_value_api_pb2 import KpiValue, KpiValueList, KpiValueType, KpiValueFilter +from common.proto.kpi_value_api_pb2_grpc import KpiValueAPIServiceStub + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 10 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +class KpiValueApiClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.KPIVALUEAPI) + if not port: port = get_service_port_grpc(ServiceNameEnum.KPIVALUEAPI) + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = None + self.stub = None + self.connect() + LOGGER.debug('Channel created') + + def connect(self): + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = KpiValueAPIServiceStub(self.channel) + + def close(self): + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + @RETRY_DECORATOR + def StoreKpiValues(self, request: KpiValueList) -> Empty: + LOGGER.debug('StoreKpiValues: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.StoreKpiValues(request) + LOGGER.debug('StoreKpiValues result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + @RETRY_DECORATOR + def SelectKpiValues(self, request: KpiValueFilter) -> KpiValueList: + LOGGER.debug('SelectKpiValues: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.SelectKpiValues(request) + LOGGER.debug('SelectKpiValues result: {:s}'.format(grpc_message_to_json_string(response))) + return response \ No newline at end of file diff --git a/src/kpi_value_api/client/__init__.py b/src/kpi_value_api/client/__init__.py index 1549d9811..3ee6f7071 100644 --- a/src/kpi_value_api/client/__init__.py +++ b/src/kpi_value_api/client/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
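KpiValueApiClient mirrors the other TFS gRPC clients: it resolves its endpoint from the environment, opens an insecure channel, and wraps each stub call in the exponential-backoff retry decorator. A short usage sketch, assuming the KPI Value API service is deployed and the UUID below is replaced by a real KPI identifier:

```python
# Hypothetical usage of the new client; the UUID is a placeholder, not a real KPI.
from common.proto.kpi_value_api_pb2 import KpiValue, KpiValueList
from kpi_value_api.client.KpiValueApiClient import KpiValueApiClient

client = KpiValueApiClient()   # endpoint resolved via ServiceNameEnum.KPIVALUEAPI

kpi_value = KpiValue()
kpi_value.kpi_id.kpi_id.uuid      = '123e4567-e89b-12d3-a456-426614174000'
kpi_value.timestamp.timestamp     = 1720000000.0
kpi_value.kpi_value_type.floatVal = 30.5

# The servicer publishes each value in the list to Kafka (KafkaTopic.VALUE).
client.StoreKpiValues(KpiValueList(kpi_value_list=[kpi_value]))
```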
diff --git a/src/kpi_value_api/service/KpiValueApiService.py b/src/kpi_value_api/service/KpiValueApiService.py new file mode 100644 index 000000000..2fb24aaac --- /dev/null +++ b/src/kpi_value_api/service/KpiValueApiService.py @@ -0,0 +1,31 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .NameMapping import NameMapping +from common.Constants import ServiceNameEnum +from common.Settings import get_service_port_grpc +from common.tools.service.GenericGrpcService import GenericGrpcService +from .KpiValueApiServiceServicerImpl import KpiValueApiServiceServicerImpl +from common.proto.kpi_value_api_pb2_grpc import add_KpiValueAPIServiceServicer_to_server + + +class KpiValueApiService(GenericGrpcService): + def __init__(self, name_mapping : NameMapping, cls_name : str = __name__ ) -> None: + port = get_service_port_grpc(ServiceNameEnum.KPIVALUEAPI) + super().__init__(port, cls_name=cls_name) + self.kpiValueApiService_servicer = KpiValueApiServiceServicerImpl(name_mapping) + + def install_servicers(self): + add_KpiValueAPIServiceServicer_to_server(self.kpiValueApiService_servicer, self.server) \ No newline at end of file diff --git a/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py b/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py new file mode 100644 index 000000000..e0b8b550a --- /dev/null +++ b/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py @@ -0,0 +1,65 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging, grpc +from typing import Tuple, Any +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method +from common.tools.kafka.Variables import KafkaConfig, KafkaTopic + +from common.proto.context_pb2 import Empty +from common.proto.kpi_value_api_pb2_grpc import KpiValueAPIServiceServicer +from common.proto.kpi_value_api_pb2 import KpiValueList, KpiValueFilter + +from confluent_kafka import Producer as KafkaProducer + +from .NameMapping import NameMapping + + +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('KpiValueAPI', 'NBIgRPC') + +class KpiValueApiServiceServicerImpl(KpiValueAPIServiceServicer): + def __init__(self, name_mapping : NameMapping): + LOGGER.debug('Init KpiValueApiService') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def StoreKpiValues(self, request: KpiValueList, grpc_context: grpc.ServicerContext + ) -> Empty: + LOGGER.debug('StoreKpiValues: Received gRPC message object: {:}'.format(request)) + producer_obj = KafkaProducer({'bootstrap.servers' : KafkaConfig.SERVER_IP.value}) + for kpi_value in request.kpi_value_list: + kpi_value_to_produce : Tuple [str, Any, Any] = ( + kpi_value.kpi_id.kpi_id, # kpi_value.kpi_id.kpi_id.uuid + kpi_value.timestamp, # kpi_value.timestamp.timestamp + kpi_value.kpi_value_type # kpi_value.kpi_value_type.(many options) + ) + LOGGER.debug('KPI to produce is {:}'.format(kpi_value_to_produce)) + msg_key = "gRPC-KpiValueApi" # str(__class__.__name__) + # write this KPI to Kafka + producer_obj.produce(KafkaTopic.VALUE.value, + key = msg_key, + value = str(kpi_value_to_produce), + callback = self.delivery_callback + ) + producer_obj.flush() + return Empty() + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def SelectKpiValues(self, request: KpiValueFilter, grpc_context: grpc.ServicerContext + ) -> KpiValueList: + LOGGER.debug('SelectKpiValues: Received gRPC message object: {:}'.format(request)) + + def delivery_callback(self, err, msg): + if err: print(f'Message delivery failed: {err}') + else: print(f'Message delivered to topic {msg.topic()}') diff --git a/src/kpi_value_api/service/NameMapping.py b/src/kpi_value_api/service/NameMapping.py new file mode 100644 index 000000000..f98e367b1 --- /dev/null +++ b/src/kpi_value_api/service/NameMapping.py @@ -0,0 +1,46 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
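+
+# NameMapping is a small thread-safe cache of device/endpoint UUID-to-name
+# mappings; every accessor takes a single lock, so one instance can be shared
+# safely across threads.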
+
+import threading
+from typing import Dict, Optional
+
+class NameMapping:
+    def __init__(self) -> None:
+        self.__lock = threading.Lock()
+        self.__device_to_name   : Dict[str, str] = dict()
+        self.__endpoint_to_name : Dict[str, str] = dict()
+
+    def get_device_name(self, device_uuid : str) -> Optional[str]:
+        with self.__lock:
+            return self.__device_to_name.get(device_uuid)
+
+    def get_endpoint_name(self, endpoint_uuid : str) -> Optional[str]:
+        with self.__lock:
+            return self.__endpoint_to_name.get(endpoint_uuid)
+
+    def set_device_name(self, device_uuid : str, device_name : str) -> None:
+        with self.__lock:
+            self.__device_to_name[device_uuid] = device_name
+
+    def set_endpoint_name(self, endpoint_uuid : str, endpoint_name : str) -> None:
+        with self.__lock:
+            self.__endpoint_to_name[endpoint_uuid] = endpoint_name
+
+    def delete_device_name(self, device_uuid : str) -> None:
+        with self.__lock:
+            self.__device_to_name.pop(device_uuid, None)
+
+    def delete_endpoint_name(self, endpoint_uuid : str) -> None:
+        with self.__lock:
+            self.__endpoint_to_name.pop(endpoint_uuid, None)
diff --git a/src/kpi_value_api/service/__init__.py b/src/kpi_value_api/service/__init__.py
index 1549d9811..3ee6f7071 100644
--- a/src/kpi_value_api/service/__init__.py
+++ b/src/kpi_value_api/service/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_value_api/tests/messages.py b/src/kpi_value_api/tests/messages.py
new file mode 100644
index 000000000..b06f4ab1d
--- /dev/null
+++ b/src/kpi_value_api/tests/messages.py
@@ -0,0 +1,35 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid, time
+from common.proto.kpi_value_api_pb2 import KpiValue, KpiValueList
+
+
+def create_kpi_value_list():
+    _create_kpi_value_list = KpiValueList()
+    # To run this experiment successfully, an already existing UUID in the KPI DB is necessary,
+    # because the UUID is used to get the descriptor from the KPI DB.
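+    # (The fixed UUIDs here are expected to exist in the deployed KPI DB -- e.g.
+    #  values returned earlier by KpiManagerClient.SetKpiDescriptor -- while the
+    #  random uuid4 exercises the descriptor-not-found path.)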
+ EXISTING_KPI_IDs = ["198a5a83-ddd3-4818-bdcb-e468eda03e18", + "c288ea27-db40-419e-81d3-f675df22c8f4", + str(uuid.uuid4())] + + for kpi_id_uuid in EXISTING_KPI_IDs: + kpi_value_object = KpiValue() + kpi_value_object.kpi_id.kpi_id.uuid = kpi_id_uuid + kpi_value_object.timestamp.timestamp = float(time.time()) + kpi_value_object.kpi_value_type.floatVal = 100 + + _create_kpi_value_list.kpi_value_list.append(kpi_value_object) + + return _create_kpi_value_list \ No newline at end of file diff --git a/src/kpi_value_api/tests/test_kpi_value_api.py b/src/kpi_value_api/tests/test_kpi_value_api.py new file mode 100644 index 000000000..bfa9485a8 --- /dev/null +++ b/src/kpi_value_api/tests/test_kpi_value_api.py @@ -0,0 +1,83 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os, logging, pytest + +from common.proto.context_pb2 import Empty +from common.Constants import ServiceNameEnum +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) + +from kpi_value_api.service.NameMapping import NameMapping +from kpi_value_api.service.KpiValueApiService import KpiValueApiService +from kpi_value_api.client.KpiValueApiClient import KpiValueApiClient +from kpi_value_api.tests.messages import create_kpi_value_list + +LOCAL_HOST = '127.0.0.1' + +KPIVALUEAPI_SERVICE_PORT = get_service_port_grpc(ServiceNameEnum.KPIVALUEAPI) # type: ignore +os.environ[get_env_var_name(ServiceNameEnum.KPIVALUEAPI, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.KPIVALUEAPI, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(KPIVALUEAPI_SERVICE_PORT) + +LOGGER = logging.getLogger(__name__) + +# This fixture will be requested by test cases and last during testing session +@pytest.fixture(scope='session') +def kpi_value_api_service(): + LOGGER.info('Initializing KpiValueApiService...') + name_mapping = NameMapping() + # _service = MonitoringService(name_mapping) + _service = KpiValueApiService(name_mapping) + _service.start() + + # yield the server, when test finishes, execution will resume to stop it + LOGGER.info('Yielding KpiValueApiService...') + yield _service + + LOGGER.info('Terminating KpiValueApiService...') + _service.stop() + + LOGGER.info('Terminated KpiValueApiService...') + +# This fixture will be requested by test cases and last during testing session. +# The client requires the server, so client fixture has the server as dependency. 
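+# (scope='session' keeps a single server/client pair alive for the whole pytest
+#  run instead of recreating them for every test.)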
+@pytest.fixture(scope='session')
+def kpi_value_api_client(kpi_value_api_service : KpiValueApiService ):
+    LOGGER.info('Initializing KpiValueApiClient...')
+    _client = KpiValueApiClient()
+
+    # yield the client; when the test session finishes, execution resumes to close it
+    LOGGER.info('Yielding KpiValueApiClient...')
+    yield _client
+
+    LOGGER.info('Closing KpiValueApiClient...')
+    _client.close()
+
+    LOGGER.info('Closed KpiValueApiClient...')
+
+##################################################
+# Prepare Environment, should be the first test
+##################################################
+
+# To be added here
+
+###########################
+# Tests Implementation of Kpi Value Api
+###########################
+
+def test_store_kpi_values(kpi_value_api_client):
+    LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ")
+    response = kpi_value_api_client.StoreKpiValues(create_kpi_value_list())
+    assert isinstance(response, Empty)
--
GitLab

From 5883eac9abcf621366c84644bb794c1eb2cc453d Mon Sep 17 00:00:00 2001
From: Waleed Akbar
Date: Mon, 15 Jul 2024 14:18:31 +0000
Subject: [PATCH 171/205] KPI Value writer and API in progress

---
 src/common/tools/kafka/Variables.py           | 47 ++++++-
 src/kpi_manager/service/__main__.py           |  2 +-
 src/kpi_manager/tests/test_kpi_manager.py     | 28 ++--
 .../service/KpiValueApiServiceServicerImpl.py | 30 +++--
 src/kpi_value_api/tests/test_kpi_value_api.py |  9 ++
 .../service/KpiValueComposer.py               |  2 +-
 .../service/KpiValueWriter.py                 | 127 ++++++++++++++++++
 .../tests/test_kpi_value_writer.py            | 15 ++-
 8 files changed, 230 insertions(+), 30 deletions(-)

diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py
index afe1ee67b..ba58e31ef 100644
--- a/src/common/tools/kafka/Variables.py
+++ b/src/common/tools/kafka/Variables.py
@@ -12,10 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 from enum import Enum
+from confluent_kafka import KafkaException
+from confluent_kafka.admin import AdminClient, NewTopic
+
+
+LOGGER = logging.getLogger(__name__)
 
 class KafkaConfig(Enum):
-    SERVER_IP = "127.0.0.1:9092"
+    SERVER_IP    = "127.0.0.1:9092"
+    ADMIN_CLIENT = AdminClient({'bootstrap.servers': SERVER_IP})
 
 class KafkaTopic(Enum):
     REQUEST  = 'topic_request'
@@ -24,4 +31,42 @@
     LABELED  = 'topic_labeled'
     VALUE    = 'topic_value'
 
+    @staticmethod
+    def create_all_topics() -> bool:
+        """
+        Method to create Kafka topics defined as class members
+        """
+        # LOGGER.debug("Topics to be created: {:}".format(KafkaTopic.__members__.values()))
+        # LOGGER.debug("Topics to be created: {:}".format(KafkaTopic.__members__.keys()))
+        # LOGGER.debug("Topics to be created: {:}".format([member.value for member in KafkaTopic]))
+        all_topics = [member.value for member in KafkaTopic]
+        if( KafkaTopic.create_new_topic_if_not_exists( all_topics )):
+            LOGGER.debug("All topics created successfully")
+            return True
+        else:
+            LOGGER.debug("Error creating all topics")
+            return False
+
+    @staticmethod
+    def create_new_topic_if_not_exists(new_topics: list) -> bool:
+        """
+        Method to create Kafka topic if it does not exist.
+        Args:
+            list of topic: containing the topic name(s) to be created on Kafka
+        """
+        LOGGER.debug("Received topic list: {:}".format(new_topics))
+        for topic in new_topics:
+            try:
+                topic_metadata = KafkaConfig.ADMIN_CLIENT.value.list_topics(timeout=5)
+                if topic not in topic_metadata.topics:
+                    # If the topic does not exist, create a new topic
+                    print(f"Topic '{topic}' does not exist.
Creating...") + LOGGER.debug("Topic {:} does not exist. Creating...".format(topic)) + new_topic = NewTopic(topic, num_partitions=1, replication_factor=1) + KafkaConfig.ADMIN_CLIENT.value.create_topics([new_topic]) + except Exception as e: + LOGGER.debug("Failed to create topic: {:}".format(e)) + return False + return True + # create all topics after the deployments (Telemetry and Analytics) \ No newline at end of file diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py index 9085bc468..9dd0f97cf 100644 --- a/src/kpi_manager/service/__main__.py +++ b/src/kpi_manager/service/__main__.py @@ -31,7 +31,7 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name terminate.set() def start_kpi_manager(name_mapping : NameMapping): - LOGGER.info('Start Monitoring...',) + LOGGER.info('Start Kpi Manager...',) events_collector = EventsDeviceCollector(name_mapping) events_collector.start() diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index 2f475cc0f..968eafbfe 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -211,14 +211,14 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response)) # assert isinstance(del_response, Empty) -# def test_GetKpiDescriptor(kpi_manager_client): -# LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") -# # adding KPI -# response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) -# # get KPI -# response = kpi_manager_client.GetKpiDescriptor(response_id) -# LOGGER.info("Response gRPC message object: {:}".format(response)) -# assert isinstance(response, KpiDescriptor) +def test_GetKpiDescriptor(kpi_manager_client): + LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ") + # adding KPI + response_id = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request()) + # get KPI + response = kpi_manager_client.GetKpiDescriptor(response_id) + LOGGER.info("Response gRPC message object: {:}".format(response)) + assert isinstance(response, KpiDescriptor) # def test_SelectKpiDescriptor(kpi_manager_client): # LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ") @@ -229,12 +229,12 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab # LOGGER.info("Response gRPC message object: {:}".format(response)) # assert isinstance(response, KpiDescriptorList) -def test_set_list_of_KPIs(kpi_manager_client): - LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ") - KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] - # adding KPI - for kpi in KPIs_TO_SEARCH: - kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(kpi)) +# def test_set_list_of_KPIs(kpi_manager_client): +# LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ") +# KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] +# # adding KPI +# for kpi in KPIs_TO_SEARCH: +# kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(kpi)) # ---------- 2nd Iteration Tests ----------------- diff --git a/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py b/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py index e0b8b550a..ce1dd1282 100644 --- a/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py +++ b/src/kpi_value_api/service/KpiValueApiServiceServicerImpl.py @@ -12,8 +12,8 @@ # See the 
License for the specific language governing permissions and # limitations under the License. -import logging, grpc -from typing import Tuple, Any +import logging, grpc, json +from typing import Tuple, Any, List, Dict from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method from common.tools.kafka.Variables import KafkaConfig, KafkaTopic @@ -37,22 +37,30 @@ class KpiValueApiServiceServicerImpl(KpiValueAPIServiceServicer): def StoreKpiValues(self, request: KpiValueList, grpc_context: grpc.ServicerContext ) -> Empty: LOGGER.debug('StoreKpiValues: Received gRPC message object: {:}'.format(request)) - producer_obj = KafkaProducer({'bootstrap.servers' : KafkaConfig.SERVER_IP.value}) + + producer_obj = KafkaProducer({ + 'bootstrap.servers' : KafkaConfig.SERVER_IP.value + }) + for kpi_value in request.kpi_value_list: kpi_value_to_produce : Tuple [str, Any, Any] = ( kpi_value.kpi_id.kpi_id, # kpi_value.kpi_id.kpi_id.uuid kpi_value.timestamp, # kpi_value.timestamp.timestamp - kpi_value.kpi_value_type # kpi_value.kpi_value_type.(many options) + kpi_value.kpi_value_type # kpi_value.kpi_value_type.(many options) how? ) LOGGER.debug('KPI to produce is {:}'.format(kpi_value_to_produce)) msg_key = "gRPC-KpiValueApi" # str(__class__.__name__) # write this KPI to Kafka - producer_obj.produce(KafkaTopic.VALUE.value, - key = msg_key, - value = str(kpi_value_to_produce), - callback = self.delivery_callback - ) + + producer_obj.produce( + KafkaTopic.VALUE.value, + key = msg_key, + # value = json.dumps(kpi_value_to_produce), + value = kpi_value.SerializeToString(), + callback = self.delivery_callback + ) producer_obj.flush() + return Empty() @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) @@ -61,5 +69,5 @@ class KpiValueApiServiceServicerImpl(KpiValueAPIServiceServicer): LOGGER.debug('SelectKpiValues: Received gRPC message object: {:}'.format(request)) def delivery_callback(self, err, msg): - if err: print(f'Message delivery failed: {err}') - else: print(f'Message delivered to topic {msg.topic()}') + if err: LOGGER.debug('Message delivery failed: {:}'.format(err)) + else: print('Message delivered to topic {:}'.format(msg.topic())) diff --git a/src/kpi_value_api/tests/test_kpi_value_api.py b/src/kpi_value_api/tests/test_kpi_value_api.py index bfa9485a8..519a61704 100644 --- a/src/kpi_value_api/tests/test_kpi_value_api.py +++ b/src/kpi_value_api/tests/test_kpi_value_api.py @@ -17,14 +17,17 @@ import os, logging, pytest from common.proto.context_pb2 import Empty from common.Constants import ServiceNameEnum +from common.tools.kafka.Variables import KafkaTopic from common.Settings import ( ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) + from kpi_value_api.service.NameMapping import NameMapping from kpi_value_api.service.KpiValueApiService import KpiValueApiService from kpi_value_api.client.KpiValueApiClient import KpiValueApiClient from kpi_value_api.tests.messages import create_kpi_value_list + LOCAL_HOST = '127.0.0.1' KPIVALUEAPI_SERVICE_PORT = get_service_port_grpc(ServiceNameEnum.KPIVALUEAPI) # type: ignore @@ -77,6 +80,12 @@ def kpi_value_api_client(kpi_value_api_service : KpiValueApiService ): # Tests Implementation of Kpi Value Api ########################### +def test_validate_kafka_topics(): + LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ") + response = KafkaTopic.create_all_topics() + assert isinstance(response, bool) + + def test_store_kpi_values(kpi_value_api_client): LOGGER.debug(" >>> test_set_list_of_KPIs: 
START <<< ")
     response = kpi_value_api_client.StoreKpiValues(create_kpi_value_list())
diff --git a/src/kpi_value_writer/service/KpiValueComposer.py b/src/kpi_value_writer/service/KpiValueComposer.py
index 61e558121..e2f315eda 100644
--- a/src/kpi_value_writer/service/KpiValueComposer.py
+++ b/src/kpi_value_writer/service/KpiValueComposer.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_value_writer/service/KpiValueWriter.py b/src/kpi_value_writer/service/KpiValueWriter.py
index e69de29bb..2142d76df 100644
--- a/src/kpi_value_writer/service/KpiValueWriter.py
+++ b/src/kpi_value_writer/service/KpiValueWriter.py
@@ -0,0 +1,127 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc
+import json
+import logging
+import threading
+from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
+
+from common.proto.kpi_value_api_pb2 import KpiValue
+from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId
+from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceStub
+
+from confluent_kafka import KafkaError
+from confluent_kafka import Consumer as KafkaConsumer
+
+from kpi_manager.client.KpiManagerClient import KpiManagerClient
+from monitoring.service.NameMapping import NameMapping
+from kpi_manager.service.KpiManagerService import KpiManagerService
+
+
+LOGGER = logging.getLogger(__name__)
+ACTIVE_CONSUMERS = []
+
+class KpiValueWriter:
+
+    @staticmethod
+    def RunKafkaConsumer():
+        thread = threading.Thread(target=KpiValueWriter.KafkaConsumer, args=())
+        ACTIVE_CONSUMERS.append(thread)
+        thread.start()
+
+    @staticmethod
+    def KafkaConsumer():
+        kafka_consumer = KafkaConsumer(
+            { 'bootstrap.servers' : KafkaConfig.SERVER_IP.value,
+              'group.id'          : __class__,
+              'auto.offset.reset' : 'latest'}
+        )
+        kafka_consumer.subscribe([KafkaTopic.VALUE.value])
+        LOGGER.debug("Kafka Consumer start listening on topic: {:}".format(KafkaTopic.VALUE.value))
+        print("Kafka Consumer start listening on topic: {:}".format(KafkaTopic.VALUE.value))
+        while True:
+            raw_kpi = kafka_consumer.poll(1.0)
+            if raw_kpi is None:
+                continue
+            elif raw_kpi.error():
+                if raw_kpi.error().code() == KafkaError._PARTITION_EOF:
+                    continue
+                else:
+                    print("Consumer error: {}".format(raw_kpi.error()))
+                    continue
+            try:
+                kpi_value = KpiValue()
+                kpi_value.ParseFromString(raw_kpi.value())
+                LOGGER.debug("Received KPI : {:}".format(kpi_value))
+                print("Received KPI : {:}".format(kpi_value))
+                KpiValueWriter.get_kpi_descriptor_from_db(kpi_value.kpi_id.kpi_id.uuid)
+            except Exception as e:
+                print("Error detail: {:}".format(e))
+                continue
+
+    @staticmethod
+    def get_kpi_descriptor_from_db(kpi_value_uuid: str):
+        print("--- START -----")
+
+        kpi_id = KpiId()
+        kpi_id.kpi_id.uuid = kpi_value_uuid
+        print("KpiId generated: {:}".format(kpi_id))
+
+        kpi_manager_client = KpiManagerClient()
+        print("Kpi manager client created: {:}".format(kpi_manager_client))
+
+        try:
+            kpi_descriptor_object = KpiDescriptor()
+            kpi_descriptor_object = kpi_manager_client.GetKpiDescriptor(kpi_id)
+
+            print("kpi descriptor received: {:}".format(kpi_descriptor_object))
+            if isinstance (kpi_descriptor_object, KpiDescriptor):
+                LOGGER.debug("Extracted row: {:}".format(kpi_descriptor_object))
+            else:
+                LOGGER.debug("Error in extracting row {:}".format(kpi_descriptor_object))
+        except Exception as e:
+            print ("Unable to get Descriptor. Error: {:}".format(e))
+
+
+
+def kpi_manager_service():
+    LOGGER.info('Initializing KpiManagerService...')
+    name_mapping = NameMapping()
+    # _service = MonitoringService(name_mapping)
+    _service = KpiManagerService(name_mapping)
+    _service.start()
+
+    # yield the server, when test finishes, execution will resume to stop it
+    LOGGER.info('Yielding KpiManagerService...')
+    yield _service
+
+    LOGGER.info('Terminating KpiManagerService...')
+    _service.stop()
+
+    LOGGER.info('Terminated KpiManagerService...')
+
+
+def kpi_manager_client_a(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name,unused-argument
+    LOGGER.info('Initializing KpiManagerClient...')
+    _client = KpiManagerClient()
+
+    # yield the client; when the test session finishes, execution resumes to close it
+    LOGGER.info('Yielding KpiManagerClient...')
+    yield _client
+
+    LOGGER.info('Closing KpiManagerClient...')
+    _client.close()
+
+    LOGGER.info('Closed KpiManagerClient...')
\ No newline at end of file
diff --git a/src/kpi_value_writer/tests/test_kpi_value_writer.py b/src/kpi_value_writer/tests/test_kpi_value_writer.py
index 7b3362667..a87a0b6ea 100755
--- a/src/kpi_value_writer/tests/test_kpi_value_writer.py
+++ b/src/kpi_value_writer/tests/test_kpi_value_writer.py
@@ -13,7 +13,18 @@
 # limitations under the License.
import logging -from kpi_manager.client.KpiManagerClient import KpiManagerClient -from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request +from kpi_value_writer.service.KpiValueWriter import KpiValueWriter +from common.tools.kafka.Variables import KafkaTopic + LOGGER = logging.getLogger(__name__) + +# def test_validate_kafka_topics(): +# LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ") +# response = KafkaTopic.create_all_topics() +# assert isinstance(response, bool) + +def test_KafkaConsumer(): + LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ") + KpiValueWriter.RunKafkaConsumer() + \ No newline at end of file -- GitLab From 2f5b421566f1619fd5b59c69812a4dcbe463eab9 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Tue, 16 Jul 2024 10:05:24 +0000 Subject: [PATCH 172/205] Changes for KPI value API and writer (in progress) --- src/kpi_manager/service/NameMapping.py | 46 ++++++++++++++ src/kpi_manager/service/__main__.py | 60 +++++++++---------- src/kpi_value_api/tests/messages.py | 4 +- .../service/KpiValueWriter.py | 32 ---------- src/kpi_value_writer/service/NameMapping.py | 46 ++++++++++++++ 5 files changed, 124 insertions(+), 64 deletions(-) create mode 100644 src/kpi_manager/service/NameMapping.py create mode 100644 src/kpi_value_writer/service/NameMapping.py diff --git a/src/kpi_manager/service/NameMapping.py b/src/kpi_manager/service/NameMapping.py new file mode 100644 index 000000000..f98e367b1 --- /dev/null +++ b/src/kpi_manager/service/NameMapping.py @@ -0,0 +1,46 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
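+
+# (Identical copy of the thread-safe NameMapping already added under
+#  src/kpi_value_api/service/; each component carries its own copy.)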
+ +import threading +from typing import Dict, Optional + +class NameMapping: + def __init__(self) -> None: + self.__lock = threading.Lock() + self.__device_to_name : Dict[str, str] = dict() + self.__endpoint_to_name : Dict[str, str] = dict() + + def get_device_name(self, device_uuid : str) -> Optional[str]: + with self.__lock: + return self.__device_to_name.get(device_uuid) + + def get_endpoint_name(self, endpoint_uuid : str) -> Optional[str]: + with self.__lock: + return self.__endpoint_to_name.get(endpoint_uuid) + + def set_device_name(self, device_uuid : str, device_name : str) -> None: + with self.__lock: + self.__device_to_name[device_uuid] = device_name + + def set_endpoint_name(self, endpoint_uuid : str, endpoint_name : str) -> None: + with self.__lock: + self.__endpoint_to_name[endpoint_uuid] = endpoint_name + + def delete_device_name(self, device_uuid : str) -> None: + with self.__lock: + self.__device_to_name.pop(device_uuid, None) + + def delete_endpoint_name(self, endpoint_uuid : str) -> None: + with self.__lock: + self.__endpoint_to_name.pop(endpoint_uuid, None) diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py index 9dd0f97cf..ef39263ff 100644 --- a/src/kpi_manager/service/__main__.py +++ b/src/kpi_manager/service/__main__.py @@ -30,34 +30,34 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name LOGGER.warning('Terminate signal received') terminate.set() -def start_kpi_manager(name_mapping : NameMapping): - LOGGER.info('Start Kpi Manager...',) - - events_collector = EventsDeviceCollector(name_mapping) - events_collector.start() - - # TODO: redesign this method to be more clear and clean - - # Iterate while terminate is not set - while not terminate.is_set(): - list_new_kpi_ids = events_collector.listen_events() - - # Monitor Kpis - if bool(list_new_kpi_ids): - for kpi_id in list_new_kpi_ids: - # Create Monitor Kpi Requests - monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() - monitor_kpi_request.kpi_id.CopyFrom(kpi_id) - monitor_kpi_request.monitoring_window_s = 86400 - monitor_kpi_request.sampling_rate_s = 10 - events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) +# def start_kpi_manager(name_mapping : NameMapping): +# LOGGER.info('Start Kpi Manager...',) + +# events_collector = EventsDeviceCollector(name_mapping) +# events_collector.start() + +# # TODO: redesign this method to be more clear and clean + +# # Iterate while terminate is not set +# while not terminate.is_set(): +# list_new_kpi_ids = events_collector.listen_events() + +# # Monitor Kpis +# if bool(list_new_kpi_ids): +# for kpi_id in list_new_kpi_ids: +# # Create Monitor Kpi Requests +# monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() +# monitor_kpi_request.kpi_id.CopyFrom(kpi_id) +# monitor_kpi_request.monitoring_window_s = 86400 +# monitor_kpi_request.sampling_rate_s = 10 +# events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) - time.sleep(0.5) # let other tasks run; do not overload CPU - else: - # Terminate is set, looping terminates - LOGGER.warning("Stopping execution...") +# time.sleep(0.5) # let other tasks run; do not overload CPU +# else: +# # Terminate is set, looping terminates +# LOGGER.warning("Stopping execution...") - events_collector.start() +# events_collector.start() def main(): global LOGGER # pylint: disable=global-statement @@ -76,7 +76,7 @@ def main(): signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) - LOGGER.info('Starting...') + 
LOGGER.debug('Starting...') # Start metrics server metrics_port = get_metrics_port() @@ -91,15 +91,15 @@ def main(): grpc_service = KpiManagerService(name_mapping) grpc_service.start() - start_kpi_manager(name_mapping) + # start_kpi_manager(name_mapping) # Wait for Ctrl+C or termination signal while not terminate.wait(timeout=1.0): pass - LOGGER.info('Terminating...') + LOGGER.debug('Terminating...') grpc_service.stop() - LOGGER.info('Bye') + LOGGER.debug('Bye') return 0 if __name__ == '__main__': diff --git a/src/kpi_value_api/tests/messages.py b/src/kpi_value_api/tests/messages.py index b06f4ab1d..fc883db1f 100644 --- a/src/kpi_value_api/tests/messages.py +++ b/src/kpi_value_api/tests/messages.py @@ -20,8 +20,8 @@ def create_kpi_value_list(): _create_kpi_value_list = KpiValueList() # To run this experiment sucessfully, already existing UUID in KPI DB in necessary. # because the UUID is used to get the descriptor form KPI DB. - EXISTING_KPI_IDs = ["198a5a83-ddd3-4818-bdcb-e468eda03e18", - "c288ea27-db40-419e-81d3-f675df22c8f4", + EXISTING_KPI_IDs = ["725ce3ad-ac67-4373-bd35-8cd9d6a86e09", + str(uuid.uuid4()), str(uuid.uuid4())] for kpi_id_uuid in EXISTING_KPI_IDs: diff --git a/src/kpi_value_writer/service/KpiValueWriter.py b/src/kpi_value_writer/service/KpiValueWriter.py index 2142d76df..1385ac564 100644 --- a/src/kpi_value_writer/service/KpiValueWriter.py +++ b/src/kpi_value_writer/service/KpiValueWriter.py @@ -93,35 +93,3 @@ class KpiValueWriter: LOGGER.debug("Error in extracting row {:}".format(kpi_descriptor_object)) except Exception as e: print ("Unable to get Descriptor. Error: {:}".format(e)) - - - -def kpi_manager_service(): - LOGGER.info('Initializing KpiManagerService...') - name_mapping = NameMapping() - # _service = MonitoringService(name_mapping) - _service = KpiManagerService(name_mapping) - _service.start() - - # yield the server, when test finishes, execution will resume to stop it - LOGGER.info('Yielding KpiManagerService...') - yield _service - - LOGGER.info('Terminating KpiManagerService...') - _service.stop() - - LOGGER.info('Terminated KpiManagerService...') - - -def kpi_manager_client_a(kpi_manager_service : KpiManagerService): # pylint: disable=redefined-outer-name,unused-argument - LOGGER.info('Initializing KpiManagerClient...') - _client = KpiManagerClient() - - # yield the server, when test finishes, execution will resume to stop it - LOGGER.info('Yielding KpiManagerClient...') - yield _client - - LOGGER.info('Closing KpiManagerClient...') - _client.close() - - LOGGER.info('Closed KpiManagerClient...') \ No newline at end of file diff --git a/src/kpi_value_writer/service/NameMapping.py b/src/kpi_value_writer/service/NameMapping.py new file mode 100644 index 000000000..f98e367b1 --- /dev/null +++ b/src/kpi_value_writer/service/NameMapping.py @@ -0,0 +1,46 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading +from typing import Dict, Optional + +class NameMapping: + def __init__(self) -> None: + self.__lock = threading.Lock() + self.__device_to_name : Dict[str, str] = dict() + self.__endpoint_to_name : Dict[str, str] = dict() + + def get_device_name(self, device_uuid : str) -> Optional[str]: + with self.__lock: + return self.__device_to_name.get(device_uuid) + + def get_endpoint_name(self, endpoint_uuid : str) -> Optional[str]: + with self.__lock: + return self.__endpoint_to_name.get(endpoint_uuid) + + def set_device_name(self, device_uuid : str, device_name : str) -> None: + with self.__lock: + self.__device_to_name[device_uuid] = device_name + + def set_endpoint_name(self, endpoint_uuid : str, endpoint_name : str) -> None: + with self.__lock: + self.__endpoint_to_name[endpoint_uuid] = endpoint_name + + def delete_device_name(self, device_uuid : str) -> None: + with self.__lock: + self.__device_to_name.pop(device_uuid, None) + + def delete_endpoint_name(self, endpoint_uuid : str) -> None: + with self.__lock: + self.__endpoint_to_name.pop(endpoint_uuid, None) -- GitLab From 9f94eb55a31476fd2d14aec532def2f541c05ab7 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Wed, 17 Jul 2024 11:19:19 +0000 Subject: [PATCH 173/205] kpi_writer_to_prom added --- .../run_tests_locally-kpi-prom-writer.sh | 13 +- src/kpi_manager/requirements.in | 48 ++++-- .../service/{KpiWriter.py => KpiWriterOld.py} | 0 .../service/MetricWriterToPrometheus.py | 96 ++++++++++++ .../tests/test_kpi_value_writer.py | 8 +- src/kpi_value_writer/tests/test_messages.py | 139 ++---------------- .../tests/test_metric_writer_to_prom.py | 29 ++++ 7 files changed, 181 insertions(+), 152 deletions(-) rename src/kpi_value_writer/tests/test_kpi_writer.py => scripts/run_tests_locally-kpi-prom-writer.sh (73%) mode change 100644 => 100755 rename src/kpi_value_writer/service/{KpiWriter.py => KpiWriterOld.py} (100%) create mode 100644 src/kpi_value_writer/service/MetricWriterToPrometheus.py create mode 100644 src/kpi_value_writer/tests/test_metric_writer_to_prom.py diff --git a/src/kpi_value_writer/tests/test_kpi_writer.py b/scripts/run_tests_locally-kpi-prom-writer.sh old mode 100644 new mode 100755 similarity index 73% rename from src/kpi_value_writer/tests/test_kpi_writer.py rename to scripts/run_tests_locally-kpi-prom-writer.sh index d2261b6ad..63989a13b --- a/src/kpi_value_writer/tests/test_kpi_writer.py +++ b/scripts/run_tests_locally-kpi-prom-writer.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import threading -import logging -from kpi_manager.service.KpiWriter import KpiWriter -LOGGER = logging.getLogger(__name__) +PROJECTDIR=`pwd` -def test_kpi_writer(): - LOGGER.info(' >>> test_kpi_writer START <<< ') - KpiWriter.kpi_writer() +cd $PROJECTDIR/src +RCFILE=$PROJECTDIR/coverage/.coveragerc +python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ + kpi_value_writer/tests/test_metric_writer_to_prom.py \ No newline at end of file diff --git a/src/kpi_manager/requirements.in b/src/kpi_manager/requirements.in index d96e4b1b8..b66e07d20 100644 --- a/src/kpi_manager/requirements.in +++ b/src/kpi_manager/requirements.in @@ -1,26 +1,24 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +aniso8601==9.0.1 anytree==2.8.0 APScheduler==3.10.1 attrs==23.2.0 +bcrypt==4.1.3 certifi==2024.2.2 +cffi==1.16.0 charset-normalizer==2.0.12 +click==8.1.7 colorama==0.4.6 confluent-kafka==2.3.0 coverage==6.3 +cryptography==36.0.2 +deepdiff==6.7.1 +deepmerge==1.1.1 +enum34==1.1.10 +Flask==2.1.3 +Flask-HTTPAuth==4.5.0 +Flask-RESTful==0.3.9 future-fstrings==1.2.0 +googleapis-common-protos==1.63.2 greenlet==3.0.3 grpcio==1.47.5 grpcio-health-checking==1.47.5 @@ -32,10 +30,22 @@ hyperframe==6.0.1 idna==3.7 influx-line-protocol==0.1.4 iniconfig==2.0.0 +ipaddress==1.0.23 +itsdangerous==2.2.0 +Jinja2==3.0.3 kafka-python==2.0.2 +lxml==5.2.2 +macaddress==2.0.2 +MarkupSafe==2.1.5 multidict==6.0.5 +ncclient==0.6.15 networkx==3.3 +numpy==2.0.0 +ordered-set==4.1.0 +p4runtime==1.3.0 packaging==24.0 +pandas==1.5.3 +paramiko==2.9.2 pluggy==1.5.0 prettytable==3.5.0 prometheus-client==0.13.0 @@ -43,6 +53,10 @@ protobuf==3.20.3 psycopg2-binary==2.9.3 py==1.11.0 py-cpuinfo==9.0.0 +pyang==2.6.0 +pyangbind @ git+https://github.com/robshakir/pyangbind.git@daf530f882c14bdb1bae4dc94fb4b4ad04d1295c +pycparser==2.22 +PyNaCl==1.5.0 pytest==6.2.5 pytest-benchmark==3.4.1 pytest-depends==1.0.1 @@ -50,14 +64,20 @@ python-dateutil==2.8.2 python-json-logger==2.0.2 pytz==2024.1 questdb==1.0.1 +regex==2024.5.15 requests==2.27.1 +requests-mock==1.9.3 six==1.16.0 SQLAlchemy==1.4.52 sqlalchemy-cockroachdb==1.4.4 SQLAlchemy-Utils==0.38.3 +tabulate==0.9.0 toml==0.10.2 typing_extensions==4.12.0 tzlocal==5.2 urllib3==1.26.18 wcwidth==0.2.13 +websockets==10.4 +Werkzeug==2.3.7 xmltodict==0.12.0 +yattag==1.15.2 diff --git a/src/kpi_value_writer/service/KpiWriter.py b/src/kpi_value_writer/service/KpiWriterOld.py similarity index 100% rename from src/kpi_value_writer/service/KpiWriter.py rename to src/kpi_value_writer/service/KpiWriterOld.py diff --git a/src/kpi_value_writer/service/MetricWriterToPrometheus.py b/src/kpi_value_writer/service/MetricWriterToPrometheus.py new file mode 100644 index 000000000..b2bfc07a4 --- /dev/null +++ b/src/kpi_value_writer/service/MetricWriterToPrometheus.py @@ -0,0 +1,96 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# read Kafka stream from Kafka topic
+
+import ast
+import time
+import threading
+import logging
+from prometheus_client import start_http_server, Gauge, CollectorRegistry
+from common.proto.kpi_sample_types_pb2 import KpiSampleType
+
+from common.proto.kpi_value_api_pb2 import KpiValue
+from common.proto.kpi_manager_pb2 import KpiDescriptor
+
+LOGGER = logging.getLogger(__name__)
+PROM_METRICS = {}
+PROM_REGISTERY = CollectorRegistry()
+
+class MetricWriterToPrometheus:
+    '''
+    This class exposes the *cooked KPI* on the endpoint to be scraped by the Prometheus server.
+    cooked KPI value = KpiDescriptor (gRPC message) + KpiValue (gRPC message)
+    '''
+    def __init__(self):
+        # prometheus server address and configs
+        self.start_prometheus_client()
+        pass
+
+    def start_prometheus_client(self):
+        start_http_server(10808, registry=PROM_REGISTERY)
+        LOGGER.debug("Prometheus client is started on port 10808")
+
+    def merge_kpi_descriptor_and_kpi_value(self, kpi_descriptor, kpi_value):
+        # Creating a dictionary from the kpi_descriptor's attributes
+        cooked_kpi = {
+            'kpi_id'         : kpi_descriptor.kpi_id.kpi_id.uuid,
+            'kpi_description': kpi_descriptor.kpi_description,
+            'kpi_sample_type': KpiSampleType.Name(kpi_descriptor.kpi_sample_type),
+            'device_id'      : kpi_descriptor.device_id.device_uuid.uuid,
+            'endpoint_id'    : kpi_descriptor.endpoint_id.endpoint_uuid.uuid,
+            'service_id'     : kpi_descriptor.service_id.service_uuid.uuid,
+            'slice_id'       : kpi_descriptor.slice_id.slice_uuid.uuid,
+            'connection_id'  : kpi_descriptor.connection_id.connection_uuid.uuid,
+            'link_id'        : kpi_descriptor.link_id.link_uuid.uuid,
+            'time_stamp'     : kpi_value.timestamp.timestamp,
+            'kpi_value'      : kpi_value.kpi_value_type.floatVal
+        }
+        # LOGGER.debug("Cooked Kpi: {:}".format(cooked_kpi))
+        return cooked_kpi
+
+    def create_and_expose_cooked_kpi(self, kpi_descriptor: KpiDescriptor, kpi_value: KpiValue):
+        # merge both gRPC messages into a single variable.
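+        # (The merged dict drives the exposure below: 'kpi_sample_type' becomes the
+        #  Prometheus metric name, 'kpi_description' its help text, 'kpi_value' the
+        #  sample, and the remaining keys are attached as label values.)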
+        cooked_kpi = self.merge_kpi_descriptor_and_kpi_value(kpi_descriptor, kpi_value)
+        tags_to_exclude = {'kpi_description', 'kpi_sample_type', 'kpi_value'} # extracted values will be used as metric tags
+        metric_tags = [tag for tag in cooked_kpi.keys() if tag not in tags_to_exclude]
+        metric_name = cooked_kpi['kpi_sample_type']
+        try:
+            if metric_name not in PROM_METRICS:     # Only register the metric if it doesn't exist yet
+                PROM_METRICS[metric_name] = Gauge (
+                    metric_name,
+                    cooked_kpi['kpi_description'],
+                    metric_tags,
+                    registry=PROM_REGISTERY
+                )
+            LOGGER.debug("Metric is created with labels: {:}".format(metric_tags))
+            PROM_METRICS[metric_name].labels(
+                    kpi_id        = cooked_kpi['kpi_id'],
+                    device_id     = cooked_kpi['device_id'],
+                    endpoint_id   = cooked_kpi['endpoint_id'],
+                    service_id    = cooked_kpi['service_id'],
+                    slice_id      = cooked_kpi['slice_id'],
+                    connection_id = cooked_kpi['connection_id'],
+                    link_id       = cooked_kpi['link_id'],
+                    time_stamp    = cooked_kpi['time_stamp'],
+                ).set(float(cooked_kpi['kpi_value']))
+            LOGGER.debug("Metric pushed to the endpoints: {:}".format(PROM_METRICS[metric_name]))
+
+        except ValueError as e:
+            if 'Duplicated timeseries' in str(e):
+                LOGGER.debug("Metric {:} is already registered. Skipping.".format(metric_name))
+                print("Metric {:} is already registered. Skipping.".format(metric_name))
+            else:
+                LOGGER.error("Error while pushing metric: {}".format(e))
+                raise
\ No newline at end of file
diff --git a/src/kpi_value_writer/tests/test_kpi_value_writer.py b/src/kpi_value_writer/tests/test_kpi_value_writer.py
index a87a0b6ea..e528f1dbb 100755
--- a/src/kpi_value_writer/tests/test_kpi_value_writer.py
+++ b/src/kpi_value_writer/tests/test_kpi_value_writer.py
@@ -19,12 +19,16 @@
 from common.tools.kafka.Variables import KafkaTopic
 
 LOGGER = logging.getLogger(__name__)
 
+
+# -------- Initial Test ----------------
 # def test_validate_kafka_topics():
 #     LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
 #     response = KafkaTopic.create_all_topics()
 #     assert isinstance(response, bool)
 
 def test_KafkaConsumer():
-    LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
+    LOGGER.debug(" --->>> test_validate_kafka_topics: START <<<--- ")
     KpiValueWriter.RunKafkaConsumer()
-    
\ No newline at end of file
+
+def test_metric_composer_and_writer():
+    LOGGER.debug(" --->>> test_metric_composer_and_writer: START <<<--- ")
diff --git a/src/kpi_value_writer/tests/test_messages.py b/src/kpi_value_writer/tests/test_messages.py
index 7e59499e9..d9f4cf80a 100755
--- a/src/kpi_value_writer/tests/test_messages.py
+++ b/src/kpi_value_writer/tests/test_messages.py
@@ -12,13 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import uuid +import uuid, time +import random from common.proto import kpi_manager_pb2 +from common.proto.kpi_value_api_pb2 import KpiValue from common.proto.kpi_sample_types_pb2 import KpiSampleType -from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\ - ConnectionId, EndPointId -# ---------------------- 3rd iteration Test Messages --------------------------------- + def create_kpi_descriptor_request(description: str = "Test Description"): _create_kpi_request = kpi_manager_pb2.KpiDescriptor() _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) @@ -32,128 +32,9 @@ def create_kpi_descriptor_request(description: str = "Test Description"): _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member return _create_kpi_request -# ---------------------- 2nd iteration Test Messages --------------------------------- -# def create_kpi_id_request(): -# _kpi_id = kpi_manager_pb2.KpiId() -# _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978" -# return _kpi_id - -# def create_kpi_descriptor_request_a(descriptor_name: str = "Test_name"): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) -# _create_kpi_request.kpi_description = descriptor_name -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV1' # pylint: disable=maybe-no-member -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV1' # pylint: disable=maybe-no-member -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC1' # pylint: disable=maybe-no-member -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END1' # pylint: disable=maybe-no-member -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1' # pylint: disable=maybe-no-member -# _create_kpi_request.link_id.link_uuid.uuid = 'LNK1' # pylint: disable=maybe-no-member -# return _create_kpi_request - -# def create_kpi_descriptor_request(): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) -# _create_kpi_request.kpi_description = 'KPI Description Test' -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member -# _create_kpi_request.link_id.link_uuid.uuid = 'LNK2' # pylint: disable=maybe-no-member -# return _create_kpi_request - -# def create_kpi_filter_request_a(): -# _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() -# _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) - -# device_id_obj = DeviceId() -# endpoint_id_obj = EndPointId() -# service_id_obj = ServiceId() -# slice_id_obj = SliceId() -# connection_id_obj = ConnectionId() -# link_id_obj = LinkId() - -# device_id_obj.device_uuid.uuid = "DEV1" -# endpoint_id_obj.endpoint_uuid.uuid = "END1" -# service_id_obj.service_uuid.uuid = "SERV1" -# slice_id_obj.slice_uuid.uuid = "SLC1" -# connection_id_obj.connection_uuid.uuid = "CON1" -# link_id_obj.link_uuid.uuid = "LNK1" - -# 
_create_kpi_filter_request.device_id.append(device_id_obj) -# _create_kpi_filter_request.endpoint_id.append(endpoint_id_obj) -# _create_kpi_filter_request.service_id.append(service_id_obj) -# _create_kpi_filter_request.slice_id.append(slice_id_obj) -# _create_kpi_filter_request.connection_id.append(connection_id_obj) -# _create_kpi_filter_request.link_id.append(link_id_obj) - -# return _create_kpi_filter_request - -# -------------------- Initial Test messages ------------------------------------- - -# def create_kpi_request(kpi_id_str): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request.kpi_description = 'KPI Description Test' -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV' + str(kpi_id_str) -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV' + str(kpi_id_str) -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC' + str(kpi_id_str) -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END' + str(kpi_id_str) -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON' + str(kpi_id_str) -# return _create_kpi_request - -# def create_kpi_request_b(): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request = str(uuid.uuid4()) -# _create_kpi_request.kpi_description = 'KPI Description Test' -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV2' # pylint: disable=maybe-no-member -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV2' # pylint: disable=maybe-no-member -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC2' # pylint: disable=maybe-no-member -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END2' # pylint: disable=maybe-no-member -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2' # pylint: disable=maybe-no-member -# return _create_kpi_request - -# def create_kpi_request_c(): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request.kpi_description = 'KPI Description Test' -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV3' # pylint: disable=maybe-no-member -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV3' # pylint: disable=maybe-no-member -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC3' # pylint: disable=maybe-no-member -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END3' # pylint: disable=maybe-no-member -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3' # pylint: disable=maybe-no-member -# return _create_kpi_request - -# def create_kpi_request_d(): -# _create_kpi_request = kpi_manager_pb2.KpiDescriptor() -# _create_kpi_request.kpi_description = 'KPI Description Test' -# _create_kpi_request.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED -# _create_kpi_request.device_id.device_uuid.uuid = 'DEV4' # pylint: disable=maybe-no-member -# _create_kpi_request.service_id.service_uuid.uuid = 'SERV4' # pylint: disable=maybe-no-member -# _create_kpi_request.slice_id.slice_uuid.uuid = 'SLC4' # pylint: disable=maybe-no-member -# _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4' # pylint: disable=maybe-no-member -# _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4' # pylint: disable=maybe-no-member -# return _create_kpi_request - -# def kpi_descriptor_list(): -# _kpi_descriptor_list = kpi_manager_pb2.KpiDescriptorList() 
-# return _kpi_descriptor_list - -# def create_kpi_filter_request(): -# _create_kpi_filter_request = kpi_manager_pb2.KpiDescriptorFilter() -# _create_kpi_filter_request.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) -# new_device_id = _create_kpi_filter_request.device_id.add() -# new_device_id.device_uuid.uuid = 'DEV1' -# new_service_id = _create_kpi_filter_request.service_id.add() -# new_service_id.service_uuid.uuid = 'SERV1' -# new_slice_id = _create_kpi_filter_request.slice_id.add() -# new_slice_id.slice_uuid.uuid = 'SLC1' -# new_endpoint_id = _create_kpi_filter_request.endpoint_id.add() -# new_endpoint_id.endpoint_uuid.uuid = 'END1' -# new_connection_id = _create_kpi_filter_request.connection_id.add() -# new_connection_id.connection_uuid.uuid = 'CON1' - -# return _create_kpi_filter_request \ No newline at end of file +def create_kpi_value_request(): + _create_kpi_value_request = KpiValue() + _create_kpi_value_request.kpi_id.kpi_id.uuid = str(uuid.uuid4()) + _create_kpi_value_request.timestamp.timestamp = time.time() + _create_kpi_value_request.kpi_value_type.floatVal = random.randint(10, 10000) + return _create_kpi_value_request \ No newline at end of file diff --git a/src/kpi_value_writer/tests/test_metric_writer_to_prom.py b/src/kpi_value_writer/tests/test_metric_writer_to_prom.py new file mode 100644 index 000000000..cee2877ff --- /dev/null +++ b/src/kpi_value_writer/tests/test_metric_writer_to_prom.py @@ -0,0 +1,29 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import logging +from kpi_value_writer.service.MetricWriterToPrometheus import MetricWriterToPrometheus +from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request, create_kpi_value_request + +LOGGER = logging.getLogger(__name__) + +def test_metric_writer_to_prometheus(): + LOGGER.info(' >>> test_metric_writer_to_prometheus START <<< ') + metric_writer_obj = MetricWriterToPrometheus() + metric_writer_obj.create_and_expose_cooked_kpi( + create_kpi_descriptor_request(), + create_kpi_value_request() + ) + -- GitLab From f8c3a52f51f90c52720774b2b5e28f4357ae7de6 Mon Sep 17 00:00:00 2001 From: Waleed Akbar Date: Thu, 18 Jul 2024 10:28:33 +0000 Subject: [PATCH 174/205] changes for integration with TFS (service is running) --- deploy/tfs.sh | 2 +- manifests/kpi_managerservice.yaml | 28 +++++++++---------- src/kpi_manager/service/KpiManagerService.py | 2 +- .../service/KpiManagerServiceServicerImpl.py | 2 +- src/kpi_manager/service/__main__.py | 3 +- 5 files changed, 18 insertions(+), 19 deletions(-) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 04895f984..f85e9bbc9 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -27,7 +27,7 @@ export TFS_REGISTRY_IMAGES=${TFS_REGISTRY_IMAGES:-"http://localhost:32000/tfs/"} # If not already set, set the list of components, separated by spaces, you want to build images for, and deploy. 
# By default, only basic components are deployed -export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device ztp monitoring pathcomp service slice nbi webui load_generator"} +export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device ztp monitoring pathcomp service slice nbi webui load_generator kpi_manager"} # If not already set, set the tag you wan