# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import logging
import threading

from common.Constants import ServiceNameEnum
from common.Settings import get_service_port_grpc
from common.tools.service.GenericGrpcService import GenericGrpcService
from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
from analytics.backend.service.SparkStreaming import SparkStreamer
from confluent_kafka import Consumer as KafkaConsumer
from confluent_kafka import KafkaError

LOGGER = logging.getLogger(__name__)


class AnalyticsBackendService(GenericGrpcService):
    """
    Listens for analyzer requests on the Kafka ANALYTICS_REQUEST topic and
    spawns a Spark streaming job for each analyzer received.
    """
    def __init__(self, cls_name : str = __name__) -> None:
        # Initialize the parent gRPC service; assumes ServiceNameEnum.ANALYTICSBACKEND
        # is defined in common.Constants, as it is for the other TFS components.
        port = get_service_port_grpc(ServiceNameEnum.ANALYTICSBACKEND)
        super().__init__(port, cls_name=cls_name)
        self.kafka_consumer = KafkaConsumer({
            'bootstrap.servers' : KafkaConfig.get_kafka_address(),
            'group.id'          : 'analytics-frontend',
            'auto.offset.reset' : 'latest'})

    def RunSparkStreamer(self, analyzer) -> bool:
        kpi_list      = analyzer['input_kpis']
        # Operator names are derived from the threshold keys by stripping the
        # '_value' suffix (e.g. 'avg_value' -> 'avg').
        oper_list     = [key.replace('_value', '') for key in analyzer['thresholds'].keys()]
        thresholds    = analyzer['thresholds']
        window_size   = analyzer['window_size']
        window_slider = analyzer['window_slider']
        LOGGER.debug('Received parameters: {:} - {:} - {:} - {:} - {:}'.format(
            kpi_list, oper_list, thresholds, window_size, window_slider))
        # Run the Spark streaming job in a separate thread so the listener can
        # keep consuming requests.
        threading.Thread(
            target=SparkStreamer,
            args=(kpi_list, oper_list, thresholds, window_size, window_slider, None)
        ).start()
        return True

    def RunRequestListener(self) -> bool:
        threading.Thread(target=self.RequestListener).start()
        return True

    def RequestListener(self):
        """
        Listens for analyzer requests on the Kafka ANALYTICS_REQUEST topic.
        """
        consumer = self.kafka_consumer
        consumer.subscribe([KafkaTopic.ANALYTICS_REQUEST.value])
        while True:
            receive_msg = consumer.poll(2.0)
            if receive_msg is None:
                continue
            elif receive_msg.error():
                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    LOGGER.error('Consumer error: {:}'.format(receive_msg.error()))
                    break
            # The message key carries the analyzer ID; the value is the
            # JSON-encoded analyzer descriptor.
            analyzer    = json.loads(receive_msg.value().decode('utf-8'))
            analyzer_id = receive_msg.key().decode('utf-8')
            LOGGER.debug('Received Analyzer: {:} - {:}'.format(analyzer_id, analyzer))
            # TODO: Add active analyzer to list
            self.RunSparkStreamer(analyzer)
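

# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the service): how an analyzer request could
# be published to the ANALYTICS_REQUEST topic consumed by RequestListener().
# The message key is the analyzer ID and the value is a JSON object carrying
# the fields read by RunSparkStreamer(): 'input_kpis', 'thresholds',
# 'window_size' and 'window_slider'. The KPI UUID, analyzer ID, and threshold
# values below are hypothetical placeholders, not values from the TFS codebase.
# ----------------------------------------------------------------------------
def _example_publish_analyzer_request():            # pragma: no cover
    from confluent_kafka import Producer

    producer = Producer({'bootstrap.servers': KafkaConfig.get_kafka_address()})
    sample_analyzer = {
        'input_kpis'   : ['6e22f180-ba28-4641-b190-2287bf448888'],  # hypothetical KPI UUID
        'thresholds'   : {'avg_value': [20, 30]},  # '_value' suffix encodes the operator ('avg')
        'window_size'  : '60 seconds',
        'window_slider': '30 seconds',
    }
    producer.produce(
        KafkaTopic.ANALYTICS_REQUEST.value,
        key   = 'analyzer-uuid-0001',               # hypothetical analyzer ID
        value = json.dumps(sample_analyzer),
    )
    producer.flush()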
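

# Illustrative usage: the component's entry point (defined elsewhere in the
# repo) is expected to start the Kafka listener roughly as follows:
#
#   analytics_backend = AnalyticsBackendService()
#   analytics_backend.RunRequestListener()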