# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import json
import logging
import threading

from common.Constants import ServiceNameEnum        # assumed: standard TFS service-name enum
from common.Settings import get_service_port_grpc   # assumed: standard TFS gRPC-port helper
from common.tools.service.GenericGrpcService import GenericGrpcService
from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
from analytics.backend.service.SparkStreaming import SparkStreamer
from confluent_kafka import Consumer as KafkaConsumer
from confluent_kafka import KafkaError
LOGGER             = logging.getLogger(__name__)

class AnalyticsBackendService(GenericGrpcService):
    """
    Class listens for ...
    """
    def __init__(self, cls_name : str = __name__) -> None:
Waleed Akbar's avatar
Waleed Akbar committed
        self.kafka_consumer = KafkaConsumer({'bootstrap.servers' : KafkaConfig.get_kafka_address(),
                                            'group.id'           : 'analytics-frontend',
                                            'auto.offset.reset'  : 'latest'})

    def RunSparkStreamer(self, kpi_list, oper_list, thresholds_dict):
        print ("Received parameters: {:} - {:} - {:}".format(kpi_list, oper_list, thresholds_dict))
        LOGGER.debug ("Received parameters: {:} - {:} - {:}".format(kpi_list, oper_list, thresholds_dict))
        threading.Thread(target=SparkStreamer, 
                         args=(kpi_list, oper_list, None, None, thresholds_dict, None)
                         ).start()
        return True
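
    # Illustrative only (not part of the original service): the analyzer request
    # consumed by RequestListener and forwarded to RunSparkStreamer is assumed to
    # carry at least the fields sketched below. Only 'input_kpis' is referenced
    # explicitly in this file; the other key names and values are assumptions.
    #
    #   {
    #       "input_kpis" : ["<kpi-uuid-1>", "<kpi-uuid-2>"],
    #       "oper_list"  : ["avg"],
    #       "thresholds" : {"avg_value": [20, 30]}
    #   }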

    def RunRequestListener(self)->bool:
        threading.Thread(target=self.RequestListener).start()
        return True
    def RequestListener(self):
        """
        listener for requests on Kafka topic.
        """
        consumer = self.kafka_consumer
        consumer.subscribe([KafkaTopic.ANALYTICS_REQUEST.value])
        while True:
            receive_msg = consumer.poll(2.0)
            if receive_msg is None:
                continue
            elif receive_msg.error():
                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    print("Consumer error: {}".format(receive_msg.error()))
                    break
            analyzer    = json.loads(receive_msg.value().decode('utf-8'))
            analyzer_id = receive_msg.key().decode('utf-8')
            LOGGER.debug('Received Analyzer: {:} - {:}'.format(analyzer_id, analyzer))
            print('Received Analyzer: {:} - {:} - {:}'.format(analyzer_id, analyzer, analyzer['input_kpis']))
            # Forward the full request; 'oper_list' and 'thresholds' are assumed
            # key names for the remaining fields of the analyzer payload.
            self.RunSparkStreamer(analyzer['input_kpis'], analyzer.get('oper_list'),
                                  analyzer.get('thresholds'))       # TODO: Add active analyzer to list
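

# Minimal usage sketch (not part of the original file): run the backend request
# listener standalone. It assumes AnalyticsBackendService can be constructed with
# no extra arguments and that consuming requests in a background thread is enough
# for a quick local test; a real deployment also starts the gRPC server.
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    backend_service = AnalyticsBackendService()
    backend_service.RunRequestListener()    # spawn RequestListener in a background thread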