# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import grpc
import logging
import threading
import time

from typing import Any, Tuple

from confluent_kafka import Consumer as KafkaConsumer
from confluent_kafka import Producer as KafkaProducer
from confluent_kafka import KafkaError, KafkaException

from common.proto.context_pb2 import Empty
from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList
from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer
from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
from monitoring.service.NameMapping import NameMapping


LOGGER            = logging.getLogger(__name__)
METRICS_POOL      = MetricsPool('Monitoring', 'TelemetryFrontend')
KAFKA_SERVER_IP   = '127.0.0.1:9092'
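# Collector IDs whose responses are accepted by the Kafka response listener (see kafka_listener).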
ACTIVE_COLLECTORS = []

class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
    def __init__(self, name_mapping : NameMapping):
        LOGGER.info('Init TelemetryFrontendService')
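        # NOTE: name_mapping is received but not used here yet.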

    # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StartCollector(self, 
                       request : Collector, grpc_context: grpc.ServicerContext # type: ignore
                      ) -> CollectorId: # type: ignore
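        """
        Extracts the collector parameters from the request, publishes them on the Kafka
        request topic, and echoes the received CollectorId back to the caller.
        """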
        # push info to frontend db
        response = CollectorId()
        _collector_id       = str(request.collector_id.collector_id.uuid)
        _collector_kpi_id   = str(request.kpi_id.kpi_id.uuid)
        _collector_duration = int(request.duration_s)
        _collector_interval = int(request.interval_s)

        self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)
        # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)
        response.collector_id.uuid = request.collector_id.collector_id.uuid                                 # type: ignore
        return response

    def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int):
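        """
        Fire-and-forget helper: runs generate_kafka_request() in a background thread.
        """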
        threading.Thread(target=self.generate_kafka_request, args=(msg_key, kpi, duration, interval)).start()

    # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def generate_kafka_request(self,
                               msg_key: str, kpi: str, duration: int, interval: int
                              ) -> KafkaProducer:
        """
        Method to generate a collector request on the Kafka request topic.
        """
        # time.sleep(5)
        producer_configs = {
            'bootstrap.servers': KAFKA_SERVER_IP,
        }
        topic_request = "topic_request"
        # The payload is serialized as a stringified (kpi, duration, interval) tuple;
        # the consumer of "topic_request" is expected to parse it back (e.g. with ast.literal_eval).
        msg_value : Tuple[str, int, int] = (kpi, duration, interval)

        print("Request generated: ", "Collector Id: ", msg_key,
              ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval)
        producerObj = KafkaProducer(producer_configs)
        producerObj.produce(topic_request, key=msg_key, value=str(msg_value), callback=self.delivery_callback)
        producerObj.flush()
        return producerObj

    def run_kafka_listener(self):
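        """
        Starts kafka_listener() in a background thread and returns immediately.
        """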
        # print ("--- STARTED: run_kafka_listener ---")
        threading.Thread(target=self.kafka_listener).start()
        return True

    def kafka_listener(self):
        """
        Listener for responses on the Kafka response topic.
        """
        # print ("--- STARTED: kafka_listener ---")
        consumer_configs = {
            'bootstrap.servers' : KAFKA_SERVER_IP,
            'group.id'          : 'frontend',
            'auto.offset.reset' : 'latest'
        }
        topic_response = "topic_response"

        consumerObj = KafkaConsumer(consumer_configs)
        consumerObj.subscribe([topic_response])
        # print (time.time())
        while True:
            receive_msg = consumerObj.poll(2.0)
            if receive_msg is None:
                print(" - Telemetry frontend listening on Kafka Topic: ", topic_response)     # added for debugging purposes
                continue
            elif receive_msg.error():
                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    print("Consumer error: {}".format(receive_msg.error()))
                    break
            try:
                collector_id = receive_msg.key().decode('utf-8')
                if collector_id in ACTIVE_COLLECTORS:
                    (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8'))
                    self.process_response(kpi_id, kpi_value)
                else:
                    print(f"Collector ID does not match.\nResponse ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}'")
            except Exception as e:
                print(f"No message key found: {str(e)}")
                continue
                # return None

    def process_response(self, kpi_id: str, kpi_value: Any):
        print ("Frontend - KPI: ", kpi_id, ", VALUE: ", kpi_value)

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def delivery_callback(self, err, msg):
        """
        Callback function to handle message delivery status.
        Args:
            err (KafkaError): Kafka error object.
            msg (Message): Kafka message object.
        """
        if err:
            print(f'Message delivery failed: {err}')
        else:
            print(f'Message delivered to topic {msg.topic()}')

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StopCollector(self, 
                      request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore
                     ) -> Empty:  # type: ignore
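        # Placeholder: clears the received CollectorId and returns Empty; the collector backend is not contacted yet.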
        request.collector_id.uuid = ""
        return Empty()

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def SelectCollectors(self, 
                         request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore
                        ) -> CollectorList:  # type: ignore
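        # Placeholder: returns an empty CollectorList; filtering by CollectorFilter is not implemented yet.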
        response = CollectorList()
        return response