# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import time
import grpc
import logging
import threading
from typing import Tuple, Any
from confluent_kafka import Consumer as KafkaConsumer
from confluent_kafka import Producer as KafkaProducer
from confluent_kafka import KafkaError, KafkaException
from common.proto.context_pb2 import Empty
from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList
from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer
from monitoring.service.NameMapping import NameMapping

from telemetry.database.TelemetryModel import Collector as CollectorModel
from telemetry.database.managementDB import managementDB

LOGGER            = logging.getLogger(__name__)
METRICS_POOL      = MetricsPool('Monitoring', 'TelemetryFrontend')
KAFKA_SERVER_IP   = '127.0.0.1:9092'
ACTIVE_COLLECTORS = []
KAFKA_TOPICS      = {'request' : 'topic_request',
                     'response': 'topic_response'}
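# NOTE: KAFKA_SERVER_IP assumes a broker reachable on localhost:9092; adjust for your deployment.
# ACTIVE_COLLECTORS tracks the IDs of collectors started by this frontend so that responses for
# unknown collectors can be filtered out in kafka_listener().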

class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
    def __init__(self, name_mapping : NameMapping):
        LOGGER.info('Init TelemetryFrontendService')
        self.managementDBobj = managementDB()
        self.kafka_producer  = KafkaProducer({'bootstrap.servers' : KAFKA_SERVER_IP})
        self.kafka_consumer  = KafkaConsumer({'bootstrap.servers' : KAFKA_SERVER_IP,
                                              'group.id'          : 'frontend',
                                              'auto.offset.reset' : 'latest'})
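        # NOTE: with 'auto.offset.reset' set to 'latest', the consumer only sees messages
        # published after it subscribes; earlier responses are not replayed.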

    def add_collector_to_db(self, request : Collector): # type: ignore
        try:
            # Create a new Collector instance
            collector_to_insert = CollectorModel()
            collector_to_insert.collector_id = request.collector_id.collector_id.uuid
            collector_to_insert.kpi_id       = request.kpi_id.kpi_id.uuid
            # collector_to_insert.collector_description = request.collector
            collector_to_insert.sampling_duration_s = request.duration_s
            collector_to_insert.sampling_interval_s = request.interval_s
            collector_to_insert.start_timestamp     = time.time()
            collector_to_insert.end_timestamp       = time.time()
            self.managementDBobj.add_row_to_db(collector_to_insert)
        except Exception as e:
            LOGGER.info("Unable to create CollectorModel object. {:}".format(e))

    # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StartCollector(self,
        request : Collector, grpc_context : grpc.ServicerContext # type: ignore
    ) -> CollectorId: # type: ignore
        # push collector info to the frontend DB and generate the Kafka request
        LOGGER.info("gRPC message: {:}".format(request))
        response = CollectorId()
        _collector_id       = str(request.collector_id.collector_id.uuid)
        _collector_kpi_id   = str(request.kpi_id.kpi_id.uuid)
        _collector_duration = int(request.duration_s)
        _collector_interval = int(request.interval_s)
        # pushing Collector to DB
        self.add_collector_to_db(request)
        self.publish_to_kafka_request_topic(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)
        # self.run_publish_to_kafka_request_topic(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)
        response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore
        return response

    def run_publish_to_kafka_request_topic(self, msg_key : str, kpi : str, duration : int, interval : int):
        # TODO: keep the threading.Thread() object in a dictionary if the thread must be joined later;
        # for now the publisher thread is started and forgotten.
        threading.Thread(target=self.publish_to_kafka_request_topic, args=(msg_key, kpi, duration, interval)).start()

    def publish_to_kafka_request_topic(self,
        collector_id : str, kpi : str, duration : int, interval : int
    ):
        """
        Method to generate a collector request on the Kafka request topic.
        """
        msg_value : Tuple[str, int, int] = (kpi, duration, interval)
        self.kafka_producer.produce(KAFKA_TOPICS['request'], key=collector_id, value=str(msg_value), callback=self.delivery_callback)
        LOGGER.info("Collector Request Generated: {:}, {:}, {:}, {:}".format(collector_id, kpi, duration, interval))
        ACTIVE_COLLECTORS.append(collector_id)
        self.kafka_producer.flush()
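    # NOTE: the message value is the str() of a Python tuple; kafka_listener() parses the
    # response payload back with ast.literal_eval(), so both sides share this ad-hoc encoding.
    # A structured format (e.g. JSON) would be more robust, but this matches the current code.
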
    def run_kafka_listener(self) -> bool:
        threading.Thread(target=self.kafka_listener).start()
        return True

    def kafka_listener(self):
        """
        Listener for responses on the Kafka response topic.
        """
        self.kafka_consumer.subscribe([KAFKA_TOPICS['response']])
        while True:
            receive_msg = self.kafka_consumer.poll(2.0)
            if receive_msg is None:
                # print(" - Telemetry frontend listening on Kafka topic: ", KAFKA_TOPICS['response']) # added for debugging purposes
                continue
            elif receive_msg.error():
                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                    continue
                else:
                    print("Consumer error: {:}".format(receive_msg.error()))
                    break
            try:
                collector_id = receive_msg.key().decode('utf-8')
                if collector_id in ACTIVE_COLLECTORS:
                    (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8'))
                    self.process_response(collector_id, kpi_id, kpi_value)
                else:
                    print(f"Collector ID does not match.\nResponse ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}'")
            except Exception as e:
                print(f"No message key found: {str(e)}")
                continue

    def process_response(self, collector_id : str, kpi_id : str, kpi_value : Any):
        # (kpi_id, kpi_value) == ("-1", -1) is the backend's acknowledgment that the collector terminated
        if kpi_id == "-1" and kpi_value == -1:
            # LOGGER.info("Successfully terminated Collector: {:}".format(collector_id))
            print("Successfully terminated Collector: ", collector_id)
        else:
            print("Frontend-Received values Collector Id:", collector_id, "-KPI:", kpi_id, "-VALUE:", kpi_value)

    def delivery_callback(self, err, msg):
        """
        Callback function to handle message delivery status.
        Note: this is a Kafka producer callback, not a gRPC method, so it is not
        wrapped with safe_and_metered_rpc_method.
        Args:
            err (KafkaError): Kafka error object.
            msg (Message): Kafka message object.
        """
        if err:
            print(f'Message delivery failed: {err}')
        else:
            print(f'Message delivered to topic {msg.topic()}')

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StopCollector(self,
        request : CollectorId, grpc_context : grpc.ServicerContext # type: ignore
    ) -> Empty: # type: ignore
        LOGGER.info("gRPC message: {:}".format(request))
        _collector_id = request.collector_id.uuid
        # an empty KPI with duration/interval set to -1 signals the backend to stop this collector
        self.publish_to_kafka_request_topic(_collector_id, "", -1, -1)
        return Empty()

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def SelectCollectors(self,
        request : CollectorFilter, grpc_context : grpc.ServicerContext # type: ignore
    ) -> CollectorList: # type: ignore
        LOGGER.info("gRPC message: {:}".format(request))
        response = CollectorList()
        filter_to_apply = dict()
        filter_to_apply['kpi_id'] = request.kpi_id[0].kpi_id.uuid
        # filter_to_apply['duration_s'] = request.duration_s[0]
        rows = []
        try:
            rows = self.managementDBobj.select_with_filter(CollectorModel, **filter_to_apply)
        except Exception as e:
            LOGGER.info('Unable to apply filter on KPI descriptor. {:}'.format(e))
        try:
            for row in rows:
                collector_obj = Collector()
                collector_obj.collector_id.collector_id.uuid = row.collector_id
                response.collector_list.append(collector_obj)
            return response
        except Exception as e:
            LOGGER.info('Unable to process response. {:}'.format(e))
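
# --------------------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the service). It assumes a Kafka
# broker at KAFKA_SERVER_IP, a reachable management DB, and that NameMapping() takes no
# arguments; the UUIDs below are hypothetical placeholders. Decorated RPCs normally
# receive a real grpc.ServicerContext from the gRPC server; None is passed here only
# because these methods do not use the context on the success path.
# --------------------------------------------------------------------------------------
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    servicer = TelemetryFrontendServiceServicerImpl(NameMapping())
    servicer.run_kafka_listener()                   # consume backend responses in a background thread

    collector = Collector()
    collector.collector_id.collector_id.uuid = 'collector-uuid-0000'    # hypothetical ID
    collector.kpi_id.kpi_id.uuid             = 'kpi-uuid-0000'          # hypothetical ID
    collector.duration_s = 10
    collector.interval_s = 2
    collector_id = servicer.StartCollector(collector, None)

    time.sleep(collector.duration_s + 2)            # let the collector run for its duration
    servicer.StopCollector(collector_id, None)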