# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ast
import logging
import threading
from typing import Tuple, Any

import grpc
from confluent_kafka import Consumer as KafkaConsumer
from confluent_kafka import Producer as KafkaProducer
from confluent_kafka import KafkaError, KafkaException

from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
from common.proto.context_pb2 import Empty
from common.proto.telemetry_frontend_pb2 import CollectorId, Collector, CollectorFilter, CollectorList
from common.proto.telemetry_frontend_pb2_grpc import TelemetryFrontendServiceServicer
from monitoring.service.NameMapping import NameMapping

# Module-level logger for this service.
LOGGER = logging.getLogger(__name__)
# Metrics pool consumed by the safe_and_metered_rpc_method decorator below.
METRICS_POOL = MetricsPool('Monitoring', 'TelemetryFrontend')
# Kafka broker address; hard-coded to a local broker — TODO confirm / make configurable.
KAFKA_SERVER_IP = '127.0.0.1:9092'
# IDs of collectors started via StartCollector; consulted by kafka_listener
# to decide whether an incoming response belongs to this frontend.
ACTIVE_COLLECTORS = []
class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer):
    """
    gRPC servicer for the Telemetry Frontend.

    Receives collector start/stop requests over gRPC, forwards them to the
    telemetry backend by producing messages on the Kafka ``topic_request``
    topic, and listens on ``topic_response`` for KPI samples coming back.
    """

    def __init__(self, name_mapping : NameMapping):
        LOGGER.info('Init TelemetryFrontendService')

    # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StartCollector(self,
        request : Collector, grpc_context: grpc.ServicerContext # type: ignore
    ) -> CollectorId: # type: ignore
        """
        Start a collector: extract the collector parameters from the request
        and publish them on the Kafka request topic.

        Returns a CollectorId echoing the requested collector UUID.
        """
        # push info to frontend db
        _collector_id       = str(request.collector_id.collector_id.uuid)
        _collector_kpi_id   = str(request.kpi_id.kpi_id.uuid)
        _collector_duration = int(request.duration_s)
        _collector_interval = int(request.interval_s)
        self.generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)
        # self.run_generate_kafka_request(_collector_id, _collector_kpi_id, _collector_duration, _collector_interval)

        # BUGFIX: 'response' was previously used without ever being created,
        # which raised NameError on every call.
        response = CollectorId()
        response.collector_id.uuid = request.collector_id.collector_id.uuid # type: ignore
        return response

    def run_generate_kafka_request(self, msg_key: str, kpi: str, duration : int, interval: int):
        """Fire-and-forget variant of generate_kafka_request on a daemon-less thread."""
        threading.Thread(target=self.generate_kafka_request, args=(msg_key, kpi, duration, interval)).start()

    # @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def generate_kafka_request(self,
        msg_key: str, kpi: str, duration : int, interval: int
    ) -> KafkaProducer:
        """
        Produce a collector request on the Kafka request topic.

        The message key is the collector id; the value is the stringified
        tuple (kpi, duration, interval). The collector id is also recorded
        in ACTIVE_COLLECTORS so responses can be matched later.
        """
        producer_configs = {
            'bootstrap.servers': KAFKA_SERVER_IP,
        }
        topic_request = "topic_request"
        # BUGFIX: removed dead statement 'msg_value = Tuple[str, int, int]'
        # (a typing object assigned and immediately overwritten).
        msg_value : Tuple[str, int, int] = (kpi, duration, interval)
        # BUGFIX: fixed "Colletcor" typo in the debug output.
        print ("Request generated: ", "Collector Id: ", msg_key, \
            ", \nKPI: ", kpi, ", Duration: ", duration, ", Interval: ", interval)
        producerObj = KafkaProducer(producer_configs)
        producerObj.produce(topic_request, key=msg_key, value= str(msg_value), callback=self.delivery_callback)
        ACTIVE_COLLECTORS.append(msg_key)
        producerObj.flush()
        return producerObj

    def run_kafka_listener(self):
        """Start kafka_listener on a background thread; returns True immediately."""
        # print ("--- STARTED: run_kafka_listener ---")
        threading.Thread(target=self.kafka_listener).start()
        return True

    def kafka_listener(self):
        """
        Blocking loop that consumes the Kafka response topic and dispatches
        KPI samples belonging to active collectors to process_response.
        """
        # print ("--- STARTED: kafka_listener ---")
        consumer_configs = {
            'bootstrap.servers' : KAFKA_SERVER_IP,
            'group.id'          : 'frontend',
            'auto.offset.reset' : 'latest'
        }
        topic_response = "topic_response"
        consumerObj = KafkaConsumer(consumer_configs)
        consumerObj.subscribe([topic_response])
        while True:
            receive_msg = consumerObj.poll(2.0)
            if receive_msg is None:
                print (" - Telemetry frontend listening on Kafka Topic: ", topic_response) # added for debugging purposes
                continue
            elif receive_msg.error():
                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
                    # End of partition is not an error for a tailing consumer.
                    continue
                else:
                    print("Consumer error: {}".format(receive_msg.error()))
                    break
            try:
                collector_id = receive_msg.key().decode('utf-8')
                if collector_id in ACTIVE_COLLECTORS:
                    # Value is the str() of a (kpi_id, kpi_value) tuple produced
                    # by the backend; literal_eval safely reconstructs it.
                    (kpi_id, kpi_value) = ast.literal_eval(receive_msg.value().decode('utf-8'))
                    self.process_response(kpi_id, kpi_value)
                else:
                    print(f"collector id does not match.\nRespone ID: '{collector_id}' --- Active IDs: '{ACTIVE_COLLECTORS}' ")
            except Exception as e:
                print(f"No message key found: {str(e)}")
                continue

    def process_response(self, kpi_id: str, kpi_value: Any):
        """Handle a single KPI sample received from the backend."""
        print ("Frontend - KPI: ", kpi_id, ", VALUE: ", kpi_value)

    # BUGFIX: removed @safe_and_metered_rpc_method here — this is a Kafka
    # producer delivery callback with signature (err, msg), not an RPC method;
    # the RPC wrapper assumes a (request, grpc_context) signature and breaks it.
    def delivery_callback(self, err, msg):
        """
        Callback function to handle message delivery status.
        Args:
            err (KafkaError): Kafka error object.
            msg (Message): Kafka message object.
        """
        if err:
            print(f'Message delivery failed: {err}')
        else:
            print(f'Message delivered to topic {msg.topic()}')

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def StopCollector(self,
        request : CollectorId, grpc_context: grpc.ServicerContext # type: ignore
    ) -> Empty: # type: ignore
        """Stop a collector. Currently only clears the id on the request; no backend signal yet."""
        request.collector_id.uuid = ""
        return Empty()

    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
    def SelectCollectors(self,
        request : CollectorFilter, contextgrpc_context: grpc.ServicerContext # type: ignore
    ) -> CollectorList: # type: ignore
        """Select collectors matching the filter. Currently returns an empty list (stub)."""
        response = CollectorList()
        return response