KpiValueApiServiceServicerImpl.py
Commit (Waleed Akbar): PROM_URL variable is updated with the FQDN of Prometheus.

# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
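
"""
gRPC servicer for the KPI Value API.

StoreKpiValues publishes each received KPI value to the Kafka VALUE topic as a serialized
KpiValue protobuf; SelectKpiValues resolves each requested KPI ID to its KpiSampleType name
via the KPI Manager and queries Prometheus over the requested time ranges.
"""
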
import logging, grpc
from typing import Tuple, Any
from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
from common.proto.context_pb2 import Empty
from common.proto.kpi_sample_types_pb2 import KpiSampleType
from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId
from common.proto.kpi_value_api_pb2_grpc import KpiValueAPIServiceServicer
from common.proto.kpi_value_api_pb2 import KpiValueList, KpiValueFilter, KpiValue, KpiValueType
from confluent_kafka import Producer as KafkaProducer
from prometheus_api_client import PrometheusConnect
from prometheus_api_client.utils import parse_datetime
from kpi_manager.client.KpiManagerClient import KpiManagerClient

LOGGER       = logging.getLogger(__name__)
METRICS_POOL = MetricsPool('KpiValueAPI', 'NBIgRPC')

PROM_URL     = "http://prometheus-k8s.monitoring.svc.cluster.local:9090"    # TODO: read the Prometheus URL from an environment variable
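# A possible sketch for the TODO above, assuming the deployment exposes a PROM_URL environment
# variable (the variable name is an assumption, not defined elsewhere in this module):
#   import os
#   PROM_URL = os.environ.get("PROM_URL", "http://prometheus-k8s.monitoring.svc.cluster.local:9090")
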
class KpiValueApiServiceServicerImpl(KpiValueAPIServiceServicer):
def __init__(self):
LOGGER.debug('Init KpiValueApiService')
@safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
def StoreKpiValues(self, request: KpiValueList, grpc_context: grpc.ServicerContext
) -> Empty:
LOGGER.debug('StoreKpiValues: Received gRPC message object: {:}'.format(request))
producer_obj = KafkaProducer({
'bootstrap.servers' : KafkaConfig.SERVER_ADDRESS.value
})
for kpi_value in request.kpi_value_list:
kpi_value_to_produce : Tuple [str, Any, Any] = (
kpi_value.kpi_id.kpi_id,
kpi_value.timestamp,
                kpi_value.kpi_value_type        # KpiValueType message; the populated field (int64Val, floatVal, boolVal, stringVal, ...) depends on the value's type
)
LOGGER.debug('KPI to produce is {:}'.format(kpi_value_to_produce))
msg_key = "gRPC-kpivalueapi" # str(__class__.__name__) can be used
producer_obj.produce(
KafkaTopic.VALUE.value,
key = msg_key,
value = kpi_value.SerializeToString(), # value = json.dumps(kpi_value_to_produce),
callback = self.delivery_callback
)
producer_obj.flush()
return Empty()
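
    # Illustrative only (not executed here): a consumer of the VALUE topic could recover the
    # protobuf produced above roughly as follows; the 'group.id' is an assumption for this sketch.
    #   from confluent_kafka import Consumer
    #   consumer = Consumer({'bootstrap.servers': KafkaConfig.SERVER_ADDRESS.value,
    #                        'group.id': 'kpi-value-api-sketch', 'auto.offset.reset': 'earliest'})
    #   consumer.subscribe([KafkaTopic.VALUE.value])
    #   msg = consumer.poll(1.0)
    #   if msg is not None and msg.error() is None:
    #       kpi_value = KpiValue.FromString(msg.value())
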
@safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
def SelectKpiValues(self, request: KpiValueFilter, grpc_context: grpc.ServicerContext
) -> KpiValueList:
        LOGGER.debug('SelectKpiValues: Received gRPC message object: {:}'.format(request))
response = KpiValueList()
kpi_manager_client = KpiManagerClient()
prom_connect = PrometheusConnect(url=PROM_URL)
metrics = [self.GetKpiSampleType(kpi, kpi_manager_client) for kpi in request.kpi_id]
start_timestamps = [parse_datetime(timestamp) for timestamp in request.start_timestamp]
end_timestamps = [parse_datetime(timestamp) for timestamp in request.end_timestamp]
prom_response = []
for start_time, end_time in zip(start_timestamps, end_timestamps):
for metric in metrics:
                LOGGER.debug(">>> Query: {:} from {:} to {:}".format(metric, start_time, end_time))
prom_response.append(
prom_connect.custom_query_range(
query = metric, # this is the metric name and label config
start_time = start_time,
end_time = end_time,
step = 30, # or any other step value (missing in gRPC Filter request)
)
)
        for single_response in prom_response:
            # print ("{:}".format(single_response))
            for record in single_response:
                # print("Record >>> kpi: {:} >>> time & values set: {:}".format(record['metric']['__name__'], record['values']))
                for value in record['values']:
                    # value is a [<unix_timestamp>, <sample_value_as_string>] pair from Prometheus
                    kpi_value = KpiValue()
                    kpi_value.kpi_id.kpi_id.uuid = record['metric']['__name__']
                    kpi_value.timestamp          = value[0]
                    # kpi_value_type is a composite (message) field, so CopyFrom is required instead of direct assignment
                    kpi_value.kpi_value_type.CopyFrom(self.ConverValueToKpiValueType(value[1]))
                    response.kpi_value_list.append(kpi_value)
return response
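
    # For reference, each element appended to prom_response above is a list of series in the
    # Prometheus range-query ("matrix") format that the parsing loop assumes, e.g.:
    #   [ { 'metric': {'__name__': 'PACKETS_TRANSMITTED', ...},
    #       'values': [[1718000000.0, '123'], [1718000030.0, '124'], ...] } ]
    # (the metric name and numbers are illustrative only)
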
    def GetKpiSampleType(self, kpi_value, kpi_manager_client):
        # kpi_value is an entry taken from KpiValueFilter.kpi_id; returns the KpiSampleType name,
        # which is used as the Prometheus metric name, or None if no descriptor is found.
        LOGGER.debug("--- START -----")
kpi_id = KpiId()
kpi_id.kpi_id.uuid = kpi_value.kpi_id.kpi_id.uuid
# print("KpiId generated: {:}".format(kpi_id))
try:
kpi_descriptor_object = KpiDescriptor()
kpi_descriptor_object = kpi_manager_client.GetKpiDescriptor(kpi_id)
            # TODO: why does kpi_descriptor_object receive a KpiDescriptor-type object and not an Empty-type object???
            if kpi_descriptor_object.kpi_id.kpi_id.uuid == kpi_id.kpi_id.uuid:
                LOGGER.info("Extracted KpiDescriptor: {:}".format(kpi_descriptor_object))
                return KpiSampleType.Name(kpi_descriptor_object.kpi_sample_type)    # extract and return the name of KpiSampleType
            else:
                LOGGER.info("No KPI Descriptor found in DB for Kpi ID: {:}".format(kpi_id))
        except Exception as e:
            LOGGER.warning("Unable to get KpiDescriptor. Error: {:}".format(e))
        return None     # reached when the descriptor is missing or the lookup failed

    def ConverValueToKpiValueType(self, value):
        # Prometheus returns sample values as strings; try int, then float, then bool, otherwise keep the string.
        # Check if the value is an integer (int64)
try:
int_value = int(value)
return KpiValueType(int64Val=int_value)
except ValueError:
pass
# Check if the value is a float
try:
float_value = float(value)
return KpiValueType(floatVal=float_value)
except ValueError:
pass
# Check if the value is a boolean
if value.lower() in ['true', 'false']:
bool_value = value.lower() == 'true'
return KpiValueType(boolVal=bool_value)
# If none of the above, treat it as a string
return KpiValueType(stringVal=value)
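
    # Example (illustrative): ConverValueToKpiValueType("42") -> KpiValueType(int64Val=42),
    # "3.14" -> KpiValueType(floatVal=3.14), "true" -> KpiValueType(boolVal=True),
    # anything else (e.g. "up") -> KpiValueType(stringVal="up").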

    def delivery_callback(self, err, msg):
        if err: LOGGER.warning('Message delivery failed: {:}'.format(err))
        else:   LOGGER.debug('Message delivered to topic {:}'.format(msg.topic()))
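
# Minimal usage sketch (illustrative, not part of this module): serving this implementation over gRPC,
# assuming the generated helper add_KpiValueAPIServiceServicer_to_server from kpi_value_api_pb2_grpc
# and an arbitrary port chosen for the example:
#   from concurrent import futures
#   from common.proto.kpi_value_api_pb2_grpc import add_KpiValueAPIServiceServicer_to_server
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#   add_KpiValueAPIServiceServicer_to_server(KpiValueApiServiceServicerImpl(), server)
#   server.add_insecure_port('[::]:30020')
#   server.start()
#   server.wait_for_termination()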