diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e2d653e0360b694891adc966d6d0b1124ed72ac4..0c5ff9325944d1a5a54d941d32d6a45782257970 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -45,6 +45,9 @@ include:
   #- local: '/src/dlt/.gitlab-ci.yml'
   - local: '/src/load_generator/.gitlab-ci.yml'
   - local: '/src/bgpls_speaker/.gitlab-ci.yml'
+  - local: '/src/kpi_manager/.gitlab-ci.yml'
+  - local: '/src/kpi_value_api/.gitlab-ci.yml'
+  - local: '/src/kpi_value_writer/.gitlab-ci.yml'
 
   # This should be last one: end-to-end integration tests
   - local: '/src/tests/.gitlab-ci.yml'
diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index f61cdb991b09b389b20fbb82f619c0f8f3b4cdc2..62f36a2c138c99b1ee666c8c5397083266ad699d 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -340,7 +340,7 @@ for COMPONENT in $TFS_COMPONENTS; do
     echo "  Deploying '$COMPONENT' component to Kubernetes..."
     DEPLOY_LOG="$TMP_LOGS_FOLDER/deploy_${COMPONENT}.log"
     kubectl --namespace $TFS_K8S_NAMESPACE apply -f "$MANIFEST" > "$DEPLOY_LOG"
-    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/")
+    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/g")
     #kubectl --namespace $TFS_K8S_NAMESPACE scale deployment --replicas=0 ${COMPONENT_OBJNAME}service >> "$DEPLOY_LOG"
     #kubectl --namespace $TFS_K8S_NAMESPACE scale deployment --replicas=1 ${COMPONENT_OBJNAME}service >> "$DEPLOY_LOG"
 
@@ -391,7 +391,7 @@ printf "\n"
 
 for COMPONENT in $TFS_COMPONENTS; do
     echo "Waiting for '$COMPONENT' component..."
-    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/")
+    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/g")
     kubectl wait --namespace $TFS_K8S_NAMESPACE \
         --for='condition=available' --timeout=90s deployment/${COMPONENT_OBJNAME}service
     WAIT_EXIT_CODE=$?
diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh
index d43be66e1e9843f85e34442d00b10a11d2903c43..4953b49e0a437becfda1648c722bcdcf92c58d93 100755
--- a/scripts/run_tests_locally-kpi-DB.sh
+++ b/scripts/run_tests_locally-kpi-DB.sh
@@ -24,5 +24,7 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_manager/tests/test_kpi_db.py
diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh
index db6e786835bcd4550b53394aef23aaa670d43b08..a6a24f90db93d56300ac997bd00675c479ef13ae 100755
--- a/scripts/run_tests_locally-kpi-manager.sh
+++ b/scripts/run_tests_locally-kpi-manager.sh
@@ -24,5 +24,7 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_manager/tests/test_kpi_manager.py
diff --git a/scripts/run_tests_locally-kpi-prom-writer.sh b/scripts/run_tests_locally-kpi-prom-writer.sh
index 1179cbf866969607b880eb41be5c25ab95007bfc..8865a8a34495a032525c7585a409f4c32c7249df 100755
--- a/scripts/run_tests_locally-kpi-prom-writer.sh
+++ b/scripts/run_tests_locally-kpi-prom-writer.sh
@@ -19,5 +19,7 @@ PROJECTDIR=`pwd`
 cd $PROJECTDIR/src
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_value_writer/tests/test_metric_writer_to_prom.py
diff --git a/src/kpi_manager/.gitlab-ci.yml b/src/kpi_manager/.gitlab-ci.yml
index 6aef328ea51e835eb06b286478f26d83f9a80f13..498cfd89fb3da85fec1b2ad0c930408eab215dc5 100644
--- a/src/kpi_manager/.gitlab-ci.yml
+++ b/src/kpi_manager/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-manager:
   variables:
-    IMAGE_NAME: 'kpi-manager' # name of the microservice
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-manager:
 # Apply unit test to the component
 unit_test kpi-manager:
   variables:
-    IMAGE_NAME: 'kpi-manager' # name of the microservice
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -68,8 +68,6 @@ unit_test kpi-manager:
     - docker ps -a
     - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
     - echo $CRDB_ADDRESS
-    - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
-    - echo $NATS_ADDRESS
     - >
       docker run --name $IMAGE_NAME -d -p 30010:30010
       --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require"
diff --git a/src/kpi_manager/database/KpiEngine.py b/src/kpi_manager/database/KpiEngine.py
index 42bda9527dfcb1e5cef0e229d260fd67f4b4a6d5..dff406de666b5f68539b8897fa26e0b3ad51286b 100644
--- a/src/kpi_manager/database/KpiEngine.py
+++ b/src/kpi_manager/database/KpiEngine.py
@@ -27,11 +27,11 @@ class KpiEngine:
         if crdb_uri is None:
             CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
             CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
-            CRDB_DATABASE  = get_setting('CRDB_DATABASE')
+            CRDB_DATABASE  = 'tfs_kpi_mgmt'             # TODO: define and read a dedicated setting, e.g. get_setting('CRDB_DATABASE_KPI_MGMT')
             CRDB_USERNAME  = get_setting('CRDB_USERNAME')
             CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
             CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
-        crdb_uri = CRDB_URI_TEMPLATE.format(
+            crdb_uri = CRDB_URI_TEMPLATE.format(
                 CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
         # crdb_uri = CRDB_URI_TEMPLATE.format(
         #         CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py
index 5b2b586b6d72cf73ff3362f9240d1171e8a9974d..dcd28489ba12586373d06ffbedaa02ddce0adae4 100644
--- a/src/kpi_manager/database/Kpi_DB.py
+++ b/src/kpi_manager/database/Kpi_DB.py
@@ -18,10 +18,10 @@ from sqlalchemy.orm import sessionmaker
 from kpi_manager.database.KpiEngine import KpiEngine
 from kpi_manager.database.KpiModel import Kpi as KpiModel
 from common.method_wrappers.ServiceExceptions import ( 
-    AlreadyExistsException, OperationFailedException)
+    AlreadyExistsException, OperationFailedException, NotFoundException)
 
 LOGGER = logging.getLogger(__name__)
-DB_NAME = "kpi"
+DB_NAME = "tfs_kpi_mgmt"
 
 class KpiDB:
     def __init__(self):
@@ -86,7 +86,7 @@ class KpiDB:
                 return entity
             else:
                 LOGGER.debug(f"{model.__name__} ID not found: {str(id_to_search)}")
-                return None
+                raise NotFoundException(model.__name__, id_to_search, extra_details=["Row not found with ID"])
         except Exception as e:
             session.rollback()
             LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}")
diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
index 05292fc5b14feaf079cc7691c650775965cc9148..bea2c78b491fe473584aa01a716d7a58eda8b2b7 100644
--- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
+++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
@@ -52,13 +52,8 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         try: 
             kpi_id_to_search = request.kpi_id.uuid
             row = self.kpi_db_obj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
-            if row is not None:
-                response = KpiModel.convert_row_to_KpiDescriptor(row)
-                return response
-            if row is None:
-                print ('No matching row found for kpi id: {:}'.format(kpi_id_to_search))
-                LOGGER.info('No matching row found kpi id: {:}'.format(kpi_id_to_search))
-                return Empty()
+            response = KpiModel.convert_row_to_KpiDescriptor(row)
+            return response
         except Exception as e:
             print ('Unable to search kpi id. {:}'.format(e))
             LOGGER.info('Unable to search kpi id. {:}'.format(e))
diff --git a/src/kpi_value_api/.gitlab-ci.yml b/src/kpi_value_api/.gitlab-ci.yml
index c9107abaac83199cc18ea720e4e3e5e14e353189..166e9d3cbcf3eb09c914384a9906853dddd7bfb5 100644
--- a/src/kpi_value_api/.gitlab-ci.yml
+++ b/src/kpi_value_api/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-value-api:
   variables:
-    IMAGE_NAME: 'kpi-value-api' # name of the microservice
+    IMAGE_NAME: 'kpi_value_api' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-value-api:
 # Apply unit test to the component
 unit_test kpi-value-api:
   variables:
-    IMAGE_NAME: 'kpi-value-api' # name of the microservice
+    IMAGE_NAME: 'kpi_value_api' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
diff --git a/src/kpi_value_api/service/__main__.py b/src/kpi_value_api/service/__main__.py
index 8b4ebe296e2c4f193aa1fc99aede9364556c2094..f0f265a48812c0ae475e4e079a09b83cdfb7c69e 100644
--- a/src/kpi_value_api/service/__main__.py
+++ b/src/kpi_value_api/service/__main__.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import logging, signal, sys, threading
-from prometheus_client import start_http_server
 from common.Settings import  get_log_level
 from .KpiValueApiService import KpiValueApiService
 
diff --git a/src/kpi_value_writer/.gitlab-ci.yml b/src/kpi_value_writer/.gitlab-ci.yml
index 52b1b8fe6fd41c5fd8b43adf4ca8da464c8c08ba..25619ce7f8b4346172587dbf2e804896aff20e4d 100644
--- a/src/kpi_value_writer/.gitlab-ci.yml
+++ b/src/kpi_value_writer/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-value-writer:
   variables:
-    IMAGE_NAME: 'kpi-value-writer' # name of the microservice
+    IMAGE_NAME: 'kpi_value_writer' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-value-writer:
 # Apply unit test to the component
 unit_test kpi-value-writer:
   variables:
-    IMAGE_NAME: 'kpi-value-writer' # name of the microservice
+    IMAGE_NAME: 'kpi_value_writer' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
diff --git a/src/kpi_value_writer/service/KpiValueWriter.py b/src/kpi_value_writer/service/KpiValueWriter.py
index a4b10ed6391c4ff0b0ee45a287ce9f12d77e2dea..26bab44657606b1f3edc14659d128c5ccc7a6890 100644
--- a/src/kpi_value_writer/service/KpiValueWriter.py
+++ b/src/kpi_value_writer/service/KpiValueWriter.py
@@ -17,20 +17,29 @@ import threading
 from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
 from common.proto.kpi_value_api_pb2 import KpiValue
 from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId
+from common.Settings import get_service_port_grpc
+from common.Constants import ServiceNameEnum
+from common.tools.service.GenericGrpcService import GenericGrpcService
+
 
 from confluent_kafka import KafkaError
 from confluent_kafka import Consumer as KafkaConsumer
 
 from kpi_manager.client.KpiManagerClient import KpiManagerClient
 # -- test import --
-from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request
+# from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request
 from .MetricWriterToPrometheus import MetricWriterToPrometheus
 
 
 LOGGER           = logging.getLogger(__name__)
 ACTIVE_CONSUMERS = []
+METRIC_WRITER    = MetricWriterToPrometheus()
+
+class KpiValueWriter(GenericGrpcService):
+    def __init__(self, cls_name : str = __name__) -> None:
+        port = get_service_port_grpc(ServiceNameEnum.KPIVALUEWRITER)
+        super().__init__(port, cls_name=cls_name)
 
-class KpiValueWriter:
     @staticmethod
     def RunKafkaConsumer():
         thread = threading.Thread(target=KpiValueWriter.KafkaConsumer, args=())
@@ -44,11 +53,7 @@ class KpiValueWriter:
               'group.id'          : __class__,
               'auto.offset.reset' : 'latest'}
         )
-        
-        metric_writer_to_prom = MetricWriterToPrometheus()
         kpi_manager_client = KpiManagerClient()
-        print("Kpi manger client created: {:}".format(kpi_manager_client))
-
         kafka_consumer.subscribe([KafkaTopic.VALUE.value])
         LOGGER.debug("Kafka Consumer start listenng on topic: {:}".format(KafkaTopic.VALUE.value))
         print("Kafka Consumer start listenng on topic: {:}".format(KafkaTopic.VALUE.value))
@@ -84,15 +89,15 @@ class KpiValueWriter:
         try:
             kpi_descriptor_object = KpiDescriptor()
             kpi_descriptor_object = kpi_manager_client.GetKpiDescriptor(kpi_id)
-
-            print("kpi descriptor received: {:}".format(kpi_descriptor_object))
-            if isinstance (kpi_descriptor_object, KpiDescriptor):
+            if kpi_descriptor_object.kpi_id.kpi_id.uuid == kpi_id.kpi_id.uuid:
+            # print("kpi descriptor received: {:}".format(kpi_descriptor_object))
+            # if isinstance (kpi_descriptor_object, KpiDescriptor):
                 LOGGER.info("Extracted KpiDescriptor: {:}".format(kpi_descriptor_object))
                 print("Extracted KpiDescriptor: {:}".format(kpi_descriptor_object))
-                MetricWriterToPrometheus.create_and_expose_cooked_kpi(kpi_descriptor_object, kpi_value)
+                METRIC_WRITER.create_and_expose_cooked_kpi(kpi_descriptor_object, kpi_value)
             else:
-                LOGGER.info("Error in extracting KpiDescriptor {:}".format(kpi_descriptor_object))
-                print("Error in extracting KpiDescriptor {:}".format(kpi_descriptor_object))
+                LOGGER.info("No KPI Descriptor found in DB for Kpi ID: {:}".format(kpi_id))
+                print("No KPI Descriptor found in DB for Kpi ID: {:}".format(kpi_id))
         except Exception as e:
             LOGGER.info("Unable to get KpiDescriptor. Error: {:}".format(e))
             print ("Unable to get KpiDescriptor. Error: {:}".format(e))
diff --git a/src/kpi_value_writer/service/KpiWriterOld.py b/src/kpi_value_writer/service/KpiWriterOld.py
deleted file mode 100644
index b9a4316b0f5decb2364127052ebf0b44edc05fbd..0000000000000000000000000000000000000000
--- a/src/kpi_value_writer/service/KpiWriterOld.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# read Kafka stream from Kafka topic
-
-import ast
-import time
-import threading
-from confluent_kafka import KafkaError
-from prometheus_client import start_http_server, Gauge, CollectorRegistry
-from confluent_kafka import Consumer as KafkaConsumer
-
-KAFKA_SERVER_IP = '127.0.0.1:9092'
-KAFKA_TOPICS    = {'request' : 'topic_request', 'response': 'topic_response',
-                   'raw'     : 'topic_raw'    , 'labeled' : 'topic_labeled'}
-CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP,
-                   'group.id'          : 'kpi_writer',
-                   'auto.offset.reset' : 'latest'}
-KPIs_TO_SEARCH  = ["node_network_receive_packets_total",
-                   "node_network_receive_bytes_total",
-                   "node_network_transmit_bytes_total",
-                   "process_open_fds"]
-PROM_METRICS    = {}
-KAFKA_REGISTERY   = CollectorRegistry()
-
-class KpiWriter:
-    def __init__(self) -> None:
-        pass
-
-    @staticmethod
-    def kpi_writer():
-        KpiWriter.create_prom_metrics_name()
-        threading.Thread(target=KpiWriter.kafka_listener, args=()).start() 
-
-    @staticmethod
-    def kafka_listener():
-        """
-        listener for events on Kafka topic.
-        """
-        # Start up the server to expose the metrics at port number mention below.
-        start_http_server(8101, registry=KAFKA_REGISTERY)
-        kafka_consumer = KafkaConsumer(CONSUMER_CONFIG)
-        kafka_consumer.subscribe([KAFKA_TOPICS['labeled']])
-        while True:
-            receive_msg = kafka_consumer.poll(2.0)
-            if receive_msg is None:
-                # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['raw'])     # added for debugging purposes
-                continue
-            elif receive_msg.error():
-                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
-                    continue
-                else:
-                    print("Consumer error: {}".format(receive_msg.error()))
-                    continue
-            try:
-                new_event = receive_msg.value().decode('utf-8')
-                # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event))
-                # LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event))
-                KpiWriter.write_metric_to_promtheus(new_event)
-            except Exception as e:
-                print(f"Error to consume event from topic: {KAFKA_TOPICS['labeled']}. Error detail:  {str(e)}")
-                continue
-
-    # send metric to Prometheus
-    @staticmethod
-    def write_metric_to_promtheus(event):
-        event = ast.literal_eval(event)         # converted into dict
-        print("New recevied event: {:}".format(event))
-        event_kpi_name = event['kpi_description']
-        if event_kpi_name in KPIs_TO_SEARCH:
-            PROM_METRICS[event_kpi_name].labels(
-                kpi_id          = event['kpi_id'],
-                kpi_sample_type = event['kpi_sample_type'],
-                device_id       = event['device_id'],
-                endpoint_id     = event['endpoint_id'],
-                service_id      = event['service_id'],
-                slice_id        = event['slice_id'],
-                connection_id   = event['connection_id'],
-                link_id         = event['link_id']
-            ).set(float(event['kpi_value']))
-        time.sleep(0.05)
-
-    @staticmethod
-    def create_prom_metrics_name():
-        metric_tags = ['kpi_id','kpi_sample_type','device_id',
-                       'endpoint_id','service_id','slice_id','connection_id','link_id']
-        for metric_key in KPIs_TO_SEARCH:
-            metric_name        = metric_key
-            metric_description = "description of " + str(metric_key)
-            try:
-                PROM_METRICS[metric_key] = Gauge ( 
-                    metric_name, metric_description, metric_tags, 
-                    registry=KAFKA_REGISTERY )
-                # print("Metric pushed to Prometheus: {:}".format(PROM_METRICS[metric_key]))
-            except ValueError as e:
-                if 'Duplicated timeseries' in str(e):
-                    print("Metric {:} is already registered. Skipping.".format(metric_name))