diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e2d653e0360b694891adc966d6d0b1124ed72ac4..0c5ff9325944d1a5a54d941d32d6a45782257970 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -45,6 +45,9 @@ include:
   #- local: '/src/dlt/.gitlab-ci.yml'
   - local: '/src/load_generator/.gitlab-ci.yml'
   - local: '/src/bgpls_speaker/.gitlab-ci.yml'
+  - local: '/src/kpi_manager/.gitlab-ci.yml'
+  - local: '/src/kpi_value_api/.gitlab-ci.yml'
+  - local: '/src/kpi_value_writer/.gitlab-ci.yml'
 
   # This should be last one: end-to-end integration tests
   - local: '/src/tests/.gitlab-ci.yml'
diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index f61cdb991b09b389b20fbb82f619c0f8f3b4cdc2..62f36a2c138c99b1ee666c8c5397083266ad699d 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -340,7 +340,7 @@ for COMPONENT in $TFS_COMPONENTS; do
     echo "  Deploying '$COMPONENT' component to Kubernetes..."
     DEPLOY_LOG="$TMP_LOGS_FOLDER/deploy_${COMPONENT}.log"
     kubectl --namespace $TFS_K8S_NAMESPACE apply -f "$MANIFEST" > "$DEPLOY_LOG"
-    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/")
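+    # The 'g' flag replaces every underscore, so multi-word component names
+    # (e.g. kpi_value_api) map to valid Kubernetes object names.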
+    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/g")
     #kubectl --namespace $TFS_K8S_NAMESPACE scale deployment --replicas=0 ${COMPONENT_OBJNAME}service >> "$DEPLOY_LOG"
     #kubectl --namespace $TFS_K8S_NAMESPACE scale deployment --replicas=1 ${COMPONENT_OBJNAME}service >> "$DEPLOY_LOG"
 
@@ -391,7 +391,7 @@ printf "\n"
 
 for COMPONENT in $TFS_COMPONENTS; do
     echo "Waiting for '$COMPONENT' component..."
-    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/")
+    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/\_/-/g")
     kubectl wait --namespace $TFS_K8S_NAMESPACE \
         --for='condition=available' --timeout=90s deployment/${COMPONENT_OBJNAME}service
     WAIT_EXIT_CODE=$?
diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh
index d43be66e1e9843f85e34442d00b10a11d2903c43..4953b49e0a437becfda1648c722bcdcf92c58d93 100755
--- a/scripts/run_tests_locally-kpi-DB.sh
+++ b/scripts/run_tests_locally-kpi-DB.sh
@@ -24,5 +24,7 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
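+# Resolve the CockroachDB public service IP and point the tests at the KPI
+# management database; assumes CRDB_NAMESPACE is already set in the calling
+# environment (e.g. exported by the deployment scripts).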
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_manager/tests/test_kpi_db.py
diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh
index db6e786835bcd4550b53394aef23aaa670d43b08..a6a24f90db93d56300ac997bd00675c479ef13ae 100755
--- a/scripts/run_tests_locally-kpi-manager.sh
+++ b/scripts/run_tests_locally-kpi-manager.sh
@@ -24,5 +24,7 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_manager/tests/test_kpi_manager.py
diff --git a/scripts/run_tests_locally-kpi-prom-writer.sh b/scripts/run_tests_locally-kpi-prom-writer.sh
index 1179cbf866969607b880eb41be5c25ab95007bfc..8865a8a34495a032525c7585a409f4c32c7249df 100755
--- a/scripts/run_tests_locally-kpi-prom-writer.sh
+++ b/scripts/run_tests_locally-kpi-prom-writer.sh
@@ -19,5 +19,7 @@ PROJECTDIR=`pwd`
 cd $PROJECTDIR/src
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
+CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}')
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_value_writer/tests/test_metric_writer_to_prom.py
diff --git a/src/kpi_manager/.gitlab-ci.yml b/src/kpi_manager/.gitlab-ci.yml
index 6aef328ea51e835eb06b286478f26d83f9a80f13..498cfd89fb3da85fec1b2ad0c930408eab215dc5 100644
--- a/src/kpi_manager/.gitlab-ci.yml
+++ b/src/kpi_manager/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-manager:
   variables:
-    IMAGE_NAME: 'kpi-manager' # name of the microservice
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-manager:
 # Apply unit test to the component
 unit_test kpi-manager:
   variables:
-    IMAGE_NAME: 'kpi-manager' # name of the microservice
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -68,8 +68,6 @@ unit_test kpi-manager:
     - docker ps -a
     - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
     - echo $CRDB_ADDRESS
-    - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
-    - echo $NATS_ADDRESS
     - >
       docker run --name $IMAGE_NAME -d -p 30010:30010
       --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require"
diff --git a/src/kpi_manager/database/KpiEngine.py b/src/kpi_manager/database/KpiEngine.py
index 42bda9527dfcb1e5cef0e229d260fd67f4b4a6d5..dff406de666b5f68539b8897fa26e0b3ad51286b 100644
--- a/src/kpi_manager/database/KpiEngine.py
+++ b/src/kpi_manager/database/KpiEngine.py
@@ -27,11 +27,11 @@ class KpiEngine:
         if crdb_uri is None:
             CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
             CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
-            CRDB_DATABASE  = get_setting('CRDB_DATABASE')
+            CRDB_DATABASE  = 'tfs_kpi_mgmt'             # TODO: read from a setting, e.g. get_setting('CRDB_DATABASE_KPI_MGMT')
             CRDB_USERNAME  = get_setting('CRDB_USERNAME')
             CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
             CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
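+            # Compose the URI from the individual settings only when no
+            # explicit crdb_uri was passed in; an explicit URI takes precedence.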
-        crdb_uri = CRDB_URI_TEMPLATE.format(
+            crdb_uri = CRDB_URI_TEMPLATE.format(
                 CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
         # crdb_uri = CRDB_URI_TEMPLATE.format(
         #         CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py
index 5b2b586b6d72cf73ff3362f9240d1171e8a9974d..4b60640707c8d0c2ce90e5ab135ddf6fd4c91f63 100644
--- a/src/kpi_manager/database/Kpi_DB.py
+++ b/src/kpi_manager/database/Kpi_DB.py
@@ -18,10 +18,10 @@ from sqlalchemy.orm import sessionmaker
 from kpi_manager.database.KpiEngine import KpiEngine
 from kpi_manager.database.KpiModel import Kpi as KpiModel
 from common.method_wrappers.ServiceExceptions import ( 
-    AlreadyExistsException, OperationFailedException)
+    AlreadyExistsException, OperationFailedException, NotFoundException)
 
 LOGGER = logging.getLogger(__name__)
-DB_NAME = "kpi"
+DB_NAME = "tfs_kpi_mgmt"
 
 class KpiDB:
     def __init__(self):
@@ -85,7 +85,8 @@ class KpiDB:
                 # LOGGER.debug(f"{model.__name__} ID found: {str(entity)}")
                 return entity
             else:
-                LOGGER.debug(f"{model.__name__} ID not found: {str(id_to_search)}")
+                LOGGER.debug(f"{model.__name__} ID not found, No matching row: {str(id_to_search)}")
+                print("{:} ID not found, No matching row: {:}".format(model.__name__, id_to_search))
                 return None
         except Exception as e:
             session.rollback()
diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
index 05292fc5b14feaf079cc7691c650775965cc9148..fd22474829ea0dfb6b1a25e70bbb4d5440c0216b 100644
--- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
+++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
@@ -52,13 +52,13 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         try: 
             kpi_id_to_search = request.kpi_id.uuid
             row = self.kpi_db_obj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
-            if row is not None:
-                response = KpiModel.convert_row_to_KpiDescriptor(row)
-                return response
             if row is None:
                 print ('No matching row found for kpi id: {:}'.format(kpi_id_to_search))
                 LOGGER.info('No matching row found kpi id: {:}'.format(kpi_id_to_search))
                 return Empty()
+            else:
+                response = KpiModel.convert_row_to_KpiDescriptor(row)
+                return response
         except Exception as e:
             print ('Unable to search kpi id. {:}'.format(e))
             LOGGER.info('Unable to search kpi id. {:}'.format(e))
diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py
index b41e5139db85cfc462cff1e9545fbc8476c67939..f0d9526d33694a683b70180eb3bc6de833bf1cfa 100755
--- a/src/kpi_manager/tests/test_kpi_manager.py
+++ b/src/kpi_manager/tests/test_kpi_manager.py
@@ -37,7 +37,7 @@ from kpi_manager.tests.test_messages import create_kpi_descriptor_request, creat
 from kpi_manager.service.KpiManagerService import KpiManagerService
 from kpi_manager.client.KpiManagerClient import KpiManagerClient
 from kpi_manager.tests.test_messages import create_kpi_descriptor_request
-from kpi_value_writer.tests.test_messages import create_kpi_id_request
+from kpi_manager.tests.test_messages import create_kpi_id_request
 
 
 #from monitoring.service.NameMapping import NameMapping
diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py
index 870660658a2808fc6db2e98a140497980022e5a7..7b5c45859b6c10056211f9f33df950d9668c11ea 100644
--- a/src/kpi_manager/tests/test_messages.py
+++ b/src/kpi_manager/tests/test_messages.py
@@ -18,6 +18,12 @@ from common.proto.kpi_sample_types_pb2 import KpiSampleType
 from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\
                              ConnectionId, EndPointId
 
+
+def create_kpi_id_request():
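+    # Test helper: build a KpiId message carrying a random UUID.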
+    _create_kpi_id = kpi_manager_pb2.KpiId()
+    _create_kpi_id.kpi_id.uuid = str(uuid.uuid4())
+    return _create_kpi_id
+
 def create_kpi_descriptor_request(descriptor_name: str = "Test_name"):
     _create_kpi_request                                    = kpi_manager_pb2.KpiDescriptor()
     _create_kpi_request.kpi_id.kpi_id.uuid                 = str(uuid.uuid4())
diff --git a/src/kpi_value_api/.gitlab-ci.yml b/src/kpi_value_api/.gitlab-ci.yml
index c9107abaac83199cc18ea720e4e3e5e14e353189..166e9d3cbcf3eb09c914384a9906853dddd7bfb5 100644
--- a/src/kpi_value_api/.gitlab-ci.yml
+++ b/src/kpi_value_api/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-value-api:
   variables:
-    IMAGE_NAME: 'kpi-value-api' # name of the microservice
+    IMAGE_NAME: 'kpi_value_api' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-value-api:
 # Apply unit test to the component
 unit_test kpi-value-api:
   variables:
-    IMAGE_NAME: 'kpi-value-api' # name of the microservice
+    IMAGE_NAME: 'kpi_value_api' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
diff --git a/src/kpi_value_api/service/__main__.py b/src/kpi_value_api/service/__main__.py
index 8b4ebe296e2c4f193aa1fc99aede9364556c2094..f0f265a48812c0ae475e4e079a09b83cdfb7c69e 100644
--- a/src/kpi_value_api/service/__main__.py
+++ b/src/kpi_value_api/service/__main__.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import logging, signal, sys, threading
-from prometheus_client import start_http_server
 from common.Settings import  get_log_level
 from .KpiValueApiService import KpiValueApiService
 
diff --git a/src/kpi_value_writer/.gitlab-ci.yml b/src/kpi_value_writer/.gitlab-ci.yml
index 52b1b8fe6fd41c5fd8b43adf4ca8da464c8c08ba..25619ce7f8b4346172587dbf2e804896aff20e4d 100644
--- a/src/kpi_value_writer/.gitlab-ci.yml
+++ b/src/kpi_value_writer/.gitlab-ci.yml
@@ -15,7 +15,7 @@
 # Build, tag, and push the Docker image to the GitLab Docker registry
 build kpi-value-writer:
   variables:
-    IMAGE_NAME: 'kpi-value-writer' # name of the microservice
+    IMAGE_NAME: 'kpi_value_writer' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -41,7 +41,7 @@ build kpi-value-writer:
 # Apply unit test to the component
 unit_test kpi-value-writer:
   variables:
-    IMAGE_NAME: 'kpi-value-writer' # name of the microservice
+    IMAGE_NAME: 'kpi_value_writer' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
diff --git a/src/kpi_value_writer/service/KpiValueWriter.py b/src/kpi_value_writer/service/KpiValueWriter.py
index a4b10ed6391c4ff0b0ee45a287ce9f12d77e2dea..26bab44657606b1f3edc14659d128c5ccc7a6890 100644
--- a/src/kpi_value_writer/service/KpiValueWriter.py
+++ b/src/kpi_value_writer/service/KpiValueWriter.py
@@ -17,20 +17,29 @@ import threading
 from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
 from common.proto.kpi_value_api_pb2 import KpiValue
 from common.proto.kpi_manager_pb2 import KpiDescriptor, KpiId
+from common.Settings import get_service_port_grpc
+from common.Constants import ServiceNameEnum
+from common.tools.service.GenericGrpcService import GenericGrpcService
 
 from confluent_kafka import KafkaError
 from confluent_kafka import Consumer as KafkaConsumer
 
 from kpi_manager.client.KpiManagerClient import KpiManagerClient
 # -- test import --
-from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request
+# from kpi_value_writer.tests.test_messages import create_kpi_descriptor_request
 from .MetricWriterToPrometheus import MetricWriterToPrometheus
 
 
 LOGGER           = logging.getLogger(__name__)
 ACTIVE_CONSUMERS = []
+METRIC_WRITER    = MetricWriterToPrometheus()
+
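+# KpiValueWriter runs as a gRPC service: GenericGrpcService provides the
+# server lifecycle, while the Kafka consumer logic below stays in static
+# methods and reuses the module-level METRIC_WRITER instance.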
+class KpiValueWriter(GenericGrpcService):
+    def __init__(self, cls_name : str = __name__) -> None:
+        port = get_service_port_grpc(ServiceNameEnum.KPIVALUEWRITER)
+        super().__init__(port, cls_name=cls_name)
 
-class KpiValueWriter:
     @staticmethod
     def RunKafkaConsumer():
         thread = threading.Thread(target=KpiValueWriter.KafkaConsumer, args=())
@@ -44,11 +53,7 @@ class KpiValueWriter:
               'group.id'          : __class__,
               'auto.offset.reset' : 'latest'}
         )
-        
-        metric_writer_to_prom = MetricWriterToPrometheus()
         kpi_manager_client = KpiManagerClient()
-        print("Kpi manger client created: {:}".format(kpi_manager_client))
-
         kafka_consumer.subscribe([KafkaTopic.VALUE.value])
         LOGGER.debug("Kafka Consumer start listenng on topic: {:}".format(KafkaTopic.VALUE.value))
         print("Kafka Consumer start listenng on topic: {:}".format(KafkaTopic.VALUE.value))
@@ -84,15 +89,15 @@ class KpiValueWriter:
         try:
             kpi_descriptor_object = KpiDescriptor()
             kpi_descriptor_object = kpi_manager_client.GetKpiDescriptor(kpi_id)
-
-            print("kpi descriptor received: {:}".format(kpi_descriptor_object))
-            if isinstance (kpi_descriptor_object, KpiDescriptor):
+            if kpi_descriptor_object.kpi_id.kpi_id.uuid == kpi_id.kpi_id.uuid:
+            # print("kpi descriptor received: {:}".format(kpi_descriptor_object))
+            # if isinstance (kpi_descriptor_object, KpiDescriptor):
                 LOGGER.info("Extracted KpiDescriptor: {:}".format(kpi_descriptor_object))
                 print("Extracted KpiDescriptor: {:}".format(kpi_descriptor_object))
-                MetricWriterToPrometheus.create_and_expose_cooked_kpi(kpi_descriptor_object, kpi_value)
+                METRIC_WRITER.create_and_expose_cooked_kpi(kpi_descriptor_object, kpi_value)
             else:
-                LOGGER.info("Error in extracting KpiDescriptor {:}".format(kpi_descriptor_object))
-                print("Error in extracting KpiDescriptor {:}".format(kpi_descriptor_object))
+                LOGGER.info("No KPI Descriptor found in DB for Kpi ID: {:}".format(kpi_id))
+                print("No KPI Descriptor found in DB for Kpi ID: {:}".format(kpi_id))
         except Exception as e:
             LOGGER.info("Unable to get KpiDescriptor. Error: {:}".format(e))
             print ("Unable to get KpiDescriptor. Error: {:}".format(e))
diff --git a/src/kpi_value_writer/service/KpiWriterOld.py b/src/kpi_value_writer/service/KpiWriterOld.py
deleted file mode 100644
index b9a4316b0f5decb2364127052ebf0b44edc05fbd..0000000000000000000000000000000000000000
--- a/src/kpi_value_writer/service/KpiWriterOld.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# read Kafka stream from Kafka topic
-
-import ast
-import time
-import threading
-from confluent_kafka import KafkaError
-from prometheus_client import start_http_server, Gauge, CollectorRegistry
-from confluent_kafka import Consumer as KafkaConsumer
-
-KAFKA_SERVER_IP = '127.0.0.1:9092'
-KAFKA_TOPICS    = {'request' : 'topic_request', 'response': 'topic_response',
-                   'raw'     : 'topic_raw'    , 'labeled' : 'topic_labeled'}
-CONSUMER_CONFIG = {'bootstrap.servers' : KAFKA_SERVER_IP,
-                   'group.id'          : 'kpi_writer',
-                   'auto.offset.reset' : 'latest'}
-KPIs_TO_SEARCH  = ["node_network_receive_packets_total",
-                   "node_network_receive_bytes_total",
-                   "node_network_transmit_bytes_total",
-                   "process_open_fds"]
-PROM_METRICS    = {}
-KAFKA_REGISTERY   = CollectorRegistry()
-
-class KpiWriter:
-    def __init__(self) -> None:
-        pass
-
-    @staticmethod
-    def kpi_writer():
-        KpiWriter.create_prom_metrics_name()
-        threading.Thread(target=KpiWriter.kafka_listener, args=()).start() 
-
-    @staticmethod
-    def kafka_listener():
-        """
-        listener for events on Kafka topic.
-        """
-        # Start up the server to expose the metrics at port number mention below.
-        start_http_server(8101, registry=KAFKA_REGISTERY)
-        kafka_consumer = KafkaConsumer(CONSUMER_CONFIG)
-        kafka_consumer.subscribe([KAFKA_TOPICS['labeled']])
-        while True:
-            receive_msg = kafka_consumer.poll(2.0)
-            if receive_msg is None:
-                # print (" - Telemetry frontend listening on Kafka Topic: ", KAFKA_TOPICS['raw'])     # added for debugging purposes
-                continue
-            elif receive_msg.error():
-                if receive_msg.error().code() == KafkaError._PARTITION_EOF:
-                    continue
-                else:
-                    print("Consumer error: {}".format(receive_msg.error()))
-                    continue
-            try:
-                new_event = receive_msg.value().decode('utf-8')
-                # print("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event))
-                # LOGGER.info("New event on topic '{:}' is {:}".format(KAFKA_TOPICS['raw'], new_event))
-                KpiWriter.write_metric_to_promtheus(new_event)
-            except Exception as e:
-                print(f"Error to consume event from topic: {KAFKA_TOPICS['labeled']}. Error detail:  {str(e)}")
-                continue
-
-    # send metric to Prometheus
-    @staticmethod
-    def write_metric_to_promtheus(event):
-        event = ast.literal_eval(event)         # converted into dict
-        print("New recevied event: {:}".format(event))
-        event_kpi_name = event['kpi_description']
-        if event_kpi_name in KPIs_TO_SEARCH:
-            PROM_METRICS[event_kpi_name].labels(
-                kpi_id          = event['kpi_id'],
-                kpi_sample_type = event['kpi_sample_type'],
-                device_id       = event['device_id'],
-                endpoint_id     = event['endpoint_id'],
-                service_id      = event['service_id'],
-                slice_id        = event['slice_id'],
-                connection_id   = event['connection_id'],
-                link_id         = event['link_id']
-            ).set(float(event['kpi_value']))
-        time.sleep(0.05)
-
-    @staticmethod
-    def create_prom_metrics_name():
-        metric_tags = ['kpi_id','kpi_sample_type','device_id',
-                       'endpoint_id','service_id','slice_id','connection_id','link_id']
-        for metric_key in KPIs_TO_SEARCH:
-            metric_name        = metric_key
-            metric_description = "description of " + str(metric_key)
-            try:
-                PROM_METRICS[metric_key] = Gauge ( 
-                    metric_name, metric_description, metric_tags, 
-                    registry=KAFKA_REGISTERY )
-                # print("Metric pushed to Prometheus: {:}".format(PROM_METRICS[metric_key]))
-            except ValueError as e:
-                if 'Duplicated timeseries' in str(e):
-                    print("Metric {:} is already registered. Skipping.".format(metric_name))
diff --git a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/Hardware.py b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/Hardware.py
index a7404b924a44e9125dbf84bdcdfab3b9af790e5d..2282de557c1a80227c7d50e7c125ab4fe538bd28 100644
--- a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/Hardware.py
+++ b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/Hardware.py
@@ -50,4 +50,4 @@ class Hardware(Resource):
             LOGGER.exception(MSG.format(str(device_uuid)))
             response = jsonify({'error': str(e)})
             response.status_code = HTTP_SERVERERROR
-        return response
\ No newline at end of file
+        return response
diff --git a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/HardwareMultipleDevices.py b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/HardwareMultipleDevices.py
index 5258455e5f32d62756795288bbe7da8819404e08..b1beff518bb3997fc04a79e78c3467b47bd51483 100644
--- a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/HardwareMultipleDevices.py
+++ b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/HardwareMultipleDevices.py
@@ -1,3 +1,17 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import logging
 from flask import request
 from flask.json import jsonify
@@ -33,4 +47,4 @@ class HardwareMultipleDevices(Resource):
             MSG = 'Something went wrong Retrieving Hardware of Devices({:s})'
             response = jsonify({'error': str(e)})
             response.status_code = HTTP_SERVERERROR
-        return response
\ No newline at end of file
+        return response
diff --git a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/YangHandler.py b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/YangHandler.py
index aa0a90908dabefa2e6fbeb59539bc5cbdec8eb4b..7662261e97b35958f036dc0e69913af7947b9403 100644
--- a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/YangHandler.py
+++ b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/YangHandler.py
@@ -46,61 +46,45 @@ class YangHandler:
         dnode.free()
         return message
 
-    
     @staticmethod
     def convert_to_iso_date(date_str: str) -> Optional[str]:
         date_str = date_str.strip('"')
-        # Define the regex pattern for ISO 8601 date format
         pattern = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[\+\-]\d{2}:\d{2})"
-        # Check if the input date string matches the pattern
         if re.match(pattern, date_str):
-            return date_str  # Already in ISO format
+            return date_str
         else:
             try:
-                # Parse the input date string as a datetime object
                 datetime_obj = datetime.datetime.strptime(date_str, "%Y-%m-%d")
-                # Convert to ISO format
                 iso_date = datetime_obj.isoformat() + "Z"
                 return iso_date
             except ValueError:
-                return None  # Invalid date format
-
+                return None
 
     def compose(self, device : Device) -> Dict:
         hardware = self._yang_context.create_data_path('/ietf-network-hardware-inventory:network-hardware-inventory')
         network_elements = hardware.create_path('network-elements')
-            
+
         network_element = network_elements.create_path('network-element[uuid="{:s}"]'.format(device.device_id.device_uuid.uuid))
         network_element.create_path('uuid', device.device_id.device_uuid.uuid)
         network_element.create_path('name', device.name)
         components = network_element.create_path('components')
         physical_index = 1
-        
+
         for component in device.components:
             attributes = component.attributes
-
             component_new = components.create_path('component[uuid="{:s}"]'.format(component.component_uuid.uuid))
             component_new.create_path('name', component.name)
-
-            #Cambiar las clases especiales, su formato  y añadir isfru 
             component_type = component.type
             if component_type == "TRANSCEIVER" :
                 component_type = "module"
-
             if component_type == "FRU" :
                 component_type = "slack"
-                
+
             component_type = component_type.replace("_", "-").lower()
             component_type = 'iana-hardware:' + component_type
-
             component_new.create_path('class', component_type)
-
-            #Añadir resto de atributos en IETF
-
             physical_index += 1
-
             component_new.create_path('description', attributes["description"].replace('/"',""))
-            
             if "CHASSIS" not in component.type:
                 parent_component_references = component_new.create_path('parent-component-references')
                 parent = parent_component_references.create_path('component-reference[index="{:d}"]'.format(physical_index))
@@ -108,7 +92,6 @@ class YangHandler:
                     if component.parent == component_parent.name : 
                       parent.create_path('uuid', component_parent.component_uuid.uuid)
                       break
-
             if attributes["mfg-date"] != "":
                 mfg_date = self.convert_to_iso_date(attributes["mfg-date"])
                 component_new.create_path('mfg-date', mfg_date)
@@ -124,7 +107,7 @@ class YangHandler:
                     component_new.create_path('is-fru', True)
                 elif 'false' in removable:
                     component_new.create_path('is-fru', False)
-                
+
             if attributes["id"]:
                 try:
                     if  "CHASSIS" in component.type :  
@@ -137,15 +120,12 @@ class YangHandler:
                     continue
 
             component_new.create_path('uri', component.name)
-   
-
             component_new.create_path('uuid', component.component_uuid.uuid)
-
             for child in device.components:
                 if component.name == child.parent : 
                     component_new.create_path('contained-child', child.component_uuid.uuid)
 
         return json.loads(hardware.print_mem('json'))
-                                    
+
     def destroy(self) -> None:
-        self._yang_context.destroy()
\ No newline at end of file
+        self._yang_context.destroy()
diff --git a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/__init__.py b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/__init__.py
index acec9ac45a943b6182cc8bdd64b9356565c67e3c..ba774650e89e26609573a364be520c2d1bd6df84 100644
--- a/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/__init__.py
+++ b/src/nbi/service/rest_server/nbi_plugins/ietf_hardware/__init__.py
@@ -16,10 +16,9 @@ from nbi.service.rest_server.nbi_plugins.ietf_hardware.Hardware import Hardware
 from nbi.service.rest_server.nbi_plugins.ietf_hardware.HardwareMultipleDevices import HardwareMultipleDevices
 from nbi.service.rest_server.RestServer import RestServer
 
-URL_PREFIX_device = "/restconf/data/device=<path:device_uuid>/ietf-network-hardware-inventory:network-hardware-inventory"
-URL_PREFIX_hardware = "/restconf/data/ietf-network-hardware-inventory:network-hardware-inventory"
+URL_PREFIX_DEVICE   = "/restconf/data/device=<path:device_uuid>/ietf-network-hardware-inventory:network-hardware-inventory"
+URL_PREFIX_HARDWARE = "/restconf/data/ietf-network-hardware-inventory:network-hardware-inventory"
 
 def register_ietf_hardware(rest_server: RestServer):
-    rest_server.add_resource(Hardware, URL_PREFIX_device)
-    rest_server.add_resource(HardwareMultipleDevices, URL_PREFIX_hardware)
-    
\ No newline at end of file
+    rest_server.add_resource(Hardware, URL_PREFIX_DEVICE)
+    rest_server.add_resource(HardwareMultipleDevices, URL_PREFIX_HARDWARE)
diff --git a/src/nbi/service/rest_server/nbi_plugins/tfs_api/Resources.py b/src/nbi/service/rest_server/nbi_plugins/tfs_api/Resources.py
index 0e570fe5ae31965bb9a73fc623863940e9f6613e..f360e318127706b4b4c8fdc4130dfdfc0ba711c0 100644
--- a/src/nbi/service/rest_server/nbi_plugins/tfs_api/Resources.py
+++ b/src/nbi/service/rest_server/nbi_plugins/tfs_api/Resources.py
@@ -259,8 +259,17 @@ class Devices(_Resource):
 
 class Device(_Resource):
     def get(self, device_uuid : str):
-        return format_grpc_to_json(self.client.GetDevice(grpc_device_id(device_uuid)))
-    
+        return format_grpc_to_json(self.context_client.GetDevice(grpc_device_id(device_uuid)))
+
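+    # The request body must carry the same device_uuid as the URL path;
+    # otherwise the update is rejected with BadRequest (HTTP 400).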
+    def put(self, device_uuid : str):
+        device = request.get_json()
+        if device_uuid != device['device_id']['device_uuid']['uuid']:
+            raise BadRequest('Mismatching device_uuid')
+        return format_grpc_to_json(self.device_client.ConfigureDevice(grpc_device(device)))
+
+    def delete(self, device_uuid : str):
+        return format_grpc_to_json(self.device_client.DeleteDevice(grpc_device_id(device_uuid)))
+
 class LinkIds(_Resource):
     def get(self):
         return format_grpc_to_json(self.context_client.ListLinkIds(Empty()))
diff --git a/src/nbi/service/rest_server/nbi_plugins/tfs_api/__init__.py b/src/nbi/service/rest_server/nbi_plugins/tfs_api/__init__.py
index 4443d7e2454dd0f612a1074993a56ab84f230db9..a0ffb7ed5af620e66d3a0eb071a9e9a7b7f7f1b6 100644
--- a/src/nbi/service/rest_server/nbi_plugins/tfs_api/__init__.py
+++ b/src/nbi/service/rest_server/nbi_plugins/tfs_api/__init__.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 from nbi.service.rest_server.RestServer import RestServer
-from nbi.service.rest_server.nbi_plugins.ietf_hardware import Hardware, HardwareMultipleDevices
 from nbi.service.rest_server.nbi_plugins.ietf_network_new import Networks
 from .Resources import (
     Connection, ConnectionIds, Connections,
@@ -50,10 +49,9 @@ RESOURCES = [
     ('api.slice',          Slice,         '/context/<path:context_uuid>/slice/<path:slice_uuid>'),
 
     ('api.device_ids',     DeviceIds,     '/device_ids'),
+    ('api.devices',        Devices,       '/devices'),
+    ('api.device',         Device,        '/device/<path:device_uuid>'),
 
-    ('api.devices',        HardwareMultipleDevices,       '/devices'),
-    ('api.device',         Hardware,        '/device/<path:device_uuid>'),
-    
     ('api.link_ids',       LinkIds,       '/link_ids'),
     ('api.links',          Links,         '/links'),
     ('api.link',           Link,          '/link/<path:link_uuid>'),