diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh
new file mode 100755
index 0000000000000000000000000000000000000000..aa976762072a15d5481e41c3cfa45fe4a876c0cd
--- /dev/null
+++ b/scripts/run_tests_locally-kpi-DB.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
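+# Runs the KPI Manager database unit tests (KpiDBtests.py) locally with pytest.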
+PROJECTDIR=$(pwd)
+
+cd "$PROJECTDIR/src"
+# RCFILE=$PROJECTDIR/coverage/.coveragerc
+# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+#     kpi_manager/tests/test_unitary.py
+
+# python3 kpi_manager/tests/test_unitary.py
+
+RCFILE=$PROJECTDIR/coverage/.coveragerc    # only needed if the coverage invocation above is re-enabled
+python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \
+    kpi_manager/service/database/KpiDBtests.py
\ No newline at end of file
diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
index c5127a2defc21b4cccd5dbf63b020d5f73318854..7f62280ff9ed01c1f075dffe7088043a57171d02 100644
--- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
+++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
@@ -14,13 +14,14 @@
 
 
 import logging, grpc
+import sqlalchemy, sqlalchemy_utils
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.proto.context_pb2 import Empty
 from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer
 from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList
 from monitoring.service.NameMapping import NameMapping
 # from monitoring.service import ManagementDBTools
-from telemetry.database.managementDB import managementDB
+from kpi_manager.service.database.Kpi_DB import Kpi_DB
 
 from telemetry.database.TelemetryModel import Kpi as KpiModel
 from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\
@@ -33,8 +34,13 @@ METRICS_POOL = MetricsPool('Monitoring', 'KpiManager')
 class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
     def __init__(self, name_mapping : NameMapping):
         LOGGER.info('Init KpiManagerService')
-        self.managementDBobj = managementDB()
-  
+        self.Kpi_DBobj = Kpi_DB()
+    
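+    # Standalone helper mirroring Kpi_DB.create_database(); creates the target database if it does not already exist.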
+    @staticmethod
+    def create_database_if_not_exist(engine: sqlalchemy.engine.Engine) -> None:
+        if not sqlalchemy_utils.database_exists(engine.url):
+            sqlalchemy_utils.create_database(engine.url)
+
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore
                         ) -> KpiId: # type: ignore
@@ -45,15 +51,15 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
             kpi_to_insert.kpi_id          = request.kpi_id.kpi_id.uuid
             kpi_to_insert.kpi_description = request.kpi_description
             kpi_to_insert.kpi_sample_type = request.kpi_sample_type
-            kpi_to_insert.device_id       = request.service_id.service_uuid.uuid 
-            kpi_to_insert.endpoint_id     = request.device_id.device_uuid.uuid 
-            kpi_to_insert.service_id      = request.slice_id.slice_uuid.uuid 
-            kpi_to_insert.slice_id        = request.endpoint_id.endpoint_uuid.uuid
+            kpi_to_insert.device_id       = request.device_id.device_uuid.uuid
+            kpi_to_insert.endpoint_id     = request.endpoint_id.endpoint_uuid.uuid 
+            kpi_to_insert.service_id      = request.service_id.service_uuid.uuid 
+            kpi_to_insert.slice_id        = request.slice_id.slice_uuid.uuid 
             kpi_to_insert.connection_id   = request.connection_id.connection_uuid.uuid
             # kpi_to_insert.link_id         = request.link_id.link_id.uuid
-            self.managementDBobj.add_row_to_db(kpi_to_insert)
-            response.kpi_id.uuid = request.kpi_id.kpi_id.uuid
-            LOGGER.info("Added Row: {:}".format(response))
+            if self.Kpi_DBobj.add_row_to_db(kpi_to_insert):
+                response.kpi_id.uuid = request.kpi_id.kpi_id.uuid
+                # LOGGER.info("Added Row: {:}".format(response))
             return response
         except Exception as e:
             LOGGER.info("Unable to create KpiModel class object. {:}".format(e))
@@ -65,7 +71,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         LOGGER.info("Received gRPC message object: {:}".format(request))
         try: 
             kpi_id_to_search = request.kpi_id.uuid
-            row = self.managementDBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
+            row = self.Kpi_DBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
             if row is not None:
                 response.kpi_id.kpi_id.uuid                 = row.kpi_id
                 response.kpi_description                    = row.kpi_description
@@ -85,7 +91,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         LOGGER.info("Received gRPC message object: {:}".format(request))
         try:
             kpi_id_to_search = request.kpi_id.uuid
-            self.managementDBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
+            self.Kpi_DBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
         except Exception as e:
             LOGGER.info('Unable to search kpi id. {:}'.format(e))
         finally:
@@ -102,7 +108,7 @@ class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
         filter_to_apply['device_id']       = request.device_id[0].device_uuid.uuid
         filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0]
         try:
-            rows = self.managementDBobj.select_with_filter(KpiModel, **filter_to_apply)
+            rows = self.Kpi_DBobj.select_with_filter(KpiModel, **filter_to_apply)
         except Exception as e:
             LOGGER.info('Unable to apply filter on kpi descriptor. {:}'.format(e))
         try:
diff --git a/src/kpi_manager/service/KpiValueComposer.py b/src/kpi_manager/service/KpiValueComposer.py
index 38c07a22acd418a2fa91cfea90b1c813ad64a7e2..38b5b124a0ce820c6dd41326f26c00c01e3bd736 100644
--- a/src/kpi_manager/service/KpiValueComposer.py
+++ b/src/kpi_manager/service/KpiValueComposer.py
@@ -20,6 +20,7 @@ import threading
 from confluent_kafka import KafkaError
 from confluent_kafka import Producer as KafkaProducer
 from confluent_kafka import Consumer as KafkaConsumer
+from kpi_manager.service.database.Kpi_DB import Kpi_DB
+from kpi_manager.service.database.KpiModel import Kpi as KpiModel
 
 LOGGER             = logging.getLogger(__name__)
 KAFKA_SERVER_IP    = '10.152.183.175:9092'
@@ -84,4 +85,8 @@ class KpiValueComposer:
         # LOGGER.info("Extracted Rows that match the KPIs {:}".format(matching_rows))
         return matching_rows
         
-
+    @staticmethod
+    def request_kpi_descriptor_from_db():
+        # Fetch the KPI descriptor row whose kpi_description matches the first KPI name to search.
+        col_name = "kpi_description"
+        kpi_name = KPIs_TO_SEARCH[0]
+        kpi_db_obj = Kpi_DB()
+        return kpi_db_obj.search_db_row_by_id(KpiModel, col_name, kpi_name)
diff --git a/src/kpi_manager/service/database/KpiDBtests.py b/src/kpi_manager/service/database/KpiDBtests.py
new file mode 100644
index 0000000000000000000000000000000000000000..022a7633d7c7bf50f3dcd8e9a2fa3ec2970fd4db
--- /dev/null
+++ b/src/kpi_manager/service/database/KpiDBtests.py
@@ -0,0 +1,30 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import logging
+from kpi_manager.service.database.Kpi_DB import Kpi_DB
+
+LOGGER = logging.getLogger(__name__)
+
+
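+# Basic unit tests for the Kpi_DB wrapper: object construction plus table creation and verification.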
+def test_create_db_object():
+    LOGGER.info('>>> test_create_db_object : START<<< ')
+    kpiDBobj = Kpi_DB()
+
+def test_verify_Tables():
+    LOGGER.info('>>> test_verify_Tables : START <<< ')
+    kpiDBobj = Kpi_DB()
+    kpiDBobj.create_tables()
+    kpiDBobj.verify_tables()
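+
+# A minimal end-to-end sketch of the Kpi_DB CRUD helpers (add, search, delete); it assumes a reachable
+# CockroachDB instance as configured in KpiEngine, and the field values below are purely illustrative.
+def test_add_search_delete_row():
+    LOGGER.info('>>> test_add_search_delete_row : START <<< ')
+    import uuid
+    from kpi_manager.service.database.KpiModel import Kpi
+    kpiDBobj = Kpi_DB()
+    kpiDBobj.create_tables()
+    kpi_uuid = str(uuid.uuid4())
+    row = Kpi(kpi_id=kpi_uuid, kpi_description='example_kpi', kpi_sample_type=101,
+              device_id='DEV1', endpoint_id='END1', service_id='SERV1',
+              slice_id='SLC1', connection_id='CON1', link_id='LNK1')
+    assert kpiDBobj.add_row_to_db(row)
+    assert kpiDBobj.search_db_row_by_id(Kpi, 'kpi_id', kpi_uuid) is not None
+    kpiDBobj.delete_db_row_by_id(Kpi, 'kpi_id', kpi_uuid)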
diff --git a/src/kpi_manager/service/database/KpiEngine.py b/src/kpi_manager/service/database/KpiEngine.py
new file mode 100644
index 0000000000000000000000000000000000000000..041784ff47e9a5199158193e07f4ac90e394ce5a
--- /dev/null
+++ b/src/kpi_manager/service/database/KpiEngine.py
@@ -0,0 +1,49 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, sqlalchemy, sqlalchemy_utils
+# from common.Settings import get_setting
+
+LOGGER = logging.getLogger(__name__)
+
+APP_NAME = 'tfs'
+ECHO = False                # False: do not dump executed SQL commands and transactions
+CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}'
+CRDB_NAMESPACE = "crdb"
+CRDB_SQL_PORT  = "26257"
+CRDB_DATABASE  = "kpi"
+CRDB_USERNAME  = "tfs"
+CRDB_PASSWORD  = "tfs123"
+CRDB_SSLMODE   = "require"
+# CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}'
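+# NOTE: the active CRDB_URI_TEMPLATE targets a local CockroachDB at 127.0.0.1; the commented template targets the in-cluster cockroachdb-public service.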
+
+class KpiEngine:
+    # def __init__(self):
+    #     self.engine = self.get_engine()  
+
+    @staticmethod
+    def get_engine() -> sqlalchemy.engine.Engine:
+        crdb_uri = CRDB_URI_TEMPLATE.format(
+                CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
+        # crdb_uri = CRDB_URI_TEMPLATE.format(
+        #         CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
+        try:
+            # engine = sqlalchemy.create_engine(
+            #     crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True)
+            engine = sqlalchemy.create_engine(crdb_uri, echo=ECHO)
+            LOGGER.info('KpiDBmanager initialized with DB URL: {:}'.format(crdb_uri))
+        except: # pylint: disable=bare-except # pragma: no cover
+            LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri)))
+            return None # type: ignore
+        return engine # type: ignore
diff --git a/src/kpi_manager/service/database/KpiModel.py b/src/kpi_manager/service/database/KpiModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..16844fdc05991eb29a06ce50ab3ae9fdaac3162c
--- /dev/null
+++ b/src/kpi_manager/service/database/KpiModel.py
@@ -0,0 +1,49 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy import Column, Integer, String, Text
+from sqlalchemy.ext.declarative import declarative_base
+
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+# Create a base class for declarative models
+Base = declarative_base()
+
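+# ORM model mapped to the 'kpi' table; its columns mirror the fields of the KpiDescriptor protobuf message.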
+class Kpi(Base):
+    __tablename__ = 'kpi'
+
+    kpi_id          = Column(UUID(as_uuid=False), primary_key=True)
+    kpi_description = Column(Text)
+    kpi_sample_type = Column(Integer)
+    device_id       = Column(String)
+    endpoint_id     = Column(String)
+    service_id      = Column(String)
+    slice_id        = Column(String)
+    connection_id   = Column(String)
+    link_id         = Column(String)
+    # monitor_flag    = Column(String)
+
+
+    # helps in logging the information
+    def __repr__(self):
+        return (f"<Kpi(kpi_id='{self.kpi_id}', kpi_description='{self.kpi_description}', "
+                f"kpi_sample_type='{self.kpi_sample_type}', device_id='{self.device_id}', "
+                f"endpoint_id='{self.endpoint_id}', service_id='{self.service_id}', "
+                f"slice_id='{self.slice_id}', connection_id='{self.connection_id}', "
+                f"link_id='{self.link_id}')>")
diff --git a/src/kpi_manager/service/database/Kpi_DB.py b/src/kpi_manager/service/database/Kpi_DB.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd5a1c3197362707a8b4b1b79afc5823438331b3
--- /dev/null
+++ b/src/kpi_manager/service/database/Kpi_DB.py
@@ -0,0 +1,127 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import sqlalchemy
+import sqlalchemy_utils
+from sqlalchemy.orm import sessionmaker
+from kpi_manager.service.database.KpiEngine import KpiEngine
+from kpi_manager.service.database.KpiModel import Kpi
+
+LOGGER = logging.getLogger(__name__)
+DB_NAME = "kpi"
+
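+# Thin wrapper around the KPI database: ensures the database exists and exposes the
+# add/search/delete/select helpers used by KpiManagerServiceServicerImpl.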
+class Kpi_DB:
+    def __init__(self):
+        self.db_engine = KpiEngine.get_engine()
+        if self.db_engine is None:
+            LOGGER.error('Unable to get SQLAlchemy DB Engine...')
+            # __init__ must not return a value; raise instead so the failure is not silently swallowed
+            raise Exception('Unable to get SQLAlchemy DB Engine...')
+        self.db_name = DB_NAME
+        # self.drop_database(self.db_engine)          # added to test
+        self.create_database(self.db_engine)
+        self.Session = sessionmaker(bind=self.db_engine)
+
+    @staticmethod
+    def create_database(engine : sqlalchemy.engine.Engine) -> None:
+        if not sqlalchemy_utils.database_exists(engine.url):
+            LOGGER.info("Database created. {:}".format(engine.url))
+            sqlalchemy_utils.create_database(engine.url)
+
+    @staticmethod
+    def drop_database(engine : sqlalchemy.engine.Engine) -> None:
+        if sqlalchemy_utils.database_exists(engine.url):
+            sqlalchemy_utils.drop_database(engine.url)
+
+    def create_tables(self):
+        try:
+            Kpi.metadata.create_all(self.db_engine)     # type: ignore
+            LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name))
+        except Exception as e:
+            LOGGER.info("Tables cannot be created in the TelemetryFrontend database. {:s}".format(str(e)))
+
+    def verify_tables(self):
+        try:
+            with self.db_engine.connect() as connection:
+                result = connection.execute(sqlalchemy.text("SHOW TABLES;"))
+                tables = result.fetchall()      # type: ignore
+                LOGGER.info("Tables verified: {:}".format(tables))
+        except Exception as e:
+            LOGGER.info("Unable to fetch Table names. {:s}".format(str(e)))
+
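+    # Insert a single ORM row; returns True on success and False on failure (the transaction is rolled back).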
+    def add_row_to_db(self, row):
+        session = self.Session()
+        try:
+            session.add(row)
+            session.commit()
+            LOGGER.info(f"Row inserted into {row.__class__.__name__} table.")
+            return True
+        except Exception as e:
+            session.rollback()
+            LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}")
+            return False
+        finally:
+            session.close()
+    
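+    # Return the first row of 'model' whose 'col_name' column equals 'id_to_search', or None if no match is found.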
+    def search_db_row_by_id(self, model, col_name, id_to_search):
+        session = self.Session()
+        try:
+            entity = session.query(model).filter_by(**{col_name: id_to_search}).first()
+            if entity:
+                LOGGER.info(f"{model.__name__} ID found: {str(entity)}")
+                return entity
+            else:
+                LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}")
+                return None
+        except Exception as e:
+            session.rollback()
+            LOGGER.info(f"Failed to retrieve {model.__name__} ID. {str(e)}")
+            raise
+        finally:
+            session.close()
+    
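+    # Delete the first row of 'model' whose 'col_name' column equals 'id_to_search', if it exists.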
+    def delete_db_row_by_id(self, model, col_name, id_to_search):
+        session = self.Session()
+        try:
+            record = session.query(model).filter_by(**{col_name: id_to_search}).first()
+            if record:
+                session.delete(record)
+                session.commit()
+                LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search)
+            else:
+                LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search)
+        except Exception as e:
+            session.rollback()
+            LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e)
+        finally:
+            session.close()
+
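+    # Generic filtered SELECT used by SelectKpiDescriptor; the column == value filters are ANDed together.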
+    def select_with_filter(self, model, **filters):
+        session = self.Session()
+        try:
+            query = session.query(model)
+            for column, value in filters.items():
+                query = query.filter(getattr(model, column) == value) # type: ignore   
+            result = query.all()
+            if result:
+                LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}") #  - Results: {result}
+            else:
+                LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}")
+            return result
+        except Exception as e:
+            LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filters} ::: {e}")
+            return []
+        finally:
+            session.close()
\ No newline at end of file
diff --git a/src/kpi_manager/service/database/__init__.py b/src/kpi_manager/service/database/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1549d9811aa5d1c193a44ad45d0d7773236c0612
--- /dev/null
+++ b/src/kpi_manager/service/database/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/kpi_manager/service/database/__main__.py b/src/kpi_manager/service/database/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f0e5324644a5a58eb47483688f71306a0808d1a
--- /dev/null
+++ b/src/kpi_manager/service/database/__main__.py
@@ -0,0 +1,107 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, signal, sys, threading, time
+from prometheus_client import start_http_server
+from common.Constants import ServiceNameEnum
+from common.Settings import (
+    ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port,
+    wait_for_environment_variables)
+from common.proto import monitoring_pb2
+from monitoring.service.EventTools import EventsDeviceCollector # import updated
+from monitoring.service.NameMapping import NameMapping          # import updated
+# from .MonitoringService import MonitoringService
+from .KpiManagerService import KpiManagerService
+
+terminate = threading.Event()
+LOGGER = None
+
+def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
+    LOGGER.warning('Terminate signal received')
+    terminate.set()
+
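+# Collect device events and request monitoring of newly created KPIs until termination is signalled.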
+def start_kpi_manager(name_mapping : NameMapping):
+    LOGGER.info('Starting KPI Manager...')
+
+    events_collector = EventsDeviceCollector(name_mapping)
+    events_collector.start()
+
+    # TODO: redesign this method to be more clear and clean
+
+    # Iterate while terminate is not set
+    while not terminate.is_set():
+        list_new_kpi_ids = events_collector.listen_events()
+
+        # Monitor Kpis
+        if bool(list_new_kpi_ids):
+            for kpi_id in list_new_kpi_ids:
+                # Create Monitor Kpi Requests
+                monitor_kpi_request = monitoring_pb2.MonitorKpiRequest()
+                monitor_kpi_request.kpi_id.CopyFrom(kpi_id)
+                monitor_kpi_request.monitoring_window_s = 86400
+                monitor_kpi_request.sampling_rate_s = 10
+                events_collector._monitoring_client.MonitorKpi(monitor_kpi_request)
+        
+        time.sleep(0.5) # let other tasks run; do not overload CPU
+    else:
+        # Terminate is set, looping terminates
+        LOGGER.warning("Stopping execution...")
+
+    events_collector.stop()     # stop collecting events once termination has been requested
+
+def main():
+    global LOGGER # pylint: disable=global-statement
+
+    log_level = get_log_level()
+    logging.basicConfig(level=log_level)
+    LOGGER = logging.getLogger(__name__)
+
+    wait_for_environment_variables([
+        get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST     ),
+        get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC),
+        get_env_var_name(ServiceNameEnum.DEVICE,  ENVVAR_SUFIX_SERVICE_HOST     ),
+        get_env_var_name(ServiceNameEnum.DEVICE,  ENVVAR_SUFIX_SERVICE_PORT_GRPC),
+    ])
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    LOGGER.info('Starting...')
+
+    # Start metrics server
+    metrics_port = get_metrics_port()
+    start_http_server(metrics_port)
+
+    name_mapping = NameMapping()
+    # Starting monitoring service
+    # grpc_service = MonitoringService(name_mapping)
+    # grpc_service.start()
+    # start_monitoring(name_mapping)
+
+    grpc_service = KpiManagerService(name_mapping)
+    grpc_service.start()
+
+    start_kpi_manager(name_mapping)
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(timeout=1.0): pass
+
+    LOGGER.info('Terminating...')
+    grpc_service.stop()
+
+    LOGGER.info('Bye')
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py
index 83150c102be541184de220551df23ecaed4c3b41..bc4c5b9d11909b4c4cbbbb9906eef8323625f514 100755
--- a/src/kpi_manager/tests/test_messages.py
+++ b/src/kpi_manager/tests/test_messages.py
@@ -24,6 +24,19 @@ def create_kpi_id_request():
     _kpi_id.kpi_id.uuid = "34f73604-eca6-424f-9995-18b519ad0978"
     return _kpi_id
 
+def create_kpi_descriptor_request_a(descriptor_name: str):
+    _create_kpi_request                                    = kpi_manager_pb2.KpiDescriptor()
+    _create_kpi_request.kpi_id.kpi_id.uuid                 = str(uuid.uuid4())
+    _create_kpi_request.kpi_description                    = descriptor_name
+    _create_kpi_request.kpi_sample_type                    = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
+    _create_kpi_request.device_id.device_uuid.uuid         = 'DEV1'     # pylint: disable=maybe-no-member
+    _create_kpi_request.service_id.service_uuid.uuid       = 'SERV1'    # pylint: disable=maybe-no-member
+    _create_kpi_request.slice_id.slice_uuid.uuid           = 'SLC1'  # pylint: disable=maybe-no-member
+    _create_kpi_request.endpoint_id.endpoint_uuid.uuid     = 'END1'     # pylint: disable=maybe-no-member
+    _create_kpi_request.connection_id.connection_uuid.uuid = 'CON1'  # pylint: disable=maybe-no-member
+    _create_kpi_request.link_id.link_uuid.uuid             = 'LNK1'  # pylint: disable=maybe-no-member
+    return _create_kpi_request
+
 def create_kpi_descriptor_request():
     _create_kpi_request                                    = kpi_manager_pb2.KpiDescriptor()
     _create_kpi_request.kpi_id.kpi_id.uuid                 = str(uuid.uuid4())
@@ -34,6 +47,7 @@ def create_kpi_descriptor_request():
     _create_kpi_request.slice_id.slice_uuid.uuid           = 'SLC3'  # pylint: disable=maybe-no-member
     _create_kpi_request.endpoint_id.endpoint_uuid.uuid     = 'END2'     # pylint: disable=maybe-no-member
     _create_kpi_request.connection_id.connection_uuid.uuid = 'CON2'  # pylint: disable=maybe-no-member
+    _create_kpi_request.link_id.link_uuid.uuid             = 'LNK2'  # pylint: disable=maybe-no-member
     return _create_kpi_request
 
 def create_kpi_filter_request_a():
diff --git a/src/kpi_manager/tests/test_unitary.py b/src/kpi_manager/tests/test_unitary.py
index 84cf44497eee9193bdb739dbe6716ae894d958f1..8ac50a38bf3110ca12f78e6ea25f609aab0a250c 100755
--- a/src/kpi_manager/tests/test_unitary.py
+++ b/src/kpi_manager/tests/test_unitary.py
@@ -19,7 +19,7 @@ import os, pytest
 import logging, json
 from typing import Union
 
-from apscheduler.schedulers.background import BackgroundScheduler
+# from apscheduler.schedulers.background import BackgroundScheduler
 
 from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, DeviceOperationalStatusEnum, EventTypeEnum, DeviceEvent, Device, Empty, Topology, TopologyId
 from common.Constants import ServiceNameEnum
@@ -45,12 +45,15 @@ from device.client.DeviceClient import DeviceClient
 
 from kpi_manager.tests.test_messages import create_kpi_request, create_kpi_request_b, \
                 create_kpi_request_c, create_kpi_request_d, create_kpi_filter_request, \
-                create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a
+                create_kpi_descriptor_request, create_kpi_id_request, create_kpi_filter_request_a, \
+                create_kpi_descriptor_request_a
 # from monitoring.service.MonitoringService import MonitoringService
 from kpi_manager.service.KpiManagerService import KpiManagerService
 # from monitoring.client.MonitoringClient import MonitoringClient
 from kpi_manager.client.KpiManagerClient import KpiManagerClient
 
+from kpi_manager.service.KpiManagerServiceServicerImpl import KpiManagerServiceServicerImpl
+
 from monitoring.service.ManagementDBTools import ManagementDB
 from monitoring.service.MetricsDBTools import MetricsDB
 from monitoring.service.NameMapping import NameMapping
@@ -212,29 +215,31 @@ def kpi_manager_client(kpi_manager_service : KpiManagerService): # pylint: disab
 # ---------- 2nd Iteration Tests -----------------
 def test_SetKpiDescriptor(kpi_manager_client):
     LOGGER.info(" >>> test_SetKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiId)
-
-def test_GetKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ")
-    response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiDescriptor)
-
-def test_DeleteKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
-    del_response = kpi_manager_client.DeleteKpiDescriptor(response)
-    kpi_manager_client.GetKpiDescriptor(response)
-    LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response))
+    _descriptors = ["node_timex_status", "node_timex_sync_status", "node_udp_queues"]
+    for _descriptor_name in _descriptors:
+        response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(_descriptor_name))
+        LOGGER.info("Response gRPC message object: {:}".format(response))
     assert isinstance(response, KpiId)
 
-def test_SelectKpiDescriptor(kpi_manager_client):
-    LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ")
-    response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a())
-    LOGGER.info("Response gRPC message object: {:}".format(response))
-    assert isinstance(response, KpiDescriptorList)
+# def test_GetKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_GetKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.GetKpiDescriptor(create_kpi_id_request())
+#     LOGGER.info("Response gRPC message object: {:}".format(response))
+#     assert isinstance(response, KpiDescriptor)
+
+# def test_DeleteKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_DeleteKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request())
+#     del_response = kpi_manager_client.DeleteKpiDescriptor(response)
+#     kpi_manager_client.GetKpiDescriptor(response)
+#     LOGGER.info("Response of delete method gRPC message object: {:}".format(del_response))
+#     assert isinstance(response, KpiId)
+
+# def test_SelectKpiDescriptor(kpi_manager_client):
+#     LOGGER.info(" >>> test_SelectKpiDescriptor: START <<< ")
+#     response = kpi_manager_client.SelectKpiDescriptor(create_kpi_filter_request_a())
+#     LOGGER.info("Response gRPC message object: {:}".format(response))
+#     assert isinstance(response, KpiDescriptorList)
 
 # ------------- INITIAL TESTs ----------------
 # Test case that makes use of client fixture to test server's CreateKpi method