diff --git a/manifests/kpi_managerservice.yaml b/manifests/kpi_managerservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..45ee778956a93f2311cf50e4374c5b79dc88de2b
--- /dev/null
+++ b/manifests/kpi_managerservice.yaml
@@ -0,0 +1,99 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: kpi-managerservice
+spec:
+  selector:
+    matchLabels:
+      app: kpi-managerservice
+  #replicas: 1
+  template:
+    metadata:
+      annotations:
+        config.linkerd.io/skip-outbound-ports: "4222"
+      labels:
+        app: kpi-managerservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+        - name: server
+          image: labs.etsi.org:5050/tfs/controller/kpi_manager:latest
+          imagePullPolicy: Always
+          ports:
+            - containerPort: 7071
+            - containerPort: 9192
+          env:
+            - name: LOG_LEVEL
+              value: "INFO"
+          envFrom:
+            - secretRef:
+                name: crdb-data
+          readinessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:7071"]
+          livenessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:7071"]
+          resources:
+            requests:
+              cpu: 250m
+              memory: 128Mi
+            limits:
+              cpu: 1000m
+              memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: kpi-managerservice
+  labels:
+    app: kpi-managerservice
+spec:
+  type: ClusterIP
+  selector:
+    app: kpi-managerservice
+  ports:
+    - name: grpc
+      protocol: TCP
+      port: 7071
+      targetPort: 7071
+    - name: metrics
+      protocol: TCP
+      port: 9192
+      targetPort: 9192
+---
+apiVersion: autoscaling/v2
+kind: HorizontalPodAutoscaler
+metadata:
+  name: kpi-managerservice-hpa
+spec:
+  scaleTargetRef:
+    apiVersion: apps/v1
+    kind: Deployment
+    name: kpi-managerservice
+  minReplicas: 1
+  maxReplicas: 20
+  metrics:
+    - type: Resource
+      resource:
+        name: cpu
+        target:
+          type: Utilization
+          averageUtilization: 80
+  #behavior:
+  #  scaleDown:
+  #    stabilizationWindowSeconds: 30
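A quick sanity check once the manifest is applied (a sketch; it assumes the default "tfs" namespace used by the TFS deploy scripts and the hyphenated resource names above):

    # create/update the resources and confirm they come up
    kubectl -n tfs apply -f manifests/kpi_managerservice.yaml
    kubectl -n tfs get deployment,service -l app=kpi-managerservice
    kubectl -n tfs get hpa kpi-managerservice-hpa
    # run the same health check the readiness/liveness probes use
    kubectl -n tfs exec deploy/kpi-managerservice -c server -- /bin/grpc_health_probe -addr=:7071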
diff --git a/scripts/run_tests_locally-kpi-manager.sh b/scripts/run_tests_locally-kpi-manager.sh
index be69980e05f2b4f922a970df77f7d15b4a178fcc..742a52685fbb6d327d16d88bb74db85c5c79dff1 100755
--- a/scripts/run_tests_locally-kpi-manager.sh
+++ b/scripts/run_tests_locally-kpi-manager.sh
@@ -24,5 +24,5 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
-python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \
+python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     kpi_manager/tests/test_kpi_manager.py
\ No newline at end of file
diff --git a/src/kpi_manager/.gitlab-ci.yml b/src/kpi_manager/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ffd4e38ffabd0d18b85a86bca7760001a5466b73
--- /dev/null
+++ b/src/kpi_manager/.gitlab-ci.yml
@@ -0,0 +1,133 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Build, tag, and push the Docker image to the GitLab Docker registry
+build kpi_manager:
+  variables:
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker buildx build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile .
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+    - changes:
+      - src/common/**/*.py
+      - proto/*.proto
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+# Apply unit test to the component
+unit_test kpi_manager:
+  variables:
+    IMAGE_NAME: 'kpi_manager' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: unit_test
+  needs:
+    - build kpi_manager
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi
+    - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi
+    - docker container prune -f
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker pull "cockroachdb/cockroach:latest-v22.2"
+    - docker volume create crdb
+    - >
+      docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080
+      --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123
+      --volume "crdb:/cockroach/cockroach-data"
+      cockroachdb/cockroach:latest-v22.2 start-single-node
+    - echo "Waiting for initialization..."
+    - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done
+    - docker logs crdb
+    - docker ps -a
+    - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
+    - echo $CRDB_ADDRESS
+    # NATS is not used by the kpi_manager unit tests; no NATS address lookup is required
+    # - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
+    - >
+      docker run --name $IMAGE_NAME -d -p 7071:7071
+      --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require"
+      --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results"
+      --network=teraflowbridge
+      $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - docker ps -a
+    - sleep 5
+    - docker logs $IMAGE_NAME
+    - >
+      docker exec -i $IMAGE_NAME bash -c
+      "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py"
+    - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
+  after_script:
+    - docker volume rm -f crdb
+    - docker network rm teraflowbridge
+    - docker volume prune --force
+    - docker image prune --force
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+    - changes:
+      - src/common/**/*.py
+      - proto/*.proto
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+  artifacts:
+      when: always
+      reports:
+        junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+
+## Deployment of the service in Kubernetes Cluster
+#deploy kpi_manager:
+#  variables:
+#    IMAGE_NAME: 'kpi_manager' # name of the microservice
+#    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+#  stage: deploy
+#  needs:
+#    - unit_test kpi_manager
+#    # - integ_test execute
+#  script:
+#    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+#    - kubectl version
+#    - kubectl get all
+#    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+#    - kubectl get all
+#  # environment:
+#  #   name: test
+#  #   url: https://example.com
+#  #   kubernetes:
+#  #     namespace: test
+#  rules:
+#    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+#      when: manual    
+#    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+#      when: manual
diff --git a/src/kpi_manager/Dockerfile b/src/kpi_manager/Dockerfile
index 9454f59328ce9206bb2f678b81ce2e289d2a9055..4d74030e7b024469a236e7dfd486dcc91987cfc2 100644
--- a/src/kpi_manager/Dockerfile
+++ b/src/kpi_manager/Dockerfile
@@ -62,8 +62,6 @@ RUN python3 -m pip install -r requirements.txt
 
 # Add component files into working directory
 WORKDIR /var/teraflow
-COPY src/context/. context/
-COPY src/device/. device/
 COPY src/kpi_manager/. kpi_manager/
 
 # Start the service
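With the context/ and device/ trees no longer copied in, the image can be rebuilt from the repository root to confirm the component is self-contained (a sketch mirroring the CI build job; the local tag is illustrative):

    docker buildx build -t kpi_manager:latest -f ./src/kpi_manager/Dockerfile .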
diff --git a/src/kpi_manager/__init__.py b/src/kpi_manager/__init__.py
index 1549d9811aa5d1c193a44ad45d0d7773236c0612..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3 100644
--- a/src/kpi_manager/__init__.py
+++ b/src/kpi_manager/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/client/KpiManagerClient.py b/src/kpi_manager/client/KpiManagerClient.py
index 140381d3a6c55c2edd997fdabf21da276b122e6c..cd1e98c05b0df3af658a882aedc6e67c5fc0e41c 100755
--- a/src/kpi_manager/client/KpiManagerClient.py
+++ b/src/kpi_manager/client/KpiManagerClient.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -58,7 +58,7 @@ class KpiManagerClient:
     def DeleteKpiDescriptor(self,request : KpiId) -> Empty:
         LOGGER.debug('DeleteKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request)))
         response = self.stub.DeleteKpiDescriptor(request)
-        LOGGER.info('DeleteKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response)))
+        LOGGER.debug('DeleteKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response)))
         return response
 
     @RETRY_DECORATOR
@@ -69,8 +69,8 @@ class KpiManagerClient:
         return response
 
     @RETRY_DECORATOR
-    def SelectKpiDescriptor(self, request : KpiDescriptorFilter) -> KpiDescriptorList:
-        LOGGER.debug('SelectKpiDescriptor: {:s}'.format(grpc_message_to_json_string(request)))
-        response = self.stub.SelectKpiDescriptor(request)
+    def SelectKpiDescriptor(self, filter : KpiDescriptorFilter) -> KpiDescriptorList:
+        LOGGER.debug('SelectKpiDescriptor: {:s}'.format(grpc_message_to_json_string(filter)))
+        response = self.stub.SelectKpiDescriptor(filter)
         LOGGER.debug('SelectKpiDescriptor result: {:s}'.format(grpc_message_to_json_string(response)))
         return response
\ No newline at end of file
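A minimal caller-side sketch of the renamed SelectKpiDescriptor parameter (assuming the client resolves its gRPC host/port from Settings like other TFS clients; the filter values are illustrative):

    from common.proto.kpi_sample_types_pb2 import KpiSampleType
    from common.proto.kpi_management_pb2 import KpiDescriptorFilter
    from kpi_manager.client.KpiManagerClient import KpiManagerClient

    client = KpiManagerClient()                       # default host/port resolution assumed
    kpi_filter = KpiDescriptorFilter()
    kpi_filter.kpi_sample_type.append(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED)
    reply = client.SelectKpiDescriptor(kpi_filter)    # returns a KpiDescriptorList
    print('matching descriptors:', len(reply.kpi_descriptor_list))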
diff --git a/src/kpi_manager/client/__init__.py b/src/kpi_manager/client/__init__.py
index 1549d9811aa5d1c193a44ad45d0d7773236c0612..48f7d354a2f3fe6e91bb79b3ca956f68c36ed9e3 100644
--- a/src/kpi_manager/client/__init__.py
+++ b/src/kpi_manager/client/__init__.py
@@ -1,5 +1,5 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
-#
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+# 
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
diff --git a/src/kpi_manager/database/KpiEngine.py b/src/kpi_manager/database/KpiEngine.py
index 7fe31946b9386ddc7f67c0d8e3d572ccb73b0e65..620ac97968af4e29b92d148b362f3baf7aaa9fd5 100644
--- a/src/kpi_manager/database/KpiEngine.py
+++ b/src/kpi_manager/database/KpiEngine.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/database/KpiModel.py b/src/kpi_manager/database/KpiModel.py
index 78276f59e41c9b6b26de8a960bc10526197fd182..e0f4c47a8127352b98177a0d7f2a8041700d4445 100644
--- a/src/kpi_manager/database/KpiModel.py
+++ b/src/kpi_manager/database/KpiModel.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/kpi_manager/database/Kpi_DB.py
index a414609bbbfc0c9548a6cd8a168ebe0bc55818d5..2e8eeeb8cffbac2ab307b0f784df4bb07bf9eef3 100644
--- a/src/kpi_manager/database/Kpi_DB.py
+++ b/src/kpi_manager/database/Kpi_DB.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,17 +13,18 @@
 # limitations under the License.
 
 import logging, time
-import sqlalchemy
+from typing import List, Tuple
+from sqlalchemy import select, and_
 import sqlalchemy_utils
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy.ext.declarative import declarative_base
 from kpi_manager.database.KpiEngine import KpiEngine
-from kpi_manager.database.KpiModel import Kpi
+from kpi_manager.database.KpiModel import Kpi as KpiModel
 
 LOGGER = logging.getLogger(__name__)
 DB_NAME = "kpi"
 
-class Kpi_DB:
+class KpiDB:
     def __init__(self):
         self.db_engine = KpiEngine.get_engine()
         if self.db_engine is None:
@@ -36,7 +37,7 @@ class Kpi_DB:
 
     def create_database(self) -> None:
         if not sqlalchemy_utils.database_exists(self.db_engine.url):
-            LOGGER.info("Database created. {:}".format(self.db_engine.url))
+            LOGGER.debug("Database created. {:}".format(self.db_engine.url))
             sqlalchemy_utils.create_database(self.db_engine.url)
 
     def drop_database(self) -> None:
@@ -45,31 +46,31 @@ class Kpi_DB:
 
     def create_tables(self):
         try:
-            Kpi.metadata.create_all(self.db_engine)     # type: ignore
-            LOGGER.info("Tables created in the DB Name: {:}".format(self.db_name))
+            KpiModel.metadata.create_all(self.db_engine)     # type: ignore
+            LOGGER.debug("Tables created in the DB Name: {:}".format(self.db_name))
         except Exception as e:
-            LOGGER.info("Tables cannot be created in the kpi database. {:s}".format(str(e)))
+            LOGGER.debug("Tables cannot be created in the kpi database. {:s}".format(str(e)))
 
     def verify_tables(self):
         try:
             with self.db_engine.connect() as connection:
                 result = connection.execute("SHOW TABLES;")
                 tables = result.fetchall()      # type: ignore
-                LOGGER.info("Tables verified: {:}".format(tables))
+                LOGGER.debug("Tables verified: {:}".format(tables))
         except Exception as e:
-            LOGGER.info("Unable to fetch Table names. {:s}".format(str(e)))
+            LOGGER.debug("Unable to fetch Table names. {:s}".format(str(e)))
 
     def add_row_to_db(self, row):
         session = self.Session()
         try:
             session.add(row)
             session.commit()
-            LOGGER.info(f"Row inserted into {row.__class__.__name__} table.")
+            LOGGER.debug(f"Row inserted into {row.__class__.__name__} table.")
             return True
         except Exception as e:
             session.rollback()
             if "psycopg2.errors.UniqueViolation" in str(e):
-                LOGGER.warning(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}")
+                LOGGER.debug(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}")
             else:
                 LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}")
             return False
@@ -81,14 +82,14 @@ class Kpi_DB:
         try:
             entity = session.query(model).filter_by(**{col_name: id_to_search}).first()
             if entity:
-                # LOGGER.info(f"{model.__name__} ID found: {str(entity)}")
+                # LOGGER.debug(f"{model.__name__} ID found: {str(entity)}")
                 return entity
             else:
-                LOGGER.warning(f"{model.__name__} ID not found: {str(id_to_search)}")
+                LOGGER.debug(f"{model.__name__} ID not found: {str(id_to_search)}")
                 return None
         except Exception as e:
             session.rollback()
-            LOGGER.info(f"Failed to retrieve {model.__name__} ID. {str(e)}")
+            LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}")
             raise
         finally:
             session.close()
@@ -100,9 +101,9 @@ class Kpi_DB:
             if record:
                 session.delete(record)
                 session.commit()
-                LOGGER.info("Deleted %s with %s: %s", model.__name__, col_name, id_to_search)
+                LOGGER.debug("Deleted %s with %s: %s", model.__name__, col_name, id_to_search)
             else:
-                LOGGER.warning("%s with %s %s not found", model.__name__, col_name, id_to_search)
+                LOGGER.debug("%s with %s %s not found", model.__name__, col_name, id_to_search)
                 return None
         except Exception as e:
             session.rollback()
@@ -110,20 +111,43 @@ class Kpi_DB:
         finally:
             session.close()
 
-    def select_with_filter(self, model, **filters):
+    def select_with_filter(self, model, filter_object):
         session = self.Session()
         try:
-            query = session.query(model)
-            for column, value in filters.items():
-                query = query.filter(getattr(model, column) == value) # type: ignore   
+            query = session.query(KpiModel)
+            # Apply filters based on the filter_object
+            if filter_object.kpi_id:
+                query = query.filter(KpiModel.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id]))
+
+            if filter_object.kpi_sample_type:
+                query = query.filter(KpiModel.kpi_sample_type.in_(filter_object.kpi_sample_type))
+
+            if filter_object.device_id:
+                query = query.filter(KpiModel.device_id.in_([d.device_uuid.uuid for d in filter_object.device_id]))
+
+            if filter_object.endpoint_id:
+                query = query.filter(KpiModel.endpoint_id.in_([e.endpoint_uuid.uuid for e in filter_object.endpoint_id]))
+
+            if filter_object.service_id:
+                query = query.filter(KpiModel.service_id.in_([s.service_uuid.uuid for s in filter_object.service_id]))
+
+            if filter_object.slice_id:
+                query = query.filter(KpiModel.slice_id.in_([s.slice_uuid.uuid for s in filter_object.slice_id]))
+
+            if filter_object.connection_id:
+                query = query.filter(KpiModel.connection_id.in_([c.connection_uuid.uuid for c in filter_object.connection_id]))
+
+            if filter_object.link_id:
+                query = query.filter(KpiModel.link_id.in_([l.link_uuid.uuid for l in filter_object.link_id]))
             result = query.all()
+            
             if result:
-                LOGGER.info(f"Fetched filtered rows from {model.__name__} table with filters: {filters}") #  - Results: {result}
+                LOGGER.debug(f"Fetched filtered rows from {model.__name__} table with filters: {filter_object}") #  - Results: {result}
             else:
-                LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filters}")
+                LOGGER.debug(f"No matching row found in {model.__name__} table with filters: {filter_object}")
             return result
         except Exception as e:
-            LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filters} ::: {e}")
+            LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}")
             return []
         finally:
             session.close()
\ No newline at end of file
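A minimal sketch of driving the reworked select_with_filter with a protobuf filter object instead of keyword arguments (the device UUID is a placeholder; unset filter fields are simply skipped):

    from common.proto.kpi_management_pb2 import KpiDescriptorFilter
    from kpi_manager.database.Kpi_DB import KpiDB
    from kpi_manager.database.KpiModel import Kpi as KpiModel

    kpi_db = KpiDB()
    kpi_filter = KpiDescriptorFilter()
    kpi_filter.device_id.add().device_uuid.uuid = '123e4567-e89b-12d3-a456-426614174000'
    rows = kpi_db.select_with_filter(KpiModel, kpi_filter)   # filters on device_id only; returns a list of Kpi rows
    print('rows matched:', len(rows))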
diff --git a/src/kpi_manager/database/__init__.py b/src/kpi_manager/database/__init__.py
index 1549d9811aa5d1c193a44ad45d0d7773236c0612..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3 100644
--- a/src/kpi_manager/database/__init__.py
+++ b/src/kpi_manager/database/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/service/KpiManagerService.py b/src/kpi_manager/service/KpiManagerService.py
index ecc7105a73c7e9238ee560203ff653e950706342..d3eed794473d500ed425e643e45a4ced2d540f2d 100755
--- a/src/kpi_manager/service/KpiManagerService.py
+++ b/src/kpi_manager/service/KpiManagerService.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
index cf13c0526345805812447cae52bf832476346ed9..d88d6d8eb619a5d21e1f8795d5f4a55b2aaf73eb 100644
--- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
+++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,93 +14,77 @@
 
 
 import logging, grpc
-import sqlalchemy, sqlalchemy_utils
+from typing import List, Set
+from sqlalchemy.sql.expression import BinaryExpression
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.proto.context_pb2 import Empty
 from common.proto.kpi_management_pb2_grpc import KpiManagerServiceServicer
 from common.proto.kpi_management_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList
 from monitoring.service.NameMapping import NameMapping
-# from monitoring.service import ManagementDBTools
-
-from kpi_manager.database.Kpi_DB import Kpi_DB
+from kpi_manager.database.Kpi_DB import KpiDB
 from kpi_manager.database.KpiModel import Kpi as KpiModel
-# from telemetry.database.TelemetryModel import Kpi as KpiModel
-from common.proto.context_pb2 import DeviceId, LinkId, ServiceId, SliceId,\
-                             ConnectionId, EndPointId
 
 LOGGER = logging.getLogger(__name__)
-
-METRICS_POOL = MetricsPool('Monitoring', 'KpiManager')
+METRICS_POOL = MetricsPool('KpiManager', 'NBIgRPC')
 
 class KpiManagerServiceServicerImpl(KpiManagerServiceServicer):
     def __init__(self, name_mapping : NameMapping):
-        LOGGER.info('Init KpiManagerService')
-        self.Kpi_DBobj = Kpi_DB()
+        LOGGER.debug('Init KpiManagerService')
+        self.kpi_db_obj = KpiDB()
     
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore
                         ) -> KpiId: # type: ignore
         response = KpiId()
-        LOGGER.info("Received gRPC message object: {:}".format(request))
+        LOGGER.debug("Received gRPC message object: {:}".format(request))
         try:
             kpi_to_insert = KpiModel.convert_KpiDescriptor_to_row(request)
-            if(self.Kpi_DBobj.add_row_to_db(kpi_to_insert)):
+            if(self.kpi_db_obj.add_row_to_db(kpi_to_insert)):
                 response.kpi_id.uuid = request.kpi_id.kpi_id.uuid
-                # LOGGER.info("Added Row: {:}".format(response))
+                # LOGGER.debug("Added Row: {:}".format(response))
             return response
         except Exception as e:
-            LOGGER.info("Unable to create KpiModel class object. {:}".format(e))
+            LOGGER.debug("Unable to create KpiModel class object. {:}".format(e))
     
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)        
     def GetKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore
                          ) -> KpiDescriptor: # type: ignore
         response = KpiDescriptor()
-        LOGGER.info("Received gRPC message object: {:}".format(request))
+        LOGGER.debug("Received gRPC message object: {:}".format(request))
         try: 
             kpi_id_to_search = request.kpi_id.uuid
-            row = self.Kpi_DBobj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
+            row = self.kpi_db_obj.search_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
             if row is not None:
                 response = KpiModel.convert_row_to_KpiDescriptor(row)
             return response
         except Exception as e:
-            LOGGER.info('Unable to search kpi id. {:}'.format(e))
+            LOGGER.debug('Unable to search kpi id. {:}'.format(e))
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def DeleteKpiDescriptor(self, request: KpiId, grpc_context: grpc.ServicerContext # type: ignore
                             ) -> Empty: # type: ignore
-        LOGGER.info("Received gRPC message object: {:}".format(request))
+        LOGGER.debug("Received gRPC message object: {:}".format(request))
         try:
             kpi_id_to_search = request.kpi_id.uuid
-            self.Kpi_DBobj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
+            self.kpi_db_obj.delete_db_row_by_id(KpiModel, 'kpi_id', kpi_id_to_search)
         except Exception as e:
-            LOGGER.info('Unable to search kpi id. {:}'.format(e))
+            LOGGER.debug('Unable to search kpi id. {:}'.format(e))
         finally:
             return Empty()
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def SelectKpiDescriptor(self, request: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore
+    def SelectKpiDescriptor(self, filter: KpiDescriptorFilter, grpc_context: grpc.ServicerContext # type: ignore
                             ) -> KpiDescriptorList: # type: ignore
-        LOGGER.info("Received gRPC message object: {:}".format(request))
+        LOGGER.debug("Received gRPC message object: {:}".format(filter))
         response = KpiDescriptorList()
-        # LOGGER.info("Recevied requested Object: {:}".format(request))
-        # re-structre the filter. create dynamic filter
-        filter_to_apply = dict()
-        filter_to_apply['kpi_sample_type'] = request.kpi_sample_type[0]
-        filter_to_apply['device_id']       = request.device_id[0].device_uuid.uuid
-        filter_to_apply['endpoint_id']     = request.endpoint_id[0].endpoint_uuid.uuid
-        filter_to_apply['service_id']      = request.service_id[0].service_uuid.uuid
-        filter_to_apply['slice_id']        = request.slice_id[0].slice_uuid.uuid
-        filter_to_apply['connection_id']   = request.connection_id[0].connection_uuid.uuid
-        filter_to_apply['link_id']         = request.link_id[0].link_uuid.uuid
         try:
-            rows = self.Kpi_DBobj.select_with_filter(KpiModel, **filter_to_apply)
+            rows = self.kpi_db_obj.select_with_filter(KpiModel, filter)
         except Exception as e:
-            LOGGER.info('Unable to apply filter on kpi descriptor. {:}'.format(e))
+            LOGGER.debug('Unable to apply filter on kpi descriptor. {:}'.format(e))
         try:
-            if len(rows) != 0:
-                for row in rows:
-                    kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row)
-                    response.kpi_descriptor_list.append(kpiDescriptor_obj)
+            for row in rows:
+                kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row)
+                response.kpi_descriptor_list.append(kpiDescriptor_obj)
             return response
         except Exception as e:
-            LOGGER.info('Unable to process response {:}'.format(e))
+            LOGGER.debug('Unable to process filter response {:}'.format(e))
diff --git a/src/kpi_manager/service/__init__.py b/src/kpi_manager/service/__init__.py
index 1549d9811aa5d1c193a44ad45d0d7773236c0612..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3 100644
--- a/src/kpi_manager/service/__init__.py
+++ b/src/kpi_manager/service/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py
index 9f0e5324644a5a58eb47483688f71306a0808d1a..9085bc4683ba159bb64043e7b82173442f0a5bdd 100644
--- a/src/kpi_manager/service/__main__.py
+++ b/src/kpi_manager/service/__main__.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ from common.Settings import (
 from common.proto import monitoring_pb2
 from monitoring.service.EventTools import EventsDeviceCollector # import updated
 from monitoring.service.NameMapping import NameMapping          # import updated
-# from .MonitoringService import MonitoringService
 from .KpiManagerService import KpiManagerService
 
 terminate = threading.Event()
diff --git a/src/kpi_manager/database/tests/KpiDBtests.py b/src/kpi_manager/tests/test_kpi_db.py
similarity index 81%
rename from src/kpi_manager/database/tests/KpiDBtests.py
rename to src/kpi_manager/tests/test_kpi_db.py
index f75f05c14e76c66057d17978cc687ddcb81a0ca4..e961c12bacdbac07f111b229435ed3d89d62581f 100644
--- a/src/kpi_manager/database/tests/KpiDBtests.py
+++ b/src/kpi_manager/tests/test_kpi_db.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,13 +14,13 @@
 
 
 import logging
-from kpi_manager.database.Kpi_DB import Kpi_DB
+from kpi_manager.database.Kpi_DB import KpiDB
 
 LOGGER = logging.getLogger(__name__)
 
-def test_verify_Tables():
+def test_verify_databases_and_Tables():
     LOGGER.info('>>> test_verify_Tables : START <<< ')
-    kpiDBobj = Kpi_DB()
+    kpiDBobj = KpiDB()
     kpiDBobj.drop_database()
     kpiDBobj.verify_tables()
     kpiDBobj.create_database()
diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py
index ccb7f16c2d6fdffc049b562b627d5266d6d76c88..fb77eb1a80824f053e9ff0eaf85751b70ee9cb83 100755
--- a/src/kpi_manager/tests/test_kpi_manager.py
+++ b/src/kpi_manager/tests/test_kpi_manager.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/src/kpi_manager/tests/test_messages.py b/src/kpi_manager/tests/test_messages.py
index e1cb4ddf60412cbb19d2bf355d99cf9063105751..2d4a121fd50b48c1a087877d3059fbe340aa8551 100644
--- a/src/kpi_manager/tests/test_messages.py
+++ b/src/kpi_manager/tests/test_messages.py
@@ -1,4 +1,4 @@
-# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.