diff --git a/deploy/all.sh b/deploy/all.sh
index e9b33b469b7cad1547ab0dcb63e326672f51971e..06b8ee701530f56381080879d0e2941b664e5197 100755
--- a/deploy/all.sh
+++ b/deploy/all.sh
@@ -33,7 +33,7 @@ export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device pathcomp service slice n
 #export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
 
 # Uncomment to activate Monitoring Framework (new)
-#export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api"
+#export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics"
 
 # Uncomment to activate BGP-LS Speaker
 #export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker"
diff --git a/deploy/kafka.sh b/deploy/kafka.sh
index f8610801130bdfd198e112c8388d9ec7a34fffff..0483bce153b457800c6f7db2ef66685e90118111 100755
--- a/deploy/kafka.sh
+++ b/deploy/kafka.sh
@@ -78,7 +78,7 @@ function kafka_deploy() {
 
 echo "Apache Kafka"
 echo ">>> Checking if Apache Kafka is deployed ... "
-if [ "$KFK_REDEPLOY" = "YES" ]; then
+if [ "$KFK_REDEPLOY" == "YES" ]; then
     echo ">>> Redeploying kafka namespace"
     kafka_deploy
 elif kubectl get namespace "${KFK_NAMESPACE}" &> /dev/null; then
diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index b756ad2d098a6d1f7d3308929c6e5d7debdf4192..1dceae1c1b4ee3e2a36816557b54df48b224eba1 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -194,7 +194,7 @@ kubectl create secret generic crdb-analytics --namespace ${TFS_K8S_NAMESPACE} --
     --from-literal=CRDB_SSLMODE=require
 printf "\n"
 
-echo "Create secret with Apache Kafka data for KPI and Telemetry microservices"
+echo "Create secret with Apache Kafka data for KPI, Telemetry and Analytics microservices"
 KFK_SERVER_PORT=$(kubectl --namespace ${KFK_NAMESPACE} get service kafka-service -o 'jsonpath={.spec.ports[0].port}')
 kubectl create secret generic kfk-kpi-data --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \
     --from-literal=KFK_NAMESPACE=${KFK_NAMESPACE} \
@@ -276,7 +276,7 @@ for COMPONENT in $TFS_COMPONENTS; do
 
         if [ "$COMPONENT" == "ztp" ] || [ "$COMPONENT" == "policy" ]; then
             $DOCKER_BUILD -t "$COMPONENT:$TFS_IMAGE_TAG" -f ./src/"$COMPONENT"/Dockerfile ./src/"$COMPONENT"/ > "$BUILD_LOG"
-        elif [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ]; then
+        elif [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ] || [ "$COMPONENT" == "analytics" ]; then
             BUILD_LOG="$TMP_LOGS_FOLDER/build_${COMPONENT}-frontend.log"
             $DOCKER_BUILD -t "$COMPONENT-frontend:$TFS_IMAGE_TAG" -f ./src/"$COMPONENT"/frontend/Dockerfile . > "$BUILD_LOG"
 
@@ -299,7 +299,7 @@ for COMPONENT in $TFS_COMPONENTS; do
 
         echo "  Pushing Docker image to '$TFS_REGISTRY_IMAGES'..."
 
-        if [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ]; then
+        if [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ] || [ "$COMPONENT" == "analytics" ]; then
             IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g')
 
             TAG_LOG="$TMP_LOGS_FOLDER/tag_${COMPONENT}-frontend.log"
@@ -350,7 +350,7 @@ for COMPONENT in $TFS_COMPONENTS; do
         cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST"
     fi
 
-    if [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ]; then
+    if [ "$COMPONENT" == "pathcomp" ] || [ "$COMPONENT" == "telemetry" ] || [ "$COMPONENT" == "analytics" ]; then
         IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g')
         VERSION=$(grep -i "${GITLAB_REPO_URL}/${COMPONENT}-frontend:" "$MANIFEST" | cut -d ":" -f4)
         sed -E -i "s#image: $GITLAB_REPO_URL/$COMPONENT-frontend:${VERSION}#image: $IMAGE_URL#g" "$MANIFEST"
diff --git a/manifests/analyticsservice.yaml b/manifests/analyticsservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9fbdc642fdb6bc018b7fa1a150fc9fedd7576a94
--- /dev/null
+++ b/manifests/analyticsservice.yaml
@@ -0,0 +1,128 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: analyticsservice
+spec:
+  selector:
+    matchLabels:
+      app: analyticsservice
+  #replicas: 1
+  template:
+    metadata:
+      labels:
+        app: analyticsservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+        - name: frontend
+          image: labs.etsi.org:5050/tfs/controller/analytics-frontend:latest
+          imagePullPolicy: Always
+          ports:
+            - containerPort: 30080
+            - containerPort: 9192
+          env:
+            - name: LOG_LEVEL
+              value: "INFO"
+          envFrom:
+            - secretRef:
+                name: crdb-analytics
+            - secretRef:
+                name: kfk-kpi-data
+          readinessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30080"]
+          livenessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30080"]
+          resources:
+            requests:
+              cpu: 250m
+              memory: 128Mi
+            limits:
+              cpu: 1000m
+              memory: 1024Mi
+        - name: backend
+          image: labs.etsi.org:5050/tfs/controller/analytics-backend:latest
+          imagePullPolicy: Always
+          ports:
+            - containerPort: 30090
+            - containerPort: 9192
+          env:
+            - name: LOG_LEVEL
+              value: "INFO"
+          envFrom:
+            - secretRef:
+                name: kfk-kpi-data
+          readinessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30090"]
+          livenessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30090"]
+          resources:
+            requests:
+              cpu: 250m
+              memory: 128Mi
+            limits:
+              cpu: 1000m
+              memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: analyticsservice
+  labels:
+    app: analyticsservice
+spec:
+  type: ClusterIP
+  selector:
+    app: analyticsservice
+  ports:
+    - name: frontend-grpc
+      protocol: TCP
+      port: 30080
+      targetPort: 30080
+    - name: backend-grpc
+      protocol: TCP
+      port: 30090
+      targetPort: 30090
+    - name: metrics
+      protocol: TCP
+      port: 9192
+      targetPort: 9192
+---
+apiVersion: autoscaling/v2
+kind: HorizontalPodAutoscaler
+metadata:
+  name: analyticsservice-hpa
+spec:
+  scaleTargetRef:
+    apiVersion: apps/v1
+    kind: Deployment
+    name: analyticsservice
+  minReplicas: 1
+  maxReplicas: 20
+  metrics:
+    - type: Resource
+      resource:
+        name: cpu
+        target:
+          type: Utilization
+          averageUtilization: 80
+  #behavior:
+  #  scaleDown:
+  #    stabilizationWindowSeconds: 30
diff --git a/manifests/kafka/02-kafka.yaml b/manifests/kafka/02-kafka.yaml
index 8e4562e6eabec34bf3b87912310479bd98022aeb..8400f5944193458ccdad8be5dbc189f8f40cdd7b 100644
--- a/manifests/kafka/02-kafka.yaml
+++ b/manifests/kafka/02-kafka.yaml
@@ -53,9 +53,9 @@ spec:
         - name: KAFKA_LISTENERS
           value: PLAINTEXT://:9092
         - name: KAFKA_ADVERTISED_LISTENERS
-          value: PLAINTEXT://localhost:9092
+          value: PLAINTEXT://kafka-service.kafka.svc.cluster.local:9092
         image: wurstmeister/kafka
         imagePullPolicy: IfNotPresent
         name: kafka-broker
         ports:
-          - containerPort: 9092
\ No newline at end of file
+          - containerPort: 9092
diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh
index 9cf404ffcef6c99b261f81eb0c6b910dd60845e5..79db05fcf1259365e8a909ee99395eb59dfb9437 100755
--- a/scripts/run_tests_locally-telemetry-backend.sh
+++ b/scripts/run_tests_locally-telemetry-backend.sh
@@ -24,5 +24,5 @@ cd $PROJECTDIR/src
 # python3 kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
-python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \
-    telemetry/backend/tests/testTelemetryBackend.py
+python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
+    telemetry/backend/tests/test_TelemetryBackend.py
diff --git a/scripts/show_logs_analytics_backend.sh b/scripts/show_logs_analytics_backend.sh
new file mode 100755
index 0000000000000000000000000000000000000000..afb58567ca5ab250da48d2cfffa2c56abdff2db2
--- /dev/null
+++ b/scripts/show_logs_analytics_backend.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+########################################################################################################################
+# Define your deployment settings here
+########################################################################################################################
+
+# If not already set, set the name of the Kubernetes namespace to deploy to.
+export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
+
+########################################################################################################################
+# Automated steps start here
+########################################################################################################################
+
+kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/analyticsservice -c backend
diff --git a/scripts/show_logs_analytics_frontend.sh b/scripts/show_logs_analytics_frontend.sh
new file mode 100755
index 0000000000000000000000000000000000000000..6d3fae10b366f0082d3a393c224e8f1cb7830721
--- /dev/null
+++ b/scripts/show_logs_analytics_frontend.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+########################################################################################################################
+# Define your deployment settings here
+########################################################################################################################
+
+# If not already set, set the name of the Kubernetes namespace to deploy to.
+export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
+
+########################################################################################################################
+# Automated steps start here
+########################################################################################################################
+
+kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/analyticsservice -c frontend
diff --git a/src/analytics/backend/Dockerfile b/src/analytics/backend/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..17adcd3ab1df5704cc7ef0c5a19b3cfb1539ee22
--- /dev/null
+++ b/src/analytics/backend/Dockerfile
@@ -0,0 +1,69 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM python:3.9-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ git && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=0
+
+# Download the gRPC health probe
+RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
+    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
+    chmod +x /bin/grpc_health_probe
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip
+RUN python3 -m pip install --upgrade setuptools wheel
+RUN python3 -m pip install --upgrade pip-tools
+
+# Get common Python packages
+# Note: this step enables sharing the previous Docker build steps among all the Python components
+WORKDIR /var/teraflow
+COPY common_requirements.in common_requirements.in
+RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
+RUN python3 -m pip install -r common_requirements.txt
+
+# Add common files into working directory
+WORKDIR /var/teraflow/common
+COPY src/common/. ./
+RUN rm -rf proto
+
+# Create proto sub-folder, copy .proto files, and generate Python code
+RUN mkdir -p /var/teraflow/common/proto
+WORKDIR /var/teraflow/common/proto
+RUN touch __init__.py
+COPY proto/*.proto ./
+RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
+RUN rm *.proto
+RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
+
+# Create component sub-folders, get specific Python packages
+RUN mkdir -p /var/teraflow/analytics/backend
+WORKDIR /var/teraflow/analytics/backend
+COPY src/analytics/backend/requirements.in requirements.in
+RUN pip-compile --quiet --output-file=requirements.txt requirements.in
+RUN python3 -m pip install -r requirements.txt
+
+# Add component files into working directory
+WORKDIR /var/teraflow
+COPY src/analytics/__init__.py analytics/__init__.py
+COPY src/analytics/backend/. analytics/backend/
+
+# Start the service
+ENTRYPOINT ["python", "-m", "analytics.backend.service"]
diff --git a/src/analytics/backend/requirements.in b/src/analytics/backend/requirements.in
index 5c2280c5d384c24597dd9c4779f1bfee85a768d7..e2917029ea6f8c1d4730773946c114e08ea74cc5 100644
--- a/src/analytics/backend/requirements.in
+++ b/src/analytics/backend/requirements.in
@@ -12,6 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-java==11.0.*
 pyspark==3.5.2
 confluent-kafka==2.3.*
\ No newline at end of file
diff --git a/src/analytics/backend/service/AnalyticsBackendService.py b/src/analytics/backend/service/AnalyticsBackendService.py
index 1e0c8a15bd85213042838c0ad7f59e4c7cfceb3d..595603567fe537d9f7b33224cba0fe016a439631 100755
--- a/src/analytics/backend/service/AnalyticsBackendService.py
+++ b/src/analytics/backend/service/AnalyticsBackendService.py
@@ -21,6 +21,9 @@ from analytics.backend.service.SparkStreaming import SparkStreamer
 from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
 from confluent_kafka import Consumer as KafkaConsumer
 from confluent_kafka import KafkaError
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_port_grpc
+
 
 LOGGER = logging.getLogger(__name__)
 
@@ -29,6 +32,9 @@ class AnalyticsBackendService(GenericGrpcService):
     Class listens for ...
     """
     def __init__(self, cls_name : str = __name__) -> None:
+        LOGGER.info('Init AnalyticsBackendService')
+        port = get_service_port_grpc(ServiceNameEnum.ANALYTICSBACKEND)
+        super().__init__(port, cls_name=cls_name)
         self.running_threads = {}       # To keep track of all running analyzers 
         self.kafka_consumer = KafkaConsumer({'bootstrap.servers' : KafkaConfig.get_kafka_address(),
                                             'group.id'           : 'analytics-frontend',
@@ -72,7 +78,7 @@ class AnalyticsBackendService(GenericGrpcService):
             LOGGER.error("Failed to terminate analytics backend {:}".format(e))
             return False
 
-    def StartRequestListener(self)->tuple:
+    def install_servicers(self):
         stop_event = threading.Event()
         thread = threading.Thread(target=self.RequestListener,
                                   args=(stop_event,) )
diff --git a/src/analytics/frontend/Dockerfile b/src/analytics/frontend/Dockerfile
index f3b8838b2e76700142986922aefd984b9984fc4f..10499713f318a23e1aeab49c96e8163a5ec147fa 100644
--- a/src/analytics/frontend/Dockerfile
+++ b/src/analytics/frontend/Dockerfile
@@ -55,7 +55,7 @@ RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
 
 # Create component sub-folders, get specific Python packages
 RUN mkdir -p /var/teraflow/analytics/frontend
-WORKDIR /var/analyticstelemetry/frontend
+WORKDIR /var/teraflow/analytics/frontend
 COPY src/analytics/frontend/requirements.in requirements.in
 RUN pip-compile --quiet --output-file=requirements.txt requirements.in
 RUN python3 -m pip install -r requirements.txt
diff --git a/src/analytics/frontend/requirements.in b/src/analytics/frontend/requirements.in
index 6bf3d7c266e6006deb1fb88289b0b187abd677d6..20000420cc70f07185bddc3b3668fe775eca9c13 100644
--- a/src/analytics/frontend/requirements.in
+++ b/src/analytics/frontend/requirements.in
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-apscheduler==3.10.* # .4
+apscheduler==3.10.4
 confluent-kafka==2.3.*
 psycopg2-binary==2.9.*
 SQLAlchemy==1.4.*
diff --git a/src/analytics/frontend/service/__main__.py b/src/analytics/frontend/service/__main__.py
index e33a4c62b1b71b88a3f770cda2ec26c86db1be8b..3fa2ca8758ca54b2e7fa59df06ba0e144216ed84 100644
--- a/src/analytics/frontend/service/__main__.py
+++ b/src/analytics/frontend/service/__main__.py
@@ -13,7 +13,8 @@
 # limitations under the License.
 
 import logging, signal, sys, threading
-from common.Settings import get_log_level
+from prometheus_client import start_http_server
+from common.Settings import get_log_level, get_metrics_port
 from .AnalyticsFrontendService import AnalyticsFrontendService
 
 
@@ -28,13 +29,17 @@ def main():
     global LOGGER # pylint: disable=global-statement
 
     log_level = get_log_level()
-    logging.basicConfig(level=log_level)
+    logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
     LOGGER = logging.getLogger(__name__)
 
     signal.signal(signal.SIGINT,  signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
 
-    LOGGER.debug('Starting...')
+    LOGGER.info('Starting...')
+
+    # Start metrics server
+    metrics_port = get_metrics_port()
+    start_http_server(metrics_port)
 
     grpc_service = AnalyticsFrontendService()
     grpc_service.start()
diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py
index 215913c0e496d5c6157f4e3caa5184a837529815..fc43c315114e7b51c4e2604afbb14e165796e7c5 100644
--- a/src/common/tools/kafka/Variables.py
+++ b/src/common/tools/kafka/Variables.py
@@ -14,7 +14,6 @@
 
 import logging
 from enum import Enum
-from confluent_kafka import KafkaException
 from confluent_kafka.admin import AdminClient, NewTopic
 from common.Settings import get_setting
 
@@ -26,11 +25,11 @@ class KafkaConfig(Enum):
 
     @staticmethod
     def get_kafka_address() -> str:
-        kafka_server_address = get_setting('KFK_SERVER_ADDRESS', default=None)
-        if kafka_server_address is None:
-            KFK_NAMESPACE        = get_setting('KFK_NAMESPACE')
-            KFK_PORT             = get_setting('KFK_SERVER_PORT')
-            kafka_server_address = KFK_SERVER_ADDRESS_TEMPLATE.format(KFK_NAMESPACE, KFK_PORT)
+        # kafka_server_address = get_setting('KFK_SERVER_ADDRESS', default=None)
+        # if kafka_server_address is None:
+        KFK_NAMESPACE        = get_setting('KFK_NAMESPACE')
+        KFK_PORT             = get_setting('KFK_SERVER_PORT')
+        kafka_server_address = KFK_SERVER_ADDRESS_TEMPLATE.format(KFK_NAMESPACE, KFK_PORT)
         return kafka_server_address
         
     @staticmethod