diff --git a/src/analytics/backend/Dockerfile b/src/analytics/backend/Dockerfile
index 17adcd3ab1df5704cc7ef0c5a19b3cfb1539ee22..df5cd7fbde6dc45780fdb3a333594ab11c1ab146 100644
--- a/src/analytics/backend/Dockerfile
+++ b/src/analytics/backend/Dockerfile
@@ -53,6 +53,15 @@ RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
 RUN rm *.proto
 RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
 
+# Install Java (required for PySpark)
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends openjdk-11-jdk && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
+
+# Set JAVA_HOME environment variable
+ENV JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
+ENV PATH=$JAVA_HOME/bin:$PATH
+
 # Create component sub-folders, get specific Python packages
 RUN mkdir -p /var/teraflow/analytics/backend
 WORKDIR /var/teraflow/analytics/backend
diff --git a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
index baa88a8b7de2a06b569cf61bb248b281ce5715c9..cde19e37869222d0f643bc33409f4f8711fb1c20 100644
--- a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
+++ b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
@@ -38,7 +38,7 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer):
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def StartAnalyzer(self, 
                        request : Analyzer, grpc_context: grpc.ServicerContext # type: ignore
-                      ) -> AnalyzerAlarms: # type: ignore
+                      ) -> AnalyzerId: # type: ignore
         LOGGER.info ("At Service gRPC message: {:}".format(request))
         response = AnalyzerId()
 
diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py
index fc43c315114e7b51c4e2604afbb14e165796e7c5..b5cb3bbe07fee03f7e13cadb0b4d87766e80cb1c 100644
--- a/src/common/tools/kafka/Variables.py
+++ b/src/common/tools/kafka/Variables.py
@@ -46,6 +46,7 @@ class KafkaTopic(Enum):
     RAW                = 'topic_raw' 
     LABELED            = 'topic_labeled'
     VALUE              = 'topic_value'
+    ALARMS             = 'topic_alarms'
     ANALYTICS_REQUEST  = 'topic_request_analytics'
     ANALYTICS_RESPONSE = 'topic_response_analytics'
 
@@ -77,8 +78,8 @@ class KafkaTopic(Enum):
                 # LOGGER.debug("Existing topic list: {:}".format(topic_metadata.topics))
                 if topic not in topic_metadata.topics:
                     # If the topic does not exist, create a new topic
-                    print("Topic {:} does not exist. Creating...".format(topic))
-                    LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
+                    # print("Topic {:} does not exist. Creating...".format(topic))
+                    # LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
                     new_topic = NewTopic(topic, num_partitions=1, replication_factor=1)
                     KafkaConfig.get_admin_client().create_topics([new_topic])
                 else:
diff --git a/src/kpi_value_api/requirements.in b/src/kpi_value_api/requirements.in
index f5695906a8d02d55e15960a76986b8d03f02dba1..e95d6d8bbd81abca7eb2622e1faf6af473fcdb12 100644
--- a/src/kpi_value_api/requirements.in
+++ b/src/kpi_value_api/requirements.in
@@ -14,4 +14,5 @@
 
 confluent-kafka==2.3.*
 requests==2.27.*
-prometheus-api-client==0.5.3
\ No newline at end of file
+prometheus-api-client==0.5.3
+apscheduler==3.10.4