From 1e232b2ada7e75ce9dc21507f71b4f3d8544064a Mon Sep 17 00:00:00 2001
From: Waleed Akbar <wakbar@cttc.es>
Date: Fri, 20 Sep 2024 13:07:08 +0000
Subject: [PATCH] Changes in Analytics and KpiValueAPI:

- Fixed `StartAnalyzer` return type annotation to `AnalyzerId` (was incorrectly `AnalyzerAlarms`; the method already returns an `AnalyzerId`).
- Added `APScheduler` to the KpiValueAPI requirements (`src/kpi_value_api/requirements.in`).
- Updated the `AnalyticsBackend` Dockerfile to install a JDK and set `JAVA_HOME` (required for PySpark).
- Added `ALARMS` topic to the `KafkaTopic` enum and silenced the per-topic creation debug messages.
---
 src/analytics/backend/Dockerfile                         | 9 +++++++++
 .../service/AnalyticsFrontendServiceServicerImpl.py      | 2 +-
 src/common/tools/kafka/Variables.py                      | 5 +++--
 src/kpi_value_api/requirements.in                        | 3 ++-
 4 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/src/analytics/backend/Dockerfile b/src/analytics/backend/Dockerfile
index 17adcd3ab..df5cd7fbd 100644
--- a/src/analytics/backend/Dockerfile
+++ b/src/analytics/backend/Dockerfile
@@ -53,6 +53,15 @@ RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
 RUN rm *.proto
 RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
 
+# Install Java (required for PySpark)
+RUN apt-get update && \
+    apt-get install -y default-jdk && \
+    apt-get clean
+
+# Set JAVA_HOME environment variable
+ENV JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
+ENV PATH=$JAVA_HOME/bin:$PATH
+
 # Create component sub-folders, get specific Python packages
 RUN mkdir -p /var/teraflow/analytics/backend
 WORKDIR /var/teraflow/analytics/backend
diff --git a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
index baa88a8b7..cde19e378 100644
--- a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
+++ b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
@@ -38,7 +38,7 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer):
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def StartAnalyzer(self, 
                        request : Analyzer, grpc_context: grpc.ServicerContext # type: ignore
-                      ) -> AnalyzerAlarms: # type: ignore
+                      ) -> AnalyzerId: # type: ignore
         LOGGER.info ("At Service gRPC message: {:}".format(request))
         response = AnalyzerId()
 
diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py
index fc43c3151..b5cb3bbe0 100644
--- a/src/common/tools/kafka/Variables.py
+++ b/src/common/tools/kafka/Variables.py
@@ -46,6 +46,7 @@ class KafkaTopic(Enum):
     RAW                = 'topic_raw' 
     LABELED            = 'topic_labeled'
     VALUE              = 'topic_value'
+    ALARMS             = 'topic_alarms'
     ANALYTICS_REQUEST  = 'topic_request_analytics'
     ANALYTICS_RESPONSE = 'topic_response_analytics'
 
@@ -77,8 +78,8 @@ class KafkaTopic(Enum):
                 # LOGGER.debug("Existing topic list: {:}".format(topic_metadata.topics))
                 if topic not in topic_metadata.topics:
                     # If the topic does not exist, create a new topic
-                    print("Topic {:} does not exist. Creating...".format(topic))
-                    LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
+                    # print("Topic {:} does not exist. Creating...".format(topic))
+                    # LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
                     new_topic = NewTopic(topic, num_partitions=1, replication_factor=1)
                     KafkaConfig.get_admin_client().create_topics([new_topic])
                 else:
diff --git a/src/kpi_value_api/requirements.in b/src/kpi_value_api/requirements.in
index f5695906a..e95d6d8bb 100644
--- a/src/kpi_value_api/requirements.in
+++ b/src/kpi_value_api/requirements.in
@@ -14,4 +14,5 @@
 
 confluent-kafka==2.3.*
 requests==2.27.*
-prometheus-api-client==0.5.3
\ No newline at end of file
+prometheus-api-client==0.5.3
+apscheduler==3.10.4
-- 
GitLab