From 3b47e6ff0f83934293bccb8f7957ef38d8d19549 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Mon, 7 Oct 2024 16:13:23 +0000
Subject: [PATCH] Pre-merge code cleanup

---
 manifests/analyticsservice.yaml               |   2 +-
 manifests/automationservice.yaml              |   5 -
 manifests/telemetryservice.yaml               |   2 +-
 my_deploy.sh                                  |  10 +-
 proto/automation.proto                        |  10 +-
 scripts/run_tests_locally-analytics-DB.sh     |   2 +-
 .../run_tests_locally-analytics-backend.sh    |   2 +-
 .../run_tests_locally-analytics-frontend.sh   |   2 +-
 .../backend/service/SparkStreaming.py         | 154 ++++++++++++++++++
 src/analytics/backend/service/__main__.py     |   4 +-
 src/analytics/frontend/Dockerfile             |   6 +-
 src/analytics/frontend/tests/test_frontend.py |   2 +-
 src/automation/Dockerfile                     |  24 +--
 src/automation/client/PolicyClient.py         |   2 +-
 .../service/AutomationServiceServicerImpl.py  |  36 ++--
 src/automation/service/__main__.py            |   2 +-
 src/automation/tests/test_unitary_emulated.py |   2 +-
 .../tests/test_unitary_ietf_actn.py           |   2 +-
 src/common/Constants.py                       |   2 +-
 .../policy/kafka/TopicAlarmDeserializer.java  |  16 ++
 src/telemetry/frontend/service/__main__.py    |   4 +-
 update_tfs_runtime_env_vars.sh                |   2 +-
 22 files changed, 219 insertions(+), 74 deletions(-)
 create mode 100644 src/analytics/backend/service/SparkStreaming.py

diff --git a/manifests/analyticsservice.yaml b/manifests/analyticsservice.yaml
index f014fe2dd..61666ead9 100644
--- a/manifests/analyticsservice.yaml
+++ b/manifests/analyticsservice.yaml
@@ -90,7 +90,7 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: analytics-frontendservice
+  name: analyticsservice
   labels:
     app: analyticsservice
 spec:
diff --git a/manifests/automationservice.yaml b/manifests/automationservice.yaml
index e280f479b..f6c97f7fb 100644
--- a/manifests/automationservice.yaml
+++ b/manifests/automationservice.yaml
@@ -40,11 +40,6 @@ spec:
           env:
             - name: LOG_LEVEL
               value: "INFO"
-          envFrom:
-#            - secretRef:
-#                name: crdb-analytics
-            - secretRef:
-                name: kfk-kpi-data
           startupProbe:
             exec:
               command: ["/bin/grpc_health_probe", "-addr=:30200"]
diff --git a/manifests/telemetryservice.yaml b/manifests/telemetryservice.yaml
index 04141b8dd..cd35d2698 100644
--- a/manifests/telemetryservice.yaml
+++ b/manifests/telemetryservice.yaml
@@ -90,7 +90,7 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: telemetry-frontendservice
+  name: telemetryservice
   labels:
     app: telemetryservice
 spec:
diff --git a/my_deploy.sh b/my_deploy.sh
index 10a262c6e..d6ed6259a 100755
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -20,13 +20,13 @@
 export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/"
 
 # Set the list of components, separated by spaces, you want to build images for, and deploy.
-export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator automation"
+export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator"
 
 # Uncomment to activate Monitoring (old)
 #export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
 
 # Uncomment to activate Monitoring Framework (new)
-export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics"
+#export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics automation"
 
 # Uncomment to activate BGP-LS Speaker
 #export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker"
@@ -42,10 +42,10 @@ export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_
 #fi
 
 # Uncomment to activate ZTP
-export TFS_COMPONENTS="${TFS_COMPONENTS} ztp"
+#export TFS_COMPONENTS="${TFS_COMPONENTS} ztp"
 
 # Uncomment to activate Policy Manager
-export TFS_COMPONENTS="${TFS_COMPONENTS} policy"
+#export TFS_COMPONENTS="${TFS_COMPONENTS} policy"
 
 # Uncomment to activate Optical CyberSecurity
 #export TFS_COMPONENTS="${TFS_COMPONENTS} dbscanserving opticalattackmitigator opticalattackdetector opticalattackmanager"
@@ -86,7 +86,7 @@ export TFS_IMAGE_TAG="dev"
 
 # Set the name of the Kubernetes namespace to deploy TFS to.
 export TFS_K8S_NAMESPACE="tfs"
-:
+
 # Set additional manifest files to be applied after the deployment
 export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml"
 
diff --git a/proto/automation.proto b/proto/automation.proto
index c482dc0da..edb1ef404 100644
--- a/proto/automation.proto
+++ b/proto/automation.proto
@@ -20,11 +20,11 @@ import "policy.proto";
 
 // Automation service RPCs
 service AutomationService {
-  rpc ZSMCreate (ZSMCreateRequest) returns (ZSMService) {}
-  rpc ZSMUpdate (ZSMCreateUpdate) returns (ZSMService) {}
-  rpc ZSMDelete (ZSMServiceID) returns (ZSMServiceState) {}
-  rpc ZSMGetById (ZSMServiceID) returns (ZSMService) {}
-  rpc ZSMGetByService (context.ServiceId) returns (ZSMService) {}
+  rpc ZSMCreate       (ZSMCreateRequest ) returns (ZSMService     ) {}
+  rpc ZSMUpdate       (ZSMCreateUpdate  ) returns (ZSMService     ) {}
+  rpc ZSMDelete       (ZSMServiceID     ) returns (ZSMServiceState) {}
+  rpc ZSMGetById      (ZSMServiceID     ) returns (ZSMService     ) {}
+  rpc ZSMGetByService (context.ServiceId) returns (ZSMService     ) {}
 }
 
 // ZSM service states
diff --git a/scripts/run_tests_locally-analytics-DB.sh b/scripts/run_tests_locally-analytics-DB.sh
index a31ab81ce..3efc8f971 100755
--- a/scripts/run_tests_locally-analytics-DB.sh
+++ b/scripts/run_tests_locally-analytics-DB.sh
@@ -19,6 +19,6 @@ PROJECTDIR=`pwd`
 cd $PROJECTDIR/src
 RCFILE=$PROJECTDIR/coverage/.coveragerc
 CRDB_SQL_ADDRESS=$(kubectl get service cockroachdb-public --namespace crdb -o jsonpath='{.spec.clusterIP}')
-export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs-analyzer?sslmode=require"
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_analytics?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     analytics/tests/test_analytics_db.py
diff --git a/scripts/run_tests_locally-analytics-backend.sh b/scripts/run_tests_locally-analytics-backend.sh
index 722779824..f2e012027 100755
--- a/scripts/run_tests_locally-analytics-backend.sh
+++ b/scripts/run_tests_locally-analytics-backend.sh
@@ -20,6 +20,6 @@ cd $PROJECTDIR/src
 RCFILE=$PROJECTDIR/coverage/.coveragerc
 export KFK_SERVER_ADDRESS='127.0.0.1:9092'
 CRDB_SQL_ADDRESS=$(kubectl get service cockroachdb-public --namespace crdb -o jsonpath='{.spec.clusterIP}')
-export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_analytics?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     analytics/backend/tests/test_backend.py
diff --git a/scripts/run_tests_locally-analytics-frontend.sh b/scripts/run_tests_locally-analytics-frontend.sh
index e74eb4ec1..ea04e0323 100755
--- a/scripts/run_tests_locally-analytics-frontend.sh
+++ b/scripts/run_tests_locally-analytics-frontend.sh
@@ -20,6 +20,6 @@ cd $PROJECTDIR/src
 RCFILE=$PROJECTDIR/coverage/.coveragerc
 export KFK_SERVER_ADDRESS='127.0.0.1:9092'
 CRDB_SQL_ADDRESS=$(kubectl get service cockroachdb-public --namespace crdb -o jsonpath='{.spec.clusterIP}')
-export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
+export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_analytics?sslmode=require"
 python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
     analytics/frontend/tests/test_frontend.py
diff --git a/src/analytics/backend/service/SparkStreaming.py b/src/analytics/backend/service/SparkStreaming.py
new file mode 100644
index 000000000..f204c6247
--- /dev/null
+++ b/src/analytics/backend/service/SparkStreaming.py
@@ -0,0 +1,154 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import logging, time
+from pyspark.sql                  import SparkSession
+from pyspark.sql.types            import StructType, StructField, StringType, DoubleType, TimestampType
+from pyspark.sql.functions        import from_json, col, window, avg, min, max, first, last, stddev, when, round
+from common.tools.kafka.Variables import KafkaConfig, KafkaTopic
+
+LOGGER = logging.getLogger(__name__)
+
+def DefiningSparkSession():
+    # Create a Spark session with a specific Spark version (3.5.0)
+    return SparkSession.builder \
+            .appName("Analytics") \
+            .config("spark.sql.streaming.forceDeleteTempCheckpointLocation", "true") \
+            .config("spark.jars.packages", "org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.0") \
+            .getOrCreate()
+
+def SettingKafkaConsumerParams():   # TODO: create get_kafka_consumer() in common with inputs (bootstrap servers, subscribe, startingOffsets and failOnDataLoss with default values)
+    return {
+            # "kafka.bootstrap.servers": '127.0.0.1:9092',
+            "kafka.bootstrap.servers": KafkaConfig.get_kafka_address(),
+            "subscribe"              : KafkaTopic.VALUE.value,         # topic should have atleast one message before spark session 
+            "startingOffsets"        : 'latest',
+            "failOnDataLoss"         : 'false'              # Optional: Set to "true" to fail the query on data loss
+        }
+
+def DefiningRequestSchema():
+    return StructType([
+            StructField("time_stamp" ,  StringType()  , True),
+            StructField("kpi_id"     ,  StringType()  , True),
+            StructField("kpi_value"  ,  DoubleType()  , True)
+        ])
+
+def GetAggregations(oper_list):
+    # Define the possible aggregation functions
+    agg_functions = {
+        'avg'  :  round(avg    ("kpi_value"), 3) .alias("avg_value"),
+        'min'  :  round(min    ("kpi_value"), 3) .alias("min_value"),
+        'max'  :  round(max    ("kpi_value"), 3) .alias("max_value"),
+        'first':  round(first  ("kpi_value"), 3) .alias("first_value"),
+        'last' :  round(last   ("kpi_value"), 3) .alias("last_value"),
+        'stdev':  round(stddev ("kpi_value"), 3) .alias("stdev_value")
+    }
+    return [agg_functions[op] for op in oper_list if op in agg_functions]   # Filter and return only the selected aggregations
+
+def ApplyThresholds(aggregated_df, thresholds):
+    # Apply thresholds (TH-Fail and TH-RAISE) based on the thresholds dictionary on the aggregated DataFrame.
+    
+    # Loop through each column name and its associated thresholds
+    for col_name, (fail_th, raise_th) in thresholds.items():
+        # Apply TH-Fail condition (if column value is less than the fail threshold)
+        aggregated_df = aggregated_df.withColumn(
+            f"{col_name}_THRESHOLD_FALL", 
+            when(col(col_name) < fail_th, True).otherwise(False)
+        )
+        # Apply TH-RAISE condition (if column value is greater than the raise threshold)
+        aggregated_df = aggregated_df.withColumn(
+            f"{col_name}_THRESHOLD_RAISE", 
+            when(col(col_name) > raise_th, True).otherwise(False)
+        )
+    return aggregated_df
+
+def SparkStreamer(key, kpi_list, oper_list, thresholds, stop_event,
+                  window_size=None, win_slide_duration=None, time_stamp_col=None):
+    """
+    Method to perform Spark streaming operations on a Kafka stream.
+    NOTE: the Kafka topic to be processed should have at least one message before initiating the Spark session.
+    """
+    kafka_consumer_params = SettingKafkaConsumerParams()         # Define the Kafka consumer parameters
+    schema                = DefiningRequestSchema()              # Define the schema for the incoming JSON data
+    spark                 = DefiningSparkSession()               # Define the spark session with app name and spark version
+    
+    # extra options default assignment
+    if window_size        is None: window_size        = "60 seconds"    # default
+    if win_slide_duration is None: win_slide_duration = "30 seconds"    # default
+    if time_stamp_col     is None: time_stamp_col     = "time_stamp"    # default
+    
+    try:
+        # Read data from Kafka
+        raw_stream_data = spark \
+            .readStream \
+            .format("kafka") \
+            .options(**kafka_consumer_params) \
+            .load()
+
+        # Convert the value column from Kafka to a string
+        stream_data          = raw_stream_data.selectExpr("CAST(value AS STRING)")
+        # Parse the JSON string into a DataFrame with the defined schema
+        parsed_stream_data   = stream_data.withColumn("parsed_value", from_json(col("value"), schema))
+        # Select the parsed fields
+        final_stream_data    = parsed_stream_data.select("parsed_value.*")
+        # Convert the time_stamp to proper timestamp (assuming it's in ISO format)
+        final_stream_data    = final_stream_data.withColumn(time_stamp_col, col(time_stamp_col).cast(TimestampType()))
+        # Filter the stream to only include rows where the kpi_id is in the kpi_list
+        filtered_stream_data = final_stream_data.filter(col("kpi_id").isin(kpi_list))
+        # Define a window for aggregation
+        windowed_stream_data = filtered_stream_data \
+                                .groupBy(
+                                    window( col(time_stamp_col), 
+                                           window_size, slideDuration=win_slide_duration
+                                           ),
+                                    col("kpi_id")
+                                ) \
+                                .agg(*GetAggregations(oper_list))
+        # Apply thresholds to the aggregated data
+        thresholded_stream_data = ApplyThresholds(windowed_stream_data, thresholds)
+
+        # --- This will write output to the console: FOR TESTING PURPOSES
+        # Start the Spark streaming query
+        # query = thresholded_stream_data \
+        #     .writeStream \
+        #     .outputMode("update") \
+        #     .format("console") 
+
+        # --- This will write output to Kafka: ACTUAL IMPLEMENTATION
+        query = thresholded_stream_data \
+            .selectExpr("CAST(kpi_id AS STRING) AS key", "to_json(struct(*)) AS value") \
+            .writeStream \
+            .format("kafka") \
+            .option("kafka.bootstrap.servers", KafkaConfig.get_kafka_address()) \
+            .option("topic",                   KafkaTopic.ALARMS.value) \
+            .option("checkpointLocation",      "analytics/.spark/checkpoint") \
+            .outputMode("update")
+
+        # Start the query execution
+        queryHandler = query.start()
+
+        # Loop to check for stop event flag. To be set by stop collector method.
+        while True:
+            if stop_event.is_set():
+                LOGGER.debug("Stop Event activated. Terminating in 5 seconds...")
+                print       ("Stop Event activated. Terminating in 5 seconds...")
+                time.sleep(5)
+                queryHandler.stop()
+                break
+            time.sleep(5)
+
+    except Exception as e:
+        print("Error in Spark streaming process: {:}".format(e))
+        LOGGER.debug("Error in Spark streaming process: {:}".format(e))
diff --git a/src/analytics/backend/service/__main__.py b/src/analytics/backend/service/__main__.py
index 78a06a8c6..3c4c36b7c 100644
--- a/src/analytics/backend/service/__main__.py
+++ b/src/analytics/backend/service/__main__.py
@@ -37,8 +37,8 @@ def main():
     LOGGER.info('Starting...')
 
     # Start metrics server
-    # metrics_port = get_metrics_port()
-    # start_http_server(metrics_port)
+    metrics_port = get_metrics_port()
+    start_http_server(metrics_port)
 
     grpc_service = AnalyticsBackendService()
     grpc_service.start()
diff --git a/src/analytics/frontend/Dockerfile b/src/analytics/frontend/Dockerfile
index ac9b3fe95..10499713f 100644
--- a/src/analytics/frontend/Dockerfile
+++ b/src/analytics/frontend/Dockerfile
@@ -62,9 +62,9 @@ RUN python3 -m pip install -r requirements.txt
 
 # Add component files into working directory
 WORKDIR /var/teraflow
-COPY ./src/analytics/__init__.py analytics/__init__.py
-COPY ./src/analytics/frontend/. analytics/frontend/
-COPY ./src/analytics/database/. analytics/database/
+COPY src/analytics/__init__.py analytics/__init__.py
+COPY src/analytics/frontend/. analytics/frontend/
+COPY src/analytics/database/. analytics/database/
 
 # Start the service
 ENTRYPOINT ["python", "-m", "analytics.frontend.service"]
diff --git a/src/analytics/frontend/tests/test_frontend.py b/src/analytics/frontend/tests/test_frontend.py
index bdab55198..58bee3cd2 100644
--- a/src/analytics/frontend/tests/test_frontend.py
+++ b/src/analytics/frontend/tests/test_frontend.py
@@ -130,4 +130,4 @@ def test_StartAnalyzers(analyticsFrontend_client):
 #         class_obj = AnalyticsFrontendServiceServicerImpl()
 #         for response in class_obj.StartResponseListener(analyzer_id.analyzer_id.uuid):
 #             LOGGER.debug(response)
-#             assert isinstance(response, tuple)
\ No newline at end of file
+#             assert isinstance(response, tuple)
diff --git a/src/automation/Dockerfile b/src/automation/Dockerfile
index ae6c83811..deef6b36f 100644
--- a/src/automation/Dockerfile
+++ b/src/automation/Dockerfile
@@ -62,27 +62,17 @@ RUN python3 -m pip install -r requirements.txt
 
 # Add component files into working directory
 WORKDIR /var/teraflow
-COPY src/telemetry/frontend/__init__.py telemetry/frontend/__init__.py
-COPY src/telemetry/frontend/client/. telemetry/frontend/client/
-
-COPY src/analytics/frontend/client/. analytics/frontend/client/
-COPY src/analytics/frontend/service/. analytics/frontend/service/
-COPY src/analytics/database/. analytics/database/
-COPY src/analytics/frontend/__init__.py analytics/frontend/__init__.py
-
 COPY src/context/__init__.py context/__init__.py
 COPY src/context/client/. context/client/
-
-COPY src/kpi_value_api/__init__.py kpi_value_api/__init__.py
-COPY src/kpi_value_api/client/. kpi_value_api/client/
-
 COPY src/kpi_manager/__init__.py kpi_manager/__init__.py
 COPY src/kpi_manager/client/. kpi_manager/client/
-
-COPY src/monitoring/__init__.py monitoring/__init__.py
-COPY src/monitoring/client/. monitoring/client/
-
+COPY src/telemetry/__init__.py telemetry/__init__.py
+COPY src/telemetry/frontend/__init__.py telemetry/frontend/__init__.py
+COPY src/telemetry/frontend/client/. telemetry/frontend/client/
+COPY src/analytics/__init__.py analytics/__init__.py
+COPY src/analytics/frontend/__init__.py analytics/frontend/__init__.py
+COPY src/analytics/frontend/client/. analytics/frontend/client/
 COPY src/automation/. automation/
 
 # Start the service
-ENTRYPOINT ["python", "-m", "automation.service"]
\ No newline at end of file
+ENTRYPOINT ["python", "-m", "automation.service"]
diff --git a/src/automation/client/PolicyClient.py b/src/automation/client/PolicyClient.py
index 22f2aa18b..f2b25f242 100644
--- a/src/automation/client/PolicyClient.py
+++ b/src/automation/client/PolicyClient.py
@@ -52,4 +52,4 @@ class PolicyClient:
         LOGGER.debug('AddPolicy request: {:s}'.format(grpc_message_to_json_string(request)))
         response = self.stub.PolicyAddService(request)
         LOGGER.debug('AddPolicy result: {:s}'.format(grpc_message_to_json_string(response)))
-        return response
\ No newline at end of file
+        return response
diff --git a/src/automation/service/AutomationServiceServicerImpl.py b/src/automation/service/AutomationServiceServicerImpl.py
index fa079b7b7..94550157e 100644
--- a/src/automation/service/AutomationServiceServicerImpl.py
+++ b/src/automation/service/AutomationServiceServicerImpl.py
@@ -12,35 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import grpc , logging, os, grpc
+import grpc, json, logging
+from uuid import uuid4
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
-from common.method_wrappers.Decorator import MetricsPool
+from common.method_wrappers.ServiceExceptions import InvalidArgumentException
+from common.proto.analytics_frontend_pb2 import Analyzer, AnalyzerId
+from common.proto.automation_pb2 import ZSMCreateRequest, ZSMService, ZSMServiceID, ZSMServiceState, ZSMCreateUpdate
 from common.proto.automation_pb2_grpc import AutomationServiceServicer
-from common.proto.automation_pb2 import ( ZSMCreateRequest , ZSMService ,ZSMServiceID ,ZSMServiceState,ZSMCreateUpdate , ZSMServiceStateEnum)
-from common.proto.context_pb2 import ( ServiceId , ContextId , Uuid , Empty)
-from common.proto.telemetry_frontend_pb2 import ( Collector , CollectorId )
-from common.proto.policy_pb2 import ( PolicyRuleList)
-from context.client.ContextClient import ContextClient
-from automation.client.PolicyClient import PolicyClient
-from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient
-from kpi_manager.client.KpiManagerClient import KpiManagerClient
-from common.proto.context_pb2 import ( Service )
-
-from common.proto.kpi_manager_pb2 import (KpiId, KpiDescriptor)
-from common.proto.kpi_value_api_pb2 import (KpiAlarms)
-
+from common.proto.context_pb2 import Service, ServiceId
+from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor
 from common.proto.policy_pb2 import PolicyRuleService, PolicyRuleState
-from common.proto.policy_action_pb2 import PolicyRuleAction , PolicyRuleActionConfig
+from common.proto.policy_action_pb2 import PolicyRuleAction, PolicyRuleActionConfig
 from common.proto.policy_condition_pb2 import PolicyRuleCondition
-from uuid import uuid4
-import json
+from common.proto.telemetry_frontend_pb2 import Collector, CollectorId
 
-from analytics.frontend.service.AnalyticsFrontendServiceServicerImpl import AnalyticsFrontendServiceServicerImpl
 from analytics.frontend.client.AnalyticsFrontendClient import AnalyticsFrontendClient
-from common.proto.analytics_frontend_pb2      import Analyzer, AnalyzerId
-
-from kpi_value_api.client.KpiValueApiClient import KpiValueApiClient
-from common.method_wrappers.ServiceExceptions import InvalidArgumentException
+from automation.client.PolicyClient import PolicyClient
+from context.client.ContextClient import ContextClient
+from kpi_manager.client.KpiManagerClient import KpiManagerClient
+from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient
 
 LOGGER = logging.getLogger(__name__)
 METRICS_POOL = MetricsPool('Automation', 'RPC')
diff --git a/src/automation/service/__main__.py b/src/automation/service/__main__.py
index ef0f107b4..39d8beaff 100644
--- a/src/automation/service/__main__.py
+++ b/src/automation/service/__main__.py
@@ -50,4 +50,4 @@ def main():
     return 0
 
 if __name__ == '__main__':
-    sys.exit(main())
\ No newline at end of file
+    sys.exit(main())
diff --git a/src/automation/tests/test_unitary_emulated.py b/src/automation/tests/test_unitary_emulated.py
index 24f18a2f4..dee3c4fab 100644
--- a/src/automation/tests/test_unitary_emulated.py
+++ b/src/automation/tests/test_unitary_emulated.py
@@ -19,4 +19,4 @@ LOGGER = logging.getLogger(__name__)
 def test_device_emulated_add_error_cases():
     LOGGER.info("Start Tests")
     LOGGER.info("Second log Tests")
-    assert True
\ No newline at end of file
+    assert True
diff --git a/src/automation/tests/test_unitary_ietf_actn.py b/src/automation/tests/test_unitary_ietf_actn.py
index 78d6b003e..37a5ac496 100644
--- a/src/automation/tests/test_unitary_ietf_actn.py
+++ b/src/automation/tests/test_unitary_ietf_actn.py
@@ -18,4 +18,4 @@ LOGGER = logging.getLogger(__name__)
 
 def test_device_emulated_add_error_cases():
     LOGGER.info("Start Tests")
-    assert True
\ No newline at end of file
+    assert True
diff --git a/src/common/Constants.py b/src/common/Constants.py
index 7b83baa81..4fff09ba0 100644
--- a/src/common/Constants.py
+++ b/src/common/Constants.py
@@ -86,7 +86,6 @@ DEFAULT_SERVICE_GRPC_PORTS = {
     ServiceNameEnum.MONITORING             .value :  7070,
     ServiceNameEnum.DLT                    .value :  8080,
     ServiceNameEnum.NBI                    .value :  9090,
-    ServiceNameEnum.AUTOMATION             .value :  1020,
     ServiceNameEnum.L3_CAD                 .value : 10001,
     ServiceNameEnum.L3_AM                  .value : 10002,
     ServiceNameEnum.DBSCANSERVING          .value : 10008,
@@ -108,6 +107,7 @@ DEFAULT_SERVICE_GRPC_PORTS = {
     ServiceNameEnum.TELEMETRYBACKEND       .value : 30060,
     ServiceNameEnum.ANALYTICSFRONTEND      .value : 30080,
     ServiceNameEnum.ANALYTICSBACKEND       .value : 30090,
+    ServiceNameEnum.AUTOMATION             .value : 30200,
 
     # Used for test and debugging only
     ServiceNameEnum.DLT_GATEWAY   .value : 50051,
diff --git a/src/policy/src/main/java/org/etsi/tfs/policy/policy/kafka/TopicAlarmDeserializer.java b/src/policy/src/main/java/org/etsi/tfs/policy/policy/kafka/TopicAlarmDeserializer.java
index 48945bb44..5cf476c54 100644
--- a/src/policy/src/main/java/org/etsi/tfs/policy/policy/kafka/TopicAlarmDeserializer.java
+++ b/src/policy/src/main/java/org/etsi/tfs/policy/policy/kafka/TopicAlarmDeserializer.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.etsi.tfs.policy.policy.kafka;
 
 import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer;
diff --git a/src/telemetry/frontend/service/__main__.py b/src/telemetry/frontend/service/__main__.py
index 58c622fc3..6697ff5f1 100644
--- a/src/telemetry/frontend/service/__main__.py
+++ b/src/telemetry/frontend/service/__main__.py
@@ -44,8 +44,8 @@ def main():
     kpiDBobj.create_tables()
     
     # Start metrics server
-    # metrics_port = get_metrics_port()
-    # start_http_server(metrics_port)
+    metrics_port = get_metrics_port()
+    start_http_server(metrics_port)
 
     grpc_service = TelemetryFrontendService()
     grpc_service.start()
diff --git a/update_tfs_runtime_env_vars.sh b/update_tfs_runtime_env_vars.sh
index 63a692c9f..209551c03 100755
--- a/update_tfs_runtime_env_vars.sh
+++ b/update_tfs_runtime_env_vars.sh
@@ -20,7 +20,7 @@
 
 # If not already set, set the list of components you want to build images for, and deploy.
 # By default, only basic components are deployed
-export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device monitoring service nbi webui automation"}
+export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device monitoring service nbi webui"}
 
 # If not already set, set the name of the Kubernetes namespace to deploy to.
 export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
-- 
GitLab