Skip to content
Snippets Groups Projects
Commit e34421ab authored by Konstantinos Poulakakis's avatar Konstantinos Poulakakis
Browse files

Merge branch...

Merge branch 'feat/194-unable-to-correctly-extract-the-aggregation-function-names-from-the-dictionary-received-as' into feat/159-automation-component-skeleton
parents 2b9013e1 1e232b2a
No related branches found
No related tags found
2 merge requests!294Release TeraFlowSDN 4.0,!238Automation component skeleton
@@ -53,6 +53,15 @@ RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
RUN rm *.proto
RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
# Install Java (required for PySpark)
RUN apt-get update && \
apt-get install -y default-jdk && \
apt-get clean
# Set JAVA_HOME environment variable
ENV JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
ENV PATH=$JAVA_HOME/bin:$PATH
# Create component sub-folders, get specific Python packages
RUN mkdir -p /var/teraflow/analytics/backend
WORKDIR /var/teraflow/analytics/backend
......
@@ -38,7 +38,7 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer):
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def StartAnalyzer(self,
                       request : Analyzer, grpc_context: grpc.ServicerContext # type: ignore
-                     ) -> AnalyzerAlarms: # type: ignore
+                     ) -> AnalyzerId: # type: ignore
         LOGGER.info ("At Service gRPC message: {:}".format(request))
         response = AnalyzerId()
......
@@ -46,6 +46,7 @@ class KafkaTopic(Enum):
     RAW               = 'topic_raw'
     LABELED           = 'topic_labeled'
     VALUE             = 'topic_value'
+    ALARMS            = 'topic_alarms'
     ANALYTICS_REQUEST = 'topic_request_analytics'
     ANALYTICS_RESPONSE = 'topic_response_analytics'
@@ -77,8 +78,8 @@ class KafkaTopic(Enum):
             # LOGGER.debug("Existing topic list: {:}".format(topic_metadata.topics))
             if topic not in topic_metadata.topics:
                 # If the topic does not exist, create a new topic
-                print("Topic {:} does not exist. Creating...".format(topic))
-                LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
+                # print("Topic {:} does not exist. Creating...".format(topic))
+                # LOGGER.debug("Topic {:} does not exist. Creating...".format(topic))
                 new_topic = NewTopic(topic, num_partitions=1, replication_factor=1)
                 KafkaConfig.get_admin_client().create_topics([new_topic])
             else:
......
@@ -14,4 +14,5 @@
 confluent-kafka==2.3.*
 requests==2.27.*
-prometheus-api-client==0.5.3
\ No newline at end of file
+prometheus-api-client==0.5.3
+apscheduler==3.10.4
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.