Commit 55dd580c authored by Waleed Akbar's avatar Waleed Akbar
Changes in Telemetry backend service.

- __main__.py added.
- Dockerfile added.
- .gitlab-ci.yml file added.
parent 93985272
Merge requests: !294 "Release TeraFlowSDN 4.0", !258 "Resolve 'Re-designing of Telemetry'"
@@ -48,6 +48,7 @@ include:
   - local: '/src/kpi_manager/.gitlab-ci.yml'
   - local: '/src/kpi_value_api/.gitlab-ci.yml'
   - local: '/src/kpi_value_writer/.gitlab-ci.yml'
   # - local: '/src/telemetry/frontend/.gitlab-ci.yml'
+  # - local: '/src/telemetry/backend/.gitlab-ci.yml'
   # This should be last one: end-to-end integration tests
   - local: '/src/tests/.gitlab-ci.yml'
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build, tag, and push the Docker image to the GitLab Docker registry
build telemetry:
  variables:
    IMAGE_NAME: 'telemetry' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: build
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    - docker buildx build -t "${IMAGE_NAME}-frontend:$IMAGE_TAG" -f ./src/$IMAGE_NAME/frontend/Dockerfile .
    - docker buildx build -t "${IMAGE_NAME}-backend:$IMAGE_TAG" -f ./src/$IMAGE_NAME/backend/Dockerfile .
    - docker tag "${IMAGE_NAME}-frontend:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-frontend:$IMAGE_TAG"
    - docker tag "${IMAGE_NAME}-backend:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-backend:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-frontend:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-backend:$IMAGE_TAG"
  after_script:
    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
      - src/common/**/*.py
      - proto/*.proto
      - src/$IMAGE_NAME/frontend/**/*.{py,in,yml}
      - src/$IMAGE_NAME/frontend/Dockerfile
      - src/$IMAGE_NAME/frontend/tests/*.py
      - src/$IMAGE_NAME/backend/Dockerfile
      - src/$IMAGE_NAME/backend/**/*.{py,in,yml}
      - src/$IMAGE_NAME/backend/tests/*.py
      - manifests/${IMAGE_NAME}service.yaml
      - .gitlab-ci.yml
# Apply unit test to the component
unit_test telemetry:
  variables:
    IMAGE_NAME: 'telemetry' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: unit_test
  needs:
    - build telemetry
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
    - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi
    - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi
    - if docker container ls | grep ${IMAGE_NAME}-frontend; then docker rm -f ${IMAGE_NAME}-frontend; else echo "${IMAGE_NAME}-frontend container is not in the system"; fi
    - if docker container ls | grep ${IMAGE_NAME}-backend; then docker rm -f ${IMAGE_NAME}-backend; else echo "${IMAGE_NAME}-backend container is not in the system"; fi
    - docker container prune -f
  script:
    - docker pull "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-backend:$IMAGE_TAG"
    - docker pull "$CI_REGISTRY_IMAGE/${IMAGE_NAME}-frontend:$IMAGE_TAG"
    - docker pull "cockroachdb/cockroach:latest-v22.2"
    - docker volume create crdb
    - >
      docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080
      --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123
      --volume "crdb:/cockroach/cockroach-data"
      cockroachdb/cockroach:latest-v22.2 start-single-node
    - echo "Waiting for initialization..."
    - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done
    - docker logs crdb
    - docker ps -a
    - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}")
    - echo $CRDB_ADDRESS
    - >
      docker run --name $IMAGE_NAME -d -p 30010:30010
      --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require"
      --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results"
      --network=teraflowbridge
      $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
    - docker ps -a
    - sleep 5
    - docker logs $IMAGE_NAME
    - >
      docker exec -i $IMAGE_NAME bash -c
      "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py"
    - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
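  # The coverage regex above captures the overall percentage from the final line of
  # "coverage report" output, e.g. "TOTAL    1234    321    74%" (sample figures).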
  after_script:
    - docker volume rm -f crdb
    - docker network rm teraflowbridge
    - docker volume prune --force
    - docker image prune --force
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
      - src/common/**/*.py
      - proto/*.proto
      - src/$IMAGE_NAME/**/*.{py,in,yml}
      - src/$IMAGE_NAME/frontend/Dockerfile
      - src/$IMAGE_NAME/frontend/tests/*.py
      - src/$IMAGE_NAME/frontend/tests/Dockerfile
      - src/$IMAGE_NAME/backend/Dockerfile
      - src/$IMAGE_NAME/backend/tests/*.py
      - src/$IMAGE_NAME/backend/tests/Dockerfile
      - manifests/${IMAGE_NAME}service.yaml
      - .gitlab-ci.yml
  # artifacts:
  #   when: always
  #   reports:
  #     junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
## Deployment of the service in Kubernetes Cluster
#deploy context:
#  variables:
#    IMAGE_NAME: 'context' # name of the microservice
#    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
#  stage: deploy
#  needs:
#    - unit test context
#    # - integ_test execute
#  script:
#    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
#    - kubectl version
#    - kubectl get all
#    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
#    - kubectl get all
#  # environment:
#  #   name: test
#  #   url: https://example.com
#  #   kubernetes:
#  #     namespace: test
#  rules:
#    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
#      when: manual
#    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
#      when: manual
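The unit_test job above injects a CRDB_URI environment variable pointing at the throw-away CockroachDB instance it starts. The snippet below is a minimal sketch of how such a URI can be opened, assuming SQLAlchemy with the sqlalchemy-cockroachdb dialect (both appear among the pinned requirements later in this commit); the crdb_is_ready helper is illustrative and not part of this changeset.

# Illustrative only: probe the CockroachDB instance started by the unit_test job.
import os
import sqlalchemy

def crdb_is_ready() -> bool:
    # Example URI shape built by the CI job:
    #   cockroachdb://tfs:tfs123@<crdb-address>:26257/tfs_test?sslmode=require
    crdb_uri = os.environ.get('CRDB_URI', '')
    engine = sqlalchemy.create_engine(crdb_uri)
    try:
        with engine.connect() as connection:
            connection.execute(sqlalchemy.text('SELECT 1'))
        return True
    except Exception:
        return False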
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
FROM python:3.9-slim
# Install dependencies
RUN apt-get --yes --quiet --quiet update && \
    apt-get --yes --quiet --quiet install wget g++ git && \
    rm -rf /var/lib/apt/lists/*
# Set Python to show logs as they occur
ENV PYTHONUNBUFFERED=0
# Download the gRPC health probe
RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
    chmod +x /bin/grpc_health_probe
# Get generic Python packages
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --upgrade setuptools wheel
RUN python3 -m pip install --upgrade pip-tools
# Get common Python packages
# Note: this step enables sharing the previous Docker build steps among all the Python components
WORKDIR /var/teraflow
COPY common_requirements.in common_requirements.in
RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
RUN python3 -m pip install -r common_requirements.txt
# Add common files into working directory
WORKDIR /var/teraflow/common
COPY src/common/. ./
RUN rm -rf proto
# Create proto sub-folder, copy .proto files, and generate Python code
RUN mkdir -p /var/teraflow/common/proto
WORKDIR /var/teraflow/common/proto
RUN touch __init__.py
COPY proto/*.proto ./
RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
RUN rm *.proto
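# The sed command below rewrites the absolute imports emitted by grpc_tools.protoc
# (e.g. "import <name>_pb2 as ...") into relative imports ("from . import <name>_pb2 as ...")
# so the generated modules resolve inside the common.proto package.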
RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
# Create component sub-folders, get specific Python packages
RUN mkdir -p /var/teraflow/telemetry/backend
WORKDIR /var/teraflow/telemetry/backend
COPY src/telemetry/backend/requirements.in requirements.in
RUN pip-compile --quiet --output-file=requirements.txt requirements.in
RUN python3 -m pip install -r requirements.txt
# Add component files into working directory
WORKDIR /var/teraflow
COPY src/telemetry/__init__.py telemetry/__init__.py
COPY src/telemetry/backend/. telemetry/backend/
# Start the service
ENTRYPOINT ["python", "-m", "telemetry.backend.service"]
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
confluent-kafka==2.3.*
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging, signal, sys, threading

from common.Settings import get_log_level
from .TelemetryBackendService import TelemetryBackendService

terminate = threading.Event()
LOGGER = None

def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
    LOGGER.warning('Terminate signal received')
    terminate.set()

def main():
    global LOGGER # pylint: disable=global-statement

    log_level = get_log_level()
    logging.basicConfig(level=log_level)
    LOGGER = logging.getLogger(__name__)

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    LOGGER.debug('Starting...')

    grpc_service = TelemetryBackendService()
    grpc_service.start()

    # Wait for Ctrl+C or termination signal
    while not terminate.wait(timeout=1.0): pass

    LOGGER.debug('Terminating...')
    grpc_service.stop()

    LOGGER.debug('Bye')
    return 0

if __name__ == '__main__':
    sys.exit(main())
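This __main__ is what the backend Dockerfile's ENTRYPOINT (python -m telemetry.backend.service) ends up running. The TelemetryBackendService class it instantiates is not part of this changeset, so the following is only a hypothetical sketch of the start()/stop() lifecycle the module above expects, assuming a Kafka consumer loop since confluent-kafka is the sole package pinned in the backend requirements.in; the class name, topic, and broker address are invented for illustration.

# Hypothetical sketch, not the actual TelemetryBackendService of this commit.
import logging, threading
from confluent_kafka import Consumer

LOGGER = logging.getLogger(__name__)

class TelemetryBackendServiceSketch:
    def __init__(self, bootstrap_servers: str = 'localhost:9092') -> None:
        # Kafka consumer configuration; broker address and group id are placeholders.
        self._consumer = Consumer({
            'bootstrap.servers' : bootstrap_servers,
            'group.id'          : 'telemetry-backend',
            'auto.offset.reset' : 'latest',
        })
        self._stop_event = threading.Event()
        self._thread     = threading.Thread(target=self._run, daemon=True)

    def start(self) -> None:
        # Subscribe to a (hypothetical) collector-request topic and start consuming.
        self._consumer.subscribe(['telemetry_requests'])
        self._thread.start()

    def _run(self) -> None:
        while not self._stop_event.is_set():
            message = self._consumer.poll(timeout=1.0)
            if message is None or message.error(): continue
            LOGGER.info('Received collector request: %s', message.value())

    def stop(self) -> None:
        # Mirror the stop() call issued by __main__ on SIGINT/SIGTERM.
        self._stop_event.set()
        self._thread.join(timeout=5.0)
        self._consumer.close()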
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
FROM python:3.9-slim
# Install dependencies
RUN apt-get --yes --quiet --quiet update && \
    apt-get --yes --quiet --quiet install wget g++ git && \
    rm -rf /var/lib/apt/lists/*
# Set Python to show logs as they occur
ENV PYTHONUNBUFFERED=0
# Download the gRPC health probe
RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
    chmod +x /bin/grpc_health_probe
# Get generic Python packages
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --upgrade setuptools wheel
RUN python3 -m pip install --upgrade pip-tools
# Get common Python packages
# Note: this step enables sharing the previous Docker build steps among all the Python components
WORKDIR /var/teraflow
COPY common_requirements.in common_requirements.in
RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
RUN python3 -m pip install -r common_requirements.txt
# Add common files into working directory
WORKDIR /var/teraflow/common
COPY src/common/. ./
RUN rm -rf proto
# Create proto sub-folder, copy .proto files, and generate Python code
RUN mkdir -p /var/teraflow/common/proto
WORKDIR /var/teraflow/common/proto
RUN touch __init__.py
COPY proto/*.proto ./
RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
RUN rm *.proto
RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
# Create component sub-folders, get specific Python packages
RUN mkdir -p /var/teraflow/telemetry/frontend
WORKDIR /var/teraflow/telemetry/frontend
COPY src/telemetry/frontend/requirements.in requirements.in
RUN pip-compile --quiet --output-file=requirements.txt requirements.in
RUN python3 -m pip install -r requirements.txt
# Add component files into working directory
WORKDIR /var/teraflow
COPY src/telemetry/__init__.py telemetry/__init__.py
COPY src/telemetry/frontend/. telemetry/frontend/
# Start the service
ENTRYPOINT ["python", "-m", "telemetry.frontend.service"]
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
confluent-kafka==2.3.*
@@ -12,16 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import signal
-import sys
-import logging, threading
-from prometheus_client import start_http_server
-from monitoring.service.NameMapping import NameMapping
+import logging, signal, sys, threading
+from common.Settings import get_log_level
 from .TelemetryFrontendService import TelemetryFrontendService
-from monitoring.service.EventTools import EventsDeviceCollector
-from common.Settings import (
-    get_log_level, wait_for_environment_variables, get_env_var_name,
-    get_metrics_port )
 terminate = threading.Event()
 LOGGER = None
@@ -31,42 +24,28 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
     terminate.set()
 def main():
-    global LOGGER
+    global LOGGER # pylint: disable=global-statement
     log_level = get_log_level()
-    logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
+    logging.basicConfig(level=log_level)
     LOGGER = logging.getLogger(__name__)
+    # ------- will be added later --------------
+    # wait_for_environment_variables([
+    #     get_env_var_name
+    # ])
+    # ------- will be added later --------------
     signal.signal(signal.SIGINT, signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
-    LOGGER.info('Starting...')
-    # Start metrics server
-    metrics_port = get_metrics_port()
-    start_http_server(metrics_port)
-    name_mapping = NameMapping()
+    LOGGER.debug('Starting...')
-    grpc_service = TelemetryFrontendService(name_mapping)
+    grpc_service = TelemetryFrontendService()
     grpc_service.start()
     # Wait for Ctrl+C or termination signal
     while not terminate.wait(timeout=1.0): pass
-    LOGGER.info('Terminating...')
+    LOGGER.debug('Terminating...')
     grpc_service.stop()
-    LOGGER.info('Bye')
+    LOGGER.debug('Bye')
     return 0
 if __name__ == '__main__':
-    sys.exit(main())
\ No newline at end of file
+    sys.exit(main())
anytree==2.8.0
APScheduler==3.10.1
attrs==23.2.0
certifi==2024.2.2
charset-normalizer==2.0.12
colorama==0.4.6
confluent-kafka==2.3.0
coverage==6.3
future-fstrings==1.2.0
greenlet==3.0.3
grpcio==1.47.5
grpcio-health-checking==1.47.5
grpcio-tools==1.47.5
grpclib==0.4.4
h2==4.1.0
hpack==4.0.0
hyperframe==6.0.1
idna==3.7
influx-line-protocol==0.1.4
iniconfig==2.0.0
kafka-python==2.0.2
multidict==6.0.5
networkx==3.3
packaging==24.0
pluggy==1.5.0
prettytable==3.5.0
prometheus-client==0.13.0
protobuf==3.20.3
psycopg2-binary==2.9.3
py==1.11.0
py-cpuinfo==9.0.0
pytest==6.2.5
pytest-benchmark==3.4.1
pytest-depends==1.0.1
python-dateutil==2.8.2
python-json-logger==2.0.2
pytz==2024.1
questdb==1.0.1
requests==2.27.1
six==1.16.0
SQLAlchemy==1.4.52
sqlalchemy-cockroachdb==1.4.4
SQLAlchemy-Utils==0.38.3
toml==0.10.2
typing_extensions==4.12.0
tzlocal==5.2
urllib3==1.26.18
wcwidth==0.2.13
xmltodict==0.12.0