diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 115b336761dd94902597c3b6e21e7d3dcf225af1..cb6ea273b144535bb3bbb425df601f77ad117cc5 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -49,5 +49,6 @@ include: - local: '/src/kpi_value_api/.gitlab-ci.yml' - local: '/src/kpi_value_writer/.gitlab-ci.yml' - local: '/src/telemetry/.gitlab-ci.yml' + - local: '/src/analytics/.gitlab-ci.yml' # This should be last one: end-to-end integration tests - local: '/src/tests/.gitlab-ci.yml' diff --git a/deploy/kafka.sh b/deploy/kafka.sh index 0483bce153b457800c6f7db2ef66685e90118111..4cbcdb7014c983eeda9bab1d6655fa042751b931 100755 --- a/deploy/kafka.sh +++ b/deploy/kafka.sh @@ -47,10 +47,10 @@ function kafka_deploy() { cp "${KFK_MANIFESTS_PATH}/${KFK_MANIFEST}" "${TMP_MANIFESTS_FOLDER}/${KFK_MANIFEST}" # echo "Apache Kafka Namespace" - echo ">>> Delete Apache Kafka Namespace" + echo "Delete Apache Kafka Namespace" kubectl delete namespace ${KFK_NAMESPACE} --ignore-not-found - echo ">>> Create Apache Kafka Namespace" + echo "Create Apache Kafka Namespace" kubectl create namespace ${KFK_NAMESPACE} # echo ">>> Deplying Apache Kafka Zookeeper" @@ -76,15 +76,15 @@ function kafka_deploy() { # fi } -echo "Apache Kafka" -echo ">>> Checking if Apache Kafka is deployed ... " +echo ">>> Apache Kafka" +echo "Checking if Apache Kafka is deployed ... " if [ "$KFK_REDEPLOY" == "YES" ]; then - echo ">>> Redeploying kafka namespace" + echo "Redeploying kafka namespace" kafka_deploy elif kubectl get namespace "${KFK_NAMESPACE}" &> /dev/null; then - echo ">>> Apache Kafka already present; skipping step." + echo "Apache Kafka already present; skipping step." else - echo ">>> Kafka namespace doesn't exists. Deploying kafka namespace" + echo "Kafka namespace doesn't exists. 
Deploying kafka namespace" kafka_deploy fi echo diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 189ae11e16e77196d6728482b7f16443149b60a9..65c1e8de28f2045b2ac78938b84d3c33e282025e 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -146,55 +146,17 @@ kubectl create namespace $TFS_K8S_NAMESPACE sleep 2 printf "\n" -echo "Create secret with CockroachDB data" +echo ">>> Create Secret with CockroachDB data..." CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}') -CRDB_DATABASE_CONTEXT=${CRDB_DATABASE} # TODO: change by specific configurable environment variable kubectl create secret generic crdb-data --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \ --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \ --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \ - --from-literal=CRDB_DATABASE=${CRDB_DATABASE_CONTEXT} \ --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \ --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \ --from-literal=CRDB_SSLMODE=require printf "\n" -echo "Create secret with CockroachDB data for KPI Management microservices" -CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}') -CRDB_DATABASE_KPI_MGMT="tfs_kpi_mgmt" # TODO: change by specific configurable environment variable -kubectl create secret generic crdb-kpi-data --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \ - --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \ - --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \ - --from-literal=CRDB_DATABASE=${CRDB_DATABASE_KPI_MGMT} \ - --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \ - --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \ - --from-literal=CRDB_SSLMODE=require -printf "\n" - -echo "Create secret with CockroachDB data for Telemetry microservices" -CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}') 
-CRDB_DATABASE_TELEMETRY="tfs_telemetry" # TODO: change by specific configurable environment variable -kubectl create secret generic crdb-telemetry --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \ - --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \ - --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \ - --from-literal=CRDB_DATABASE=${CRDB_DATABASE_TELEMETRY} \ - --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \ - --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \ - --from-literal=CRDB_SSLMODE=require -printf "\n" - -echo "Create secret with CockroachDB data for Analytics microservices" -CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}') -CRDB_DATABASE_ANALYTICS="tfs_analytics" # TODO: change by specific configurable environment variable -kubectl create secret generic crdb-analytics --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \ - --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \ - --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \ - --from-literal=CRDB_DATABASE=${CRDB_DATABASE_ANALYTICS} \ - --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \ - --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \ - --from-literal=CRDB_SSLMODE=require -printf "\n" - -echo "Create secret with Apache Kafka data for KPI, Telemetry and Analytics microservices" +echo ">>> Create Secret with Apache Kakfa..." KFK_SERVER_PORT=$(kubectl --namespace ${KFK_NAMESPACE} get service kafka-service -o 'jsonpath={.spec.ports[0].port}') kubectl create secret generic kfk-kpi-data --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \ --from-literal=KFK_NAMESPACE=${KFK_NAMESPACE} \ @@ -669,6 +631,10 @@ if [[ "$TFS_COMPONENTS" == *"monitoring"* ]] && [[ "$TFS_COMPONENTS" == *"webui" printf "\n\n" fi +echo "Pruning Docker Images..." +docker image prune --force +printf "\n\n" + if [ "$DOCKER_BUILD" == "docker buildx build" ]; then echo "Pruning Docker Buildx Cache..." 
docker buildx prune --force diff --git a/manifests/analyticsservice.yaml b/manifests/analyticsservice.yaml index 0fa3ed0be6eda8cf944e199543e3c2cd59cc98d6..61666ead951c73e4034110b00a51743d33bd4ce2 100644 --- a/manifests/analyticsservice.yaml +++ b/manifests/analyticsservice.yaml @@ -37,9 +37,13 @@ spec: env: - name: LOG_LEVEL value: "INFO" + - name: CRDB_DATABASE + value: "tfs_analytics" + - name: METRICS_PORT + value: "9192" envFrom: - secretRef: - name: crdb-analytics + name: crdb-data - secretRef: name: kfk-kpi-data readinessProbe: @@ -60,10 +64,12 @@ spec: imagePullPolicy: Always ports: - containerPort: 30090 - - containerPort: 9192 + - containerPort: 9193 env: - name: LOG_LEVEL value: "INFO" + - name: METRICS_PORT + value: "9193" envFrom: - secretRef: name: kfk-kpi-data @@ -100,10 +106,14 @@ spec: protocol: TCP port: 30090 targetPort: 30090 - - name: metrics + - name: metrics-frontend protocol: TCP port: 9192 targetPort: 9192 + - name: metrics-backend + protocol: TCP + port: 9193 + targetPort: 9193 --- apiVersion: autoscaling/v2 kind: HorizontalPodAutoscaler diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index 3abc4f208da8b4820b589b798a328c4a971f55f0..0fc8a1c44f7358a962276ebcf38a165d2db986cd 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -45,6 +45,8 @@ spec: value: "FALSE" - name: ALLOW_EXPLICIT_ADD_LINK_TO_TOPOLOGY value: "FALSE" + - name: CRDB_DATABASE + value: "tfs_context" envFrom: - secretRef: name: crdb-data diff --git a/manifests/kpi_managerservice.yaml b/manifests/kpi_managerservice.yaml index 984d783a9de7ed3c0c02e87d82ec673dc19c9508..31eaf1284a08961adc6fe97d5e54eeaa7a98edae 100644 --- a/manifests/kpi_managerservice.yaml +++ b/manifests/kpi_managerservice.yaml @@ -39,9 +39,11 @@ spec: env: - name: LOG_LEVEL value: "INFO" + - name: CRDB_DATABASE + value: "tfs_kpi" envFrom: - secretRef: - name: crdb-kpi-data + name: crdb-data readinessProbe: exec: command: ["/bin/grpc_health_probe", 
"-addr=:30010"] diff --git a/manifests/nginx_ingress_http.yaml b/manifests/nginx_ingress_http.yaml index 955d5726a9f8f79560327a8f595c1865f6d37d22..ed713bf29ad8228ab3f5b051af24519c2fb9ef09 100644 --- a/manifests/nginx_ingress_http.yaml +++ b/manifests/nginx_ingress_http.yaml @@ -62,3 +62,10 @@ spec: name: nbiservice port: number: 8080 + - path: /()(qkd_app/.*) + pathType: Prefix + backend: + service: + name: qkd-appservice + port: + number: 8005 diff --git a/manifests/qkd_appservice.yaml b/manifests/qkd_appservice.yaml new file mode 100644 index 0000000000000000000000000000000000000000..4f89d6c6f8400b509dc595f551e8f181e70b2f51 --- /dev/null +++ b/manifests/qkd_appservice.yaml @@ -0,0 +1,83 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: qkd-appservice +spec: + selector: + matchLabels: + app: qkd-appservice + #replicas: 1 + template: + metadata: + labels: + app: qkd-appservice + spec: + terminationGracePeriodSeconds: 5 + containers: + - name: server + image: labs.etsi.org:5050/tfs/controller/qkd_app:latest + imagePullPolicy: Always + ports: + - containerPort: 10060 + - containerPort: 9192 + - containerPort: 8005 + env: + - name: LOG_LEVEL + value: "DEBUG" + - name: CRDB_DATABASE_APP + value: "qkd_app" + envFrom: + - secretRef: + name: crdb-data + - secretRef: + name: nats-data + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:10060"] + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:10060"] + resources: + requests: + cpu: 150m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: qkd-appservice + labels: + app: qkd-appservice +spec: + type: ClusterIP + selector: + app: qkd-appservice + ports: + - name: grpc + protocol: TCP + port: 10060 + targetPort: 10060 + - name: metrics + protocol: TCP + port: 9192 + targetPort: 9192 + - name: http + port: 8005 + targetPort: 8005 diff --git a/manifests/servicemonitors.yaml b/manifests/servicemonitors.yaml index 716c1c6891802d7fcc55da798d06c650373fb1b5..8a8fe6f39eff87d12582f2f83734c07dc695cea3 100644 --- a/manifests/servicemonitors.yaml +++ b/manifests/servicemonitors.yaml @@ -475,3 +475,156 @@ spec: any: false matchNames: - tfs # namespace where the app is running +--- +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + namespace: monitoring # namespace where prometheus is running + name: tfs-analyticsservice-metric + labels: + app: analyticsservice + #release: prometheus + #release: prom # name of the release + # ( VERY IMPORTANT: You need to know the correct release name by viewing + # the servicemonitor of Prometheus itself: Without the correct name, + # Prometheus cannot identify 
the metrics of the Flask app as the target.) +spec: + selector: + matchLabels: + # Target app service + #namespace: tfs + app: analyticsservice # same as above + #release: prometheus # same as above + endpoints: + - port: metrics-frontend # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + - port: metrics-backend # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + namespaceSelector: + any: false + matchNames: + - tfs # namespace where the app is running +--- +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + namespace: monitoring # namespace where prometheus is running + name: tfs-telemetryservice-metric + labels: + app: telemetryservice + #release: prometheus + #release: prom # name of the release + # ( VERY IMPORTANT: You need to know the correct release name by viewing + # the servicemonitor of Prometheus itself: Without the correct name, + # Prometheus cannot identify the metrics of the Flask app as the target.) 
+spec: + selector: + matchLabels: + # Target app service + #namespace: tfs + app: telemetryservice # same as above + #release: prometheus # same as above + endpoints: + - port: metrics-frontend # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + - port: metrics-backend # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + namespaceSelector: + any: false + matchNames: + - tfs # namespace where the app is running +--- +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + namespace: monitoring # namespace where prometheus is running + name: tfs-kpi-managerservice-metric + labels: + app: kpi-managerservice + #release: prometheus + #release: prom # name of the release + # ( VERY IMPORTANT: You need to know the correct release name by viewing + # the servicemonitor of Prometheus itself: Without the correct name, + # Prometheus cannot identify the metrics of the Flask app as the target.) +spec: + selector: + matchLabels: + # Target app service + #namespace: tfs + app: kpi-managerservice # same as above + #release: prometheus # same as above + endpoints: + - port: metrics # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + namespaceSelector: + any: false + matchNames: + - tfs # namespace where the app is running +--- +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + namespace: monitoring # namespace where prometheus is running + name: tfs-kpi_value_apiservice-metric + labels: + app: kpi_value_apiservice + #release: prometheus + #release: prom # name of the release + # ( VERY IMPORTANT: You need to know the correct release name by viewing + # the servicemonitor of Prometheus itself: Without the correct name, + # Prometheus cannot identify the metrics of the Flask app as the target.) 
+spec: + selector: + matchLabels: + # Target app service + #namespace: tfs + app: kpi_value_apiservice # same as above + #release: prometheus # same as above + endpoints: + - port: metrics # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + namespaceSelector: + any: false + matchNames: + - tfs # namespace where the app is running +--- +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + namespace: monitoring # namespace where prometheus is running + name: tfs-kpi_value_writerservice-metric + labels: + app: kpi_value_writerservice + #release: prometheus + #release: prom # name of the release + # ( VERY IMPORTANT: You need to know the correct release name by viewing + # the servicemonitor of Prometheus itself: Without the correct name, + # Prometheus cannot identify the metrics of the Flask app as the target.) +spec: + selector: + matchLabels: + # Target app service + #namespace: tfs + app: kpi_value_writerservice # same as above + #release: prometheus # same as above + endpoints: + - port: metrics # named port in target app + scheme: http + path: /metrics # path to scrape + interval: 5s # scrape interval + namespaceSelector: + any: false + matchNames: + - tfs # namespace where the app is running diff --git a/manifests/telemetryservice.yaml b/manifests/telemetryservice.yaml index 2f9917499a425b95d436ffa8cdb311d29483d2ca..c3763d6a91756c2592ec819d60bc649584ef3ca9 100644 --- a/manifests/telemetryservice.yaml +++ b/manifests/telemetryservice.yaml @@ -37,9 +37,13 @@ spec: env: - name: LOG_LEVEL value: "INFO" + - name: CRDB_DATABASE + value: "tfs_kpi" + - name: METRICS_PORT + value: "9192" envFrom: - secretRef: - name: crdb-telemetry + name: crdb-data - secretRef: name: kfk-kpi-data readinessProbe: @@ -60,10 +64,12 @@ spec: imagePullPolicy: Always ports: - containerPort: 30060 - - containerPort: 9192 + - containerPort: 9193 env: - name: LOG_LEVEL value: "INFO" + - name: METRICS_PORT + value: 
"9193" envFrom: - secretRef: name: kfk-kpi-data @@ -100,10 +106,14 @@ spec: protocol: TCP port: 30060 targetPort: 30060 - - name: metrics + - name: metrics-frontend protocol: TCP port: 9192 targetPort: 9192 + - name: metrics-backend + protocol: TCP + port: 9193 + targetPort: 9193 --- apiVersion: autoscaling/v2 kind: HorizontalPodAutoscaler diff --git a/my_deploy.sh b/my_deploy.sh index ffd2ba958a426ed862ed9ea6ed76062bb78c7d23..bb78bece5387532d46ea18dea9c2972e532c0cd8 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -71,7 +71,14 @@ export TFS_COMPONENTS="context device pathcomp opticalcontroller service slice #fi # Uncomment to activate QKD App -#export TFS_COMPONENTS="${TFS_COMPONENTS} app" +# To manage QKD Apps, "service" requires "qkd_app" to be deployed +# before "service", thus we "hack" the TFS_COMPONENTS environment variable prepending the +# "qkd_app" only if "service" is already in TFS_COMPONENTS, and re-export it. +#if [[ "$TFS_COMPONENTS" == *"service"* ]]; then +# BEFORE="${TFS_COMPONENTS% service*}" +# AFTER="${TFS_COMPONENTS#* service}" +# export TFS_COMPONENTS="${BEFORE} qkd_app service ${AFTER}" +#fi # Set the tag you want to use for your images. diff --git a/proto/qkd_app.proto b/proto/qkd_app.proto new file mode 100644 index 0000000000000000000000000000000000000000..7b6c47330833849b889e770aac43844ec6e6072c --- /dev/null +++ b/proto/qkd_app.proto @@ -0,0 +1,53 @@ +syntax = "proto3"; +package qkd_app; + +import "context.proto"; + +// Optare: Change this if you want to change App's structure or enums. 
+// Optare: If a message (structure) is changed it must be changed in src/app/service/database + +enum QKDAppStatusEnum { + QKDAPPSTATUS_ON = 0; + QKDAPPSTATUS_DISCONNECTED = 1; + QKDAPPSTATUS_OUT_OF_TIME = 2; + QKDAPPSTATUS_ZOMBIE = 3; +} + +enum QKDAppTypesEnum { + QKDAPPTYPES_INTERNAL = 0; + QKDAPPTYPES_CLIENT = 1; +} + +message QKDLId { + context.Uuid qkdl_uuid = 1; +} + + +message App { + AppId app_id = 1; + QKDAppStatusEnum app_status = 2; + QKDAppTypesEnum app_type = 3; + string server_app_id = 4; + repeated string client_app_id = 5; + repeated QKDLId backing_qkdl_id = 6; + context.DeviceId local_device_id = 7; + context.DeviceId remote_device_id = 8; +} + + +message AppId { + context.ContextId context_id = 1; + context.Uuid app_uuid = 2; +} + + +service AppService { + rpc RegisterApp(App) returns (context.Empty) {} + rpc ListApps (context.ContextId ) returns ( AppList ) {} + } + + + + message AppList { + repeated App apps = 1; +} diff --git a/scripts/run_tests_locally-kpi-DB.sh b/scripts/run_tests_locally-kpi-DB.sh index 4953b49e0a437becfda1648c722bcdcf92c58d93..29c6595102c22bc47fa221eb80459aea934cbcd9 100755 --- a/scripts/run_tests_locally-kpi-DB.sh +++ b/scripts/run_tests_locally-kpi-DB.sh @@ -24,7 +24,7 @@ cd $PROJECTDIR/src # python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc -CRDB_SQL_ADDRESS=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.clusterIP}') +CRDB_SQL_ADDRESS=$(kubectl get service --namespace ${CRDB_NAMESPACE} cockroachdb-public -o 'jsonpath={.spec.clusterIP}') export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require" python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ kpi_manager/tests/test_kpi_db.py diff --git a/scripts/run_tests_locally-telemetry-DB.sh b/scripts/run_tests_locally-telemetry-DB.sh index 4b9a417603cc42a4e7e8b19c7394cc38633817fa..85cb8664a7e93b63363b0bad51b52449e57d80b1 100755 --- 
a/scripts/run_tests_locally-telemetry-DB.sh +++ b/scripts/run_tests_locally-telemetry-DB.sh @@ -20,7 +20,8 @@ cd $PROJECTDIR/src # RCFILE=$PROJECTDIR/coverage/.coveragerc # coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ # kpi_manager/tests/test_unitary.py - +CRDB_SQL_ADDRESS=$(kubectl get service --namespace ${CRDB_NAMESPACE} cockroachdb-public -o 'jsonpath={.spec.clusterIP}') +export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require" RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=DEBUG --log-cli-level=debug --verbose \ telemetry/tests/test_telemetryDB.py diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh index 79db05fcf1259365e8a909ee99395eb59dfb9437..97a06a0d6c16daf94e3e6b30bfc70eca3e7ce3a3 100755 --- a/scripts/run_tests_locally-telemetry-backend.sh +++ b/scripts/run_tests_locally-telemetry-backend.sh @@ -19,10 +19,9 @@ PROJECTDIR=`pwd` cd $PROJECTDIR/src # RCFILE=$PROJECTDIR/coverage/.coveragerc # coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# kpi_manager/tests/test_unitary.py - -# python3 kpi_manager/tests/test_unitary.py RCFILE=$PROJECTDIR/coverage/.coveragerc + + python3 -m pytest --log-level=INFO --log-cli-level=debug --verbose \ telemetry/backend/tests/test_TelemetryBackend.py diff --git a/scripts/run_tests_locally-telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh index a2a1de52340cac527d4d1c446c76740d38ce7783..7506be5e0750b44e37368e86dbbfd00131c0d270 100755 --- a/scripts/run_tests_locally-telemetry-frontend.sh +++ b/scripts/run_tests_locally-telemetry-frontend.sh @@ -17,11 +17,9 @@ PROJECTDIR=`pwd` cd $PROJECTDIR/src -# RCFILE=$PROJECTDIR/coverage/.coveragerc -# coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \ -# kpi_manager/tests/test_unitary.py -# python3 kpi_manager/tests/test_unitary.py +CRDB_SQL_ADDRESS=$(kubectl get 
service --namespace ${CRDB_NAMESPACE} cockroachdb-public -o 'jsonpath={.spec.clusterIP}') +export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require" RCFILE=$PROJECTDIR/coverage/.coveragerc python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \ diff --git a/scripts/show_logs_qkd_app.sh b/scripts/show_logs_qkd_app.sh new file mode 100755 index 0000000000000000000000000000000000000000..f4f9ee6a13367c8d50eb4401a5d5f357726e5966 --- /dev/null +++ b/scripts/show_logs_qkd_app.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +######################################################################################################################## +# Define your deployment settings here +######################################################################################################################## + +# If not already set, set the name of the Kubernetes namespace to deploy to. 
+export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"} + +######################################################################################################################## +# Automated steps start here +######################################################################################################################## + +kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/qkd-appservice -c server diff --git a/src/analytics/database/Analyzer_DB.py b/src/analytics/database/Analyzer_DB.py index 1ba68989a066e4638adc12e65289ed50b740731d..ab0b50f2ebba8e2590f1fcb4f2801f42a9c5d208 100644 --- a/src/analytics/database/Analyzer_DB.py +++ b/src/analytics/database/Analyzer_DB.py @@ -13,138 +13,44 @@ # limitations under the License. import logging -import sqlalchemy_utils +from common.method_wrappers.Decorator import MetricsPool +from common.tools.database.GenericDatabase import Database +from common.method_wrappers.ServiceExceptions import OperationFailedException -from sqlalchemy import inspect, or_ -from sqlalchemy.orm import sessionmaker +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('KpiManager', 'Database') -from analytics.database.AnalyzerModel import Analyzer as AnalyzerModel -from analytics.database.AnalyzerEngine import AnalyzerEngine -from common.method_wrappers.ServiceExceptions import (OperationFailedException, AlreadyExistsException) +class AnalyzerDB(Database): + def __init__(self, model) -> None: + LOGGER.info('Init KpiManagerService') + super().__init__(model) -LOGGER = logging.getLogger(__name__) -DB_NAME = "tfs_analyzer" # TODO: export name from enviornment variable - -class AnalyzerDB: - def __init__(self): - self.db_engine = AnalyzerEngine.get_engine() - if self.db_engine is None: - LOGGER.error('Unable to get SQLAlchemy DB Engine...') - return False - self.db_name = DB_NAME - self.Session = sessionmaker(bind=self.db_engine) - - def create_database(self): - if not sqlalchemy_utils.database_exists(self.db_engine.url): - 
LOGGER.debug("Database created. {:}".format(self.db_engine.url)) - sqlalchemy_utils.create_database(self.db_engine.url) - - def drop_database(self) -> None: - if sqlalchemy_utils.database_exists(self.db_engine.url): - sqlalchemy_utils.drop_database(self.db_engine.url) - - def create_tables(self): - try: - AnalyzerModel.metadata.create_all(self.db_engine) # type: ignore - LOGGER.debug("Tables created in the database: {:}".format(self.db_name)) - except Exception as e: - LOGGER.debug("Tables cannot be created in the database. {:s}".format(str(e))) - raise OperationFailedException ("Tables can't be created", extra_details=["unable to create table {:}".format(e)]) - - def verify_tables(self): - try: - inspect_object = inspect(self.db_engine) - if(inspect_object.has_table('analyzer', None)): - LOGGER.info("Table exists in DB: {:}".format(self.db_name)) - except Exception as e: - LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) - -# ----------------- CURD OPERATIONS --------------------- - - def add_row_to_db(self, row): - session = self.Session() - try: - session.add(row) - session.commit() - LOGGER.debug(f"Row inserted into {row.__class__.__name__} table.") - return True - except Exception as e: - session.rollback() - if "psycopg2.errors.UniqueViolation" in str(e): - LOGGER.error(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") - raise AlreadyExistsException(row.__class__.__name__, row, - extra_details=["Unique key voilation: {:}".format(e)] ) - else: - LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. 
{str(e)}") - raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) - finally: - session.close() - - def search_db_row_by_id(self, model, col_name, id_to_search): - session = self.Session() - try: - entity = session.query(model).filter_by(**{col_name: id_to_search}).first() - if entity: - # LOGGER.debug(f"{model.__name__} ID found: {str(entity)}") - return entity - else: - LOGGER.debug(f"{model.__name__} ID not found, No matching row: {str(id_to_search)}") - print("{:} ID not found, No matching row: {:}".format(model.__name__, id_to_search)) - return None - except Exception as e: - session.rollback() - LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}") - raise OperationFailedException ("search by column id", extra_details=["unable to search row {:}".format(e)]) - finally: - session.close() - - def delete_db_row_by_id(self, model, col_name, id_to_search): - session = self.Session() - try: - record = session.query(model).filter_by(**{col_name: id_to_search}).first() - if record: - session.delete(record) - session.commit() - LOGGER.debug("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) - else: - LOGGER.debug("%s with %s %s not found", model.__name__, col_name, id_to_search) - return None - except Exception as e: - session.rollback() - LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) - raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) - finally: - session.close() - def select_with_filter(self, model, filter_object): + """ + Generic method to create filters dynamically based on filter_object attributes. + params: model: SQLAlchemy model class to query. + filter_object: Object that contains filtering criteria as attributes. 
+ return: SQLAlchemy session, query and Model + """ session = self.Session() try: - query = session.query(AnalyzerModel) - + query = session.query(model) # Apply filters based on the filter_object if filter_object.analyzer_id: - query = query.filter(AnalyzerModel.analyzer_id.in_([a.analyzer_id.uuid for a in filter_object.analyzer_id])) + query = query.filter(model.analyzer_id.in_([a.analyzer_id.uuid for a in filter_object.analyzer_id])) if filter_object.algorithm_names: - query = query.filter(AnalyzerModel.algorithm_name.in_(filter_object.algorithm_names)) + query = query.filter(model.algorithm_name.in_(filter_object.algorithm_names)) if filter_object.input_kpi_ids: input_kpi_uuids = [k.kpi_id.uuid for k in filter_object.input_kpi_ids] - query = query.filter(AnalyzerModel.input_kpi_ids.op('&&')(input_kpi_uuids)) + query = query.filter(model.input_kpi_ids.op('&&')(input_kpi_uuids)) if filter_object.output_kpi_ids: output_kpi_uuids = [k.kpi_id.uuid for k in filter_object.output_kpi_ids] - query = query.filter(AnalyzerModel.output_kpi_ids.op('&&')(output_kpi_uuids)) - - result = query.all() - # query should be added to return all rows - if result: - LOGGER.debug(f"Fetched filtered rows from {model.__name__} table with filters: {filter_object}") # - Results: {result} - else: - LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filter_object}") - return result + query = query.filter(model.output_kpi_ids.op('&&')(output_kpi_uuids)) except Exception as e: - LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}") - raise OperationFailedException ("Select by filter", extra_details=["unable to apply the filter {:}".format(e)]) - finally: - session.close() + LOGGER.error(f"Error creating filter of {model.__name__} table. 
ERROR: {e}") + raise OperationFailedException ("CreateKpiDescriptorFilter", extra_details=["unable to create the filter {:}".format(e)]) + + return super().select_with_filter(query, session, model) diff --git a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py index 8bb6a17afb5b911e3652fdb8d1853b5b7bc6faf3..a7fc8d49248ff01a860accac1b64a29d5533069f 100644 --- a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py +++ b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py @@ -37,7 +37,7 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer): def __init__(self): LOGGER.info('Init AnalyticsFrontendService') self.listener_topic = KafkaTopic.ANALYTICS_RESPONSE.value - self.db_obj = AnalyzerDB() + self.db_obj = AnalyzerDB(AnalyzerModel) self.result_queue = queue.Queue() self.scheduler = BackgroundScheduler() self.kafka_producer = KafkaProducer({'bootstrap.servers' : KafkaConfig.get_kafka_address()}) @@ -84,7 +84,6 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer): LOGGER.info("Analyzer Start Request Generated: Analyzer Id: {:}, Value: {:}".format(analyzer_uuid, analyzer_to_generate)) self.kafka_producer.flush() - # self.StartResponseListener(analyzer_uuid) def StartResponseListener(self, filter_key=None): """ @@ -209,6 +208,6 @@ class AnalyticsFrontendServiceServicerImpl(AnalyticsFrontendServiceServicer): if err: LOGGER.debug('Message delivery failed: {:}'.format(err)) print ('Message delivery failed: {:}'.format(err)) - # else: - # LOGGER.debug('Message delivered to topic {:}'.format(msg.topic())) - # print('Message delivered to topic {:}'.format(msg.topic())) + else: + LOGGER.debug('Message delivered to topic {:}'.format(msg.topic())) + print('Message delivered to topic {:}'.format(msg.topic())) diff --git a/src/analytics/frontend/service/__main__.py 
b/src/analytics/frontend/service/__main__.py index 6c331844f45d98095ef98951f3db43a0e2f0c69c..1df996785ec636592cf5197144d916a89257d9af 100644 --- a/src/analytics/frontend/service/__main__.py +++ b/src/analytics/frontend/service/__main__.py @@ -16,9 +16,11 @@ import logging, signal, sys, threading from prometheus_client import start_http_server from common.Settings import get_log_level, get_metrics_port from .AnalyticsFrontendService import AnalyticsFrontendService +from analytics.database.AnalyzerModel import Analyzer as Model +from common.tools.database.GenericDatabase import Database terminate = threading.Event() -LOGGER = None +LOGGER = None def signal_handler(signal, frame): # pylint: disable=redefined-outer-name LOGGER.warning('Terminate signal received') @@ -36,6 +38,11 @@ def main(): LOGGER.info('Starting...') + # To create DB + kpiDBobj = Database(Model) + kpiDBobj.create_database() + kpiDBobj.create_tables() + # Start metrics server metrics_port = get_metrics_port() start_http_server(metrics_port) diff --git a/src/analytics/frontend/tests/test_frontend.py b/src/analytics/frontend/tests/test_frontend.py index d2428c01fb021f71a884d9a99c446bfef6e66559..44e84e4683bcdcec72e572b8e4deea903bf0de65 100644 --- a/src/analytics/frontend/tests/test_frontend.py +++ b/src/analytics/frontend/tests/test_frontend.py @@ -84,10 +84,10 @@ def analyticsFrontend_client(analyticsFrontend_service : AnalyticsFrontendServic ########################### # --- "test_validate_kafka_topics" should be executed before the functionality tests --- -def test_validate_kafka_topics(): - LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ") - response = KafkaTopic.create_all_topics() - assert isinstance(response, bool) +# def test_validate_kafka_topics(): +# LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ") +# response = KafkaTopic.create_all_topics() +# assert isinstance(response, bool) # ----- core funtionality test ----- # def test_StartAnalytics(analyticsFrontend_client): @@ 
-102,27 +102,19 @@ def test_StartStopAnalyzers(analyticsFrontend_client): LOGGER.info('--> StartAnalyzer') added_analyzer_id = analyticsFrontend_client.StartAnalyzer(create_analyzer()) LOGGER.debug(str(added_analyzer_id)) - LOGGER.info(' --> Calling StartResponseListener... ') - class_obj = AnalyticsFrontendServiceServicerImpl() - response = class_obj.StartResponseListener(added_analyzer_id.analyzer_id._uuid) - LOGGER.debug(response) - LOGGER.info("waiting for timer to comlete ...") - time.sleep(3) - LOGGER.info('--> StopAnalyzer') - response = analyticsFrontend_client.StopAnalyzer(added_analyzer_id) - LOGGER.debug(str(response)) + assert isinstance(added_analyzer_id, AnalyzerId) -# def test_SelectAnalytics(analyticsFrontend_client): -# LOGGER.info(' >>> test_SelectAnalytics START: <<< ') -# response = analyticsFrontend_client.SelectAnalyzers(create_analyzer_filter()) -# LOGGER.debug(str(response)) -# assert isinstance(response, AnalyzerList) +def test_StopAnalytic(analyticsFrontend_client): + LOGGER.info(' >>> test_StopAnalytic START: <<< ') + response = analyticsFrontend_client.StopAnalyzer(create_analyzer_id()) + LOGGER.debug(str(response)) + assert isinstance(response, Empty) -# def test_StopAnalytic(analyticsFrontend_client): -# LOGGER.info(' >>> test_StopAnalytic START: <<< ') -# response = analyticsFrontend_client.StopAnalyzer(create_analyzer_id()) -# LOGGER.debug(str(response)) -# assert isinstance(response, Empty) +def test_SelectAnalytics(analyticsFrontend_client): + LOGGER.info(' >>> test_SelectAnalytics START: <<< ') + response = analyticsFrontend_client.SelectAnalyzers(create_analyzer_filter()) + LOGGER.debug(str(response)) + assert isinstance(response, AnalyzerList) # def test_ResponseListener(): # LOGGER.info(' >>> test_ResponseListener START <<< ') @@ -131,4 +123,4 @@ def test_StartStopAnalyzers(analyticsFrontend_client): # class_obj = AnalyticsFrontendServiceServicerImpl() # for response in 
class_obj.StartResponseListener(analyzer_id.analyzer_id.uuid): # LOGGER.debug(response) -# assert isinstance(response, tuple) \ No newline at end of file +# assert isinstance(response, tuple) diff --git a/src/common/Constants.py b/src/common/Constants.py index 74490321f9c8ec016fa4b48b583e2217c61710ec..8b2e215a0ee669726430d12ea4ebac334f69c1ce 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -61,6 +61,7 @@ class ServiceNameEnum(Enum): E2EORCHESTRATOR = 'e2e-orchestrator' OPTICALCONTROLLER = 'opticalcontroller' BGPLS = 'bgpls-speaker' + QKD_APP = 'qkd_app' KPIMANAGER = 'kpi-manager' KPIVALUEAPI = 'kpi-value-api' KPIVALUEWRITER = 'kpi-value-writer' @@ -96,6 +97,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.FORECASTER .value : 10040, ServiceNameEnum.E2EORCHESTRATOR .value : 10050, ServiceNameEnum.OPTICALCONTROLLER .value : 10060, + ServiceNameEnum.QKD_APP .value : 10070, ServiceNameEnum.BGPLS .value : 20030, ServiceNameEnum.KPIMANAGER .value : 30010, ServiceNameEnum.KPIVALUEAPI .value : 30020, @@ -115,10 +117,12 @@ DEFAULT_SERVICE_HTTP_PORTS = { ServiceNameEnum.CONTEXT .value : 8080, ServiceNameEnum.NBI .value : 8080, ServiceNameEnum.WEBUI .value : 8004, + ServiceNameEnum.QKD_APP .value : 8005, } # Default HTTP/REST-API service base URLs DEFAULT_SERVICE_HTTP_BASEURLS = { ServiceNameEnum.NBI .value : None, ServiceNameEnum.WEBUI .value : None, + ServiceNameEnum.QKD_APP .value : None, } diff --git a/src/common/Settings.py b/src/common/Settings.py index eaeb363adc1d9eadb9ddb0487abef8a0885ce380..13fcfc76966301599b0f5f39f2b188aea4e4d52a 100644 --- a/src/common/Settings.py +++ b/src/common/Settings.py @@ -79,12 +79,12 @@ def get_service_host(service_name : ServiceNameEnum): def get_service_port_grpc(service_name : ServiceNameEnum): envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_PORT_GRPC) default_value = DEFAULT_SERVICE_GRPC_PORTS.get(service_name.value) - return get_setting(envvar_name, default=default_value) + return 
int(get_setting(envvar_name, default=default_value)) def get_service_port_http(service_name : ServiceNameEnum): envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_PORT_HTTP) default_value = DEFAULT_SERVICE_HTTP_PORTS.get(service_name.value) - return get_setting(envvar_name, default=default_value) + return int(get_setting(envvar_name, default=default_value)) def get_service_baseurl_http(service_name : ServiceNameEnum): envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_BASEURL_HTTP) @@ -95,16 +95,34 @@ def get_log_level(): return get_setting(ENVVAR_LOG_LEVEL, default=DEFAULT_LOG_LEVEL) def get_metrics_port(): - return get_setting(ENVVAR_METRICS_PORT, default=DEFAULT_METRICS_PORT) + return int(get_setting(ENVVAR_METRICS_PORT, default=DEFAULT_METRICS_PORT)) def get_grpc_bind_address(): return get_setting(ENVVAR_GRPC_BIND_ADDRESS, default=DEFAULT_GRPC_BIND_ADDRESS) def get_grpc_max_workers(): - return get_setting(ENVVAR_GRPC_MAX_WORKERS, default=DEFAULT_GRPC_MAX_WORKERS) + return int(get_setting(ENVVAR_GRPC_MAX_WORKERS, default=DEFAULT_GRPC_MAX_WORKERS)) def get_grpc_grace_period(): - return get_setting(ENVVAR_GRPC_GRACE_PERIOD, default=DEFAULT_GRPC_GRACE_PERIOD) + return int(get_setting(ENVVAR_GRPC_GRACE_PERIOD, default=DEFAULT_GRPC_GRACE_PERIOD)) def get_http_bind_address(): return get_setting(ENVVAR_HTTP_BIND_ADDRESS, default=DEFAULT_HTTP_BIND_ADDRESS) + + +##### ----- Detect deployed microservices ----- ##### + +def is_microservice_deployed(service_name : ServiceNameEnum) -> bool: + host_env_var_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_HOST ) + port_env_var_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_PORT_GRPC) + return (host_env_var_name in os.environ) and (port_env_var_name in os.environ) + +def is_deployed_bgpls () -> bool: return is_microservice_deployed(ServiceNameEnum.BGPLS ) +def is_deployed_e2e_orch () -> bool: return is_microservice_deployed(ServiceNameEnum.E2EORCHESTRATOR ) +def 
is_deployed_forecaster() -> bool: return is_microservice_deployed(ServiceNameEnum.FORECASTER ) +def is_deployed_load_gen () -> bool: return is_microservice_deployed(ServiceNameEnum.LOAD_GENERATOR ) +def is_deployed_optical () -> bool: return is_microservice_deployed(ServiceNameEnum.OPTICALCONTROLLER) +def is_deployed_policy () -> bool: return is_microservice_deployed(ServiceNameEnum.POLICY ) +def is_deployed_qkd_app () -> bool: return is_microservice_deployed(ServiceNameEnum.QKD_APP ) +def is_deployed_slice () -> bool: return is_microservice_deployed(ServiceNameEnum.SLICE ) +def is_deployed_te () -> bool: return is_microservice_deployed(ServiceNameEnum.TE ) diff --git a/src/kpi_manager/database/Kpi_DB.py b/src/common/tools/database/GenericDatabase.py similarity index 58% rename from src/kpi_manager/database/Kpi_DB.py rename to src/common/tools/database/GenericDatabase.py index 49ad9c9b579daa918818366a1d9505089968edc2..0cd41b9ef0c97263b56a5eda67b173f6ba61a997 100644 --- a/src/kpi_manager/database/Kpi_DB.py +++ b/src/common/tools/database/GenericDatabase.py @@ -12,52 +12,54 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ import logging import sqlalchemy_utils +from .GenericEngine import Engine +from sqlalchemy import inspect from sqlalchemy.orm import sessionmaker -from kpi_manager.database.KpiEngine import KpiEngine -from kpi_manager.database.KpiModel import Kpi as KpiModel -from common.method_wrappers.ServiceExceptions import ( - AlreadyExistsException, OperationFailedException , NotFoundException) +from common.Settings import get_setting + +from common.method_wrappers.ServiceExceptions import (OperationFailedException, AlreadyExistsException) LOGGER = logging.getLogger(__name__) -DB_NAME = "tfs_kpi_mgmt" -class KpiDB: - def __init__(self): - self.db_engine = KpiEngine.get_engine() +class Database: + def __init__(self, model): + self.db_engine = Engine.get_engine() if self.db_engine is None: LOGGER.error('Unable to get SQLAlchemy DB Engine...') - return False - self.db_name = DB_NAME - self.Session = sessionmaker(bind=self.db_engine) - - def create_database(self) -> None: + raise Exception('Failed to initialize the database engine.') + self.db_model = model + self.db_table = model.__name__ + self.Session = sessionmaker(bind=self.db_engine) + + def create_database(self): if not sqlalchemy_utils.database_exists(self.db_engine.url): - sqlalchemy_utils.create_database(self.db_engine.url) LOGGER.debug("Database created. {:}".format(self.db_engine.url)) + sqlalchemy_utils.create_database(self.db_engine.url) def drop_database(self) -> None: if sqlalchemy_utils.database_exists(self.db_engine.url): sqlalchemy_utils.drop_database(self.db_engine.url) def create_tables(self): - # TODO: use "get_tables(declatrative class obj)" method of "sqlalchemy_utils" to verify tables. 
try: - KpiModel.metadata.create_all(self.db_engine) # type: ignore - LOGGER.debug("Tables created in the DB Name: {:}".format(self.db_name)) + self.db_model.metadata.create_all(self.db_engine) + LOGGER.debug("Tables created in the database: {:}".format(self.db_table)) except Exception as e: - LOGGER.debug("Tables cannot be created in the kpi database. {:s}".format(str(e))) + LOGGER.debug("Tables cannot be created in the database. {:s}".format(str(e))) raise OperationFailedException ("Tables can't be created", extra_details=["unable to create table {:}".format(e)]) def verify_tables(self): try: - with self.db_engine.connect() as connection: - result = connection.execute("SHOW TABLES;") - tables = result.fetchall() # type: ignore - LOGGER.debug("Tables verified: {:}".format(tables)) + inspect_object = inspect(self.db_engine) + if(inspect_object.has_table(self.db_table , None)): + LOGGER.info("Table exists in DB: {:}".format(self.db_name)) except Exception as e: - LOGGER.debug("Unable to fetch Table names. {:s}".format(str(e))) + LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) + +# ----------------- DB OPERATIONS --------------------- def add_row_to_db(self, row): session = self.Session() @@ -70,7 +72,8 @@ class KpiDB: session.rollback() if "psycopg2.errors.UniqueViolation" in str(e): LOGGER.error(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") - raise AlreadyExistsException(row.__class__.__name__, row, extra_details=["Unique key voilation: {:}".format(e)] ) + raise AlreadyExistsException(row.__class__.__name__, row, + extra_details=["Unique key voilation: {:}".format(e)] ) else: LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. 
{str(e)}") raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) @@ -89,6 +92,7 @@ class KpiDB: print("{:} ID not found, No matching row: {:}".format(model.__name__, id_to_search)) return None except Exception as e: + session.rollback() LOGGER.debug(f"Failed to retrieve {model.__name__} ID. {str(e)}") raise OperationFailedException ("search by column id", extra_details=["unable to search row {:}".format(e)]) finally: @@ -112,43 +116,24 @@ class KpiDB: finally: session.close() - def select_with_filter(self, model, filter_object): - session = self.Session() + def select_with_filter(self, query_object, session, model): + """ + Generic method to apply filters dynamically based on filter. + params: model_name: SQLAlchemy model class name. + query_object : Object that contains query with applied filters. + session: session of the query. + return: List of filtered records. + """ try: - query = session.query(KpiModel) - # Apply filters based on the filter_object - if filter_object.kpi_id: - query = query.filter(KpiModel.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id])) - - if filter_object.kpi_sample_type: - query = query.filter(KpiModel.kpi_sample_type.in_(filter_object.kpi_sample_type)) - - if filter_object.device_id: - query = query.filter(KpiModel.device_id.in_([d.device_uuid.uuid for d in filter_object.device_id])) - - if filter_object.endpoint_id: - query = query.filter(KpiModel.endpoint_id.in_([e.endpoint_uuid.uuid for e in filter_object.endpoint_id])) - - if filter_object.service_id: - query = query.filter(KpiModel.service_id.in_([s.service_uuid.uuid for s in filter_object.service_id])) - - if filter_object.slice_id: - query = query.filter(KpiModel.slice_id.in_([s.slice_uuid.uuid for s in filter_object.slice_id])) - - if filter_object.connection_id: - query = query.filter(KpiModel.connection_id.in_([c.connection_uuid.uuid for c in filter_object.connection_id])) - - if filter_object.link_id: - query = 
query.filter(KpiModel.link_id.in_([l.link_uuid.uuid for l in filter_object.link_id])) - result = query.all() - + result = query_object.all() + # Log result and handle empty case if result: - LOGGER.debug(f"Fetched filtered rows from {model.__name__} table with filters: {filter_object}") # - Results: {result} + LOGGER.debug(f"Fetched filtered rows from {model.__name__} with filters: {query_object}") else: - LOGGER.debug(f"No matching row found in {model.__name__} table with filters: {filter_object}") + LOGGER.warning(f"No matching rows found in {model.__name__} with filters: {query_object}") return result except Exception as e: - LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}") - raise OperationFailedException ("Select by filter", extra_details=["unable to apply the filter {:}".format(e)]) + LOGGER.error(f"Error fetching filtered rows from {model.__name__} with filters {query_object} ::: {e}") + raise OperationFailedException("Select by filter", extra_details=[f"Unable to apply the filter: {e}"]) finally: session.close() diff --git a/src/analytics/database/AnalyzerEngine.py b/src/common/tools/database/GenericEngine.py similarity index 92% rename from src/analytics/database/AnalyzerEngine.py rename to src/common/tools/database/GenericEngine.py index 9294e09966ef9e13c9cfa3cab590e5d0c8b6a80e..18bb15360853524ed93606f3137972aa76aa850a 100644 --- a/src/analytics/database/AnalyzerEngine.py +++ b/src/common/tools/database/GenericEngine.py @@ -18,14 +18,14 @@ from common.Settings import get_setting LOGGER = logging.getLogger(__name__) CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' -class AnalyzerEngine: +class Engine: @staticmethod def get_engine() -> sqlalchemy.engine.Engine: crdb_uri = get_setting('CRDB_URI', default=None) if crdb_uri is None: CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE') CRDB_SQL_PORT = get_setting('CRDB_SQL_PORT') - CRDB_DATABASE 
= "tfs-analyzer" # TODO: define variable get_setting('CRDB_DATABASE_KPI_MGMT') + CRDB_DATABASE = get_setting('CRDB_DATABASE') CRDB_USERNAME = get_setting('CRDB_USERNAME') CRDB_PASSWORD = get_setting('CRDB_PASSWORD') CRDB_SSLMODE = get_setting('CRDB_SSLMODE') diff --git a/src/common/tools/database/__init__.py b/src/common/tools/database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3 --- /dev/null +++ b/src/common/tools/database/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/common/tools/kafka/Variables.py b/src/common/tools/kafka/Variables.py index fc43c315114e7b51c4e2604afbb14e165796e7c5..73b633e23cd55aefeed9b8075f2ad35348fc83ef 100644 --- a/src/common/tools/kafka/Variables.py +++ b/src/common/tools/kafka/Variables.py @@ -25,11 +25,11 @@ class KafkaConfig(Enum): @staticmethod def get_kafka_address() -> str: - # kafka_server_address = get_setting('KFK_SERVER_ADDRESS', default=None) - # if kafka_server_address is None: - KFK_NAMESPACE = get_setting('KFK_NAMESPACE') - KFK_PORT = get_setting('KFK_SERVER_PORT') - kafka_server_address = KFK_SERVER_ADDRESS_TEMPLATE.format(KFK_NAMESPACE, KFK_PORT) + kafka_server_address = get_setting('KFK_SERVER_ADDRESS', default=None) + if kafka_server_address is None: + KFK_NAMESPACE = get_setting('KFK_NAMESPACE') + KFK_PORT = get_setting('KFK_SERVER_PORT') + kafka_server_address = KFK_SERVER_ADDRESS_TEMPLATE.format(KFK_NAMESPACE, KFK_PORT) return kafka_server_address @staticmethod diff --git a/src/common/tools/object_factory/QKDApp.py b/src/common/tools/object_factory/QKDApp.py new file mode 100644 index 0000000000000000000000000000000000000000..936a0f228302469ec51321623f8d8050f3daced0 --- /dev/null +++ b/src/common/tools/object_factory/QKDApp.py @@ -0,0 +1,24 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import copy +from typing import Dict, List, Optional +from common.Constants import DEFAULT_CONTEXT_NAME +from common.tools.object_factory.Context import json_context_id + + +def json_app_id(app_uuid : str, context_id : Optional[Dict] = None) -> Dict: + result = {'app_uuid': {'uuid': app_uuid}} + if context_id is not None: result['context_id'] = copy.deepcopy(context_id) + return result diff --git a/src/common/tools/object_factory/Service.py b/src/common/tools/object_factory/Service.py index 32b99a31f22072874ab894de2a87ce2b7d56ba85..b05821c7814ce250abca1819b111376af7c0430f 100644 --- a/src/common/tools/object_factory/Service.py +++ b/src/common/tools/object_factory/Service.py @@ -42,6 +42,16 @@ def json_service( 'service_config' : {'config_rules': copy.deepcopy(config_rules)}, } +def json_service_qkd_planned( + service_uuid : str, endpoint_ids : List[Dict] = [], constraints : List[Dict] = [], + config_rules : List[Dict] = [], context_uuid : str = DEFAULT_CONTEXT_NAME + ): + + return json_service( + service_uuid, ServiceTypeEnum.SERVICETYPE_QKD, context_id=json_context_id(context_uuid), + status=ServiceStatusEnum.SERVICESTATUS_PLANNED, endpoint_ids=endpoint_ids, constraints=constraints, + config_rules=config_rules) + def json_service_l2nm_planned( service_uuid : str, endpoint_ids : List[Dict] = [], constraints : List[Dict] = [], config_rules : List[Dict] = [], context_uuid : str = DEFAULT_CONTEXT_NAME diff --git a/src/device/.gitlab-ci.yml b/src/device/.gitlab-ci.yml index 9106c96a8bed2cea406dd150fe656927311958cf..3ae6b2b20d22d0a91d10b61acd20fd0d2d4a28ac 100644 --- a/src/device/.gitlab-ci.yml +++ b/src/device/.gitlab-ci.yml @@ -38,6 +38,30 @@ build device: - manifests/${IMAGE_NAME}service.yaml - .gitlab-ci.yml +## Start Mock QKD Nodes before unit testing +#start_mock_nodes: +# stage: deploy +# script: +# - bash src/tests/tools/mock_qkd_nodes/start.sh & +# - sleep 10 # wait for nodes to spin up +# artifacts: +# paths: +# - mock_nodes.log +# rules: +# - if: 
'$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + +## Prepare Scenario (Start NBI, mock services) +#prepare_scenario: +# stage: deploy +# script: +# - pytest src/tests/qkd/unit/PrepareScenario.py +# needs: +# - start_mock_nodes +# rules: +# - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + # Apply unit test to the component unit_test device: variables: @@ -46,6 +70,8 @@ unit_test device: stage: unit_test needs: - build device + #- start_mock_nodes + #- prepare_scenario before_script: - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY - > @@ -68,6 +94,7 @@ unit_test device: - docker logs $IMAGE_NAME - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary_emulated.py --junitxml=/opt/results/${IMAGE_NAME}_report_emulated.xml" - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary_ietf_actn.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_actn.xml" + #- docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/qkd/unit/test_*.py" - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' after_script: diff --git a/src/device/tests/qkd/unit/descriptorQKD_links.json b/src/device/tests/qkd/unit/descriptorQKD_links.json new file mode 100644 index 0000000000000000000000000000000000000000..28a9e7d5ae014f78cfa0e554ee73a53449bba03c --- /dev/null +++ 
b/src/device/tests/qkd/unit/descriptorQKD_links.json @@ -0,0 +1,77 @@ +{ + "contexts": [ + {"context_id": {"context_uuid": {"uuid": "admin"}}} + ], + "topologies": [ + {"topology_id": {"topology_uuid": {"uuid": "admin"}, "context_id": {"context_uuid": {"uuid": "admin"}}}} + ], + "devices": [ + { + "device_id": {"device_uuid": {"uuid": "QKD1"}}, "device_type": "qkd-node", + "device_operational_status": 0, "device_drivers": [12], "device_endpoints": [], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "10.0.2.10"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "11111"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": { + "scheme": "http" + }}} + ]} + + }, + { + "device_id": {"device_uuid": {"uuid": "QKD2"}}, "device_type": "qkd-node", + "device_operational_status": 0, "device_drivers": [12], "device_endpoints": [], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "10.0.2.10"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "22222"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": { + "scheme": "http" + }}} + ]} + + }, + { + "device_id": {"device_uuid": {"uuid": "QKD3"}}, "device_type": "qkd-node", + "device_operational_status": 0, "device_drivers": [12], "device_endpoints": [], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "10.0.2.10"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "33333"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": { + "scheme": "http" + }}} + ]} + + } + ], + "links": [ + { + "link_id": {"link_uuid": {"uuid": "QKD1/10.0.2.10:1001==QKD2/10.0.2.10:2001"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "QKD1"}}, 
"endpoint_uuid": {"uuid": "10.0.2.10:1001"}}, + {"device_id": {"device_uuid": {"uuid": "QKD2"}}, "endpoint_uuid": {"uuid": "10.0.2.10:2001"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "QKD2/10.0.2.10:2001==QKD1/10.0.2.10:1001"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "QKD2"}}, "endpoint_uuid": {"uuid": "10.0.2.10:2001"}}, + {"device_id": {"device_uuid": {"uuid": "QKD1"}}, "endpoint_uuid": {"uuid": "10.0.2.10:1001"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "QKD2/10.0.2.10:2002==QKD3/10.0.2.10:3001"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "QKD2"}}, "endpoint_uuid": {"uuid": "10.0.2.10:2002"}}, + {"device_id": {"device_uuid": {"uuid": "QKD3"}}, "endpoint_uuid": {"uuid": "10.0.2.10:3001"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "QKD3/10.0.2.10:3001==QKD2/10.0.2.10:2002"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "QKD3"}}, "endpoint_uuid": {"uuid": "10.0.2.10:3001"}}, + {"device_id": {"device_uuid": {"uuid": "QKD2"}}, "endpoint_uuid": {"uuid": "10.0.2.10:2002"}} + ] + } + + ] +} diff --git a/src/device/tests/qkd/unit/test_application_deployment.py b/src/device/tests/qkd/unit/test_application_deployment.py index 92e16663b41556563aab884be2ee48518cd15ff7..d10ddc523062e1377c7621edfdd42b57c32bad56 100644 --- a/src/device/tests/qkd/unit/test_application_deployment.py +++ b/src/device/tests/qkd/unit/test_application_deployment.py @@ -14,9 +14,11 @@ import pytest import json +import os +os.environ['DEVICE_EMULATED_ONLY'] = 'YES' from device.service.drivers.qkd.QKDDriver2 import QKDDriver -MOCK_QKD_ADDRRESS = '127.0.0.1' +MOCK_QKD_ADDRRESS = '10.0.2.10' MOCK_PORT = 11111 @pytest.fixture diff --git a/src/device/tests/qkd/unit/test_create_apps.py b/src/device/tests/qkd/unit/test_create_apps.py new file mode 100644 index 0000000000000000000000000000000000000000..4724e5c4abd04e28586d54ae6dbbbf0be3c7dc5c --- /dev/null +++ b/src/device/tests/qkd/unit/test_create_apps.py @@ -0,0 
+1,40 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests + +QKD_ADDRESS = '10.0.2.10' +QKD_URL = 'http://{:s}/qkd_app/create_qkd_app'.format(QKD_ADDRESS) + +QKD_REQUEST_1 = { + 'app': { + 'server_app_id': '1', + 'client_app_id': [], + 'app_status': 'ON', + 'local_qkdn_id': '00000001-0000-0000-0000-0000000000', + 'backing_qkdl_id': ['00000003-0002-0000-0000-0000000000'] + } +} +print(requests.post(QKD_URL, json=QKD_REQUEST_1)) + +QKD_REQUEST_2 = { + 'app': { + 'server_app_id': '1', + 'client_app_id': [], + 'app_status': 'ON', + 'local_qkdn_id': '00000003-0000-0000-0000-0000000000', + 'backing_qkdl_id': ['00000003-0002-0000-0000-0000000000'] + } +} +print(requests.post(QKD_URL, json=QKD_REQUEST_2)) diff --git a/src/device/tests/qkd/unit/test_qkd_mock_connectivity.py b/src/device/tests/qkd/unit/test_qkd_mock_connectivity.py index 150d00fd079b0a036f383653c833562279bb4d72..be9427d9b619423a61a3e6f5270d8aab76dc8955 100644 --- a/src/device/tests/qkd/unit/test_qkd_mock_connectivity.py +++ b/src/device/tests/qkd/unit/test_qkd_mock_connectivity.py @@ -38,3 +38,4 @@ def test_qkd_driver_timeout_connection(mock_get, qkd_driver): mock_get.side_effect = requests.exceptions.Timeout qkd_driver.timeout = 0.001 # Simulate very short timeout assert qkd_driver.Connect() is False + diff --git a/src/kpi_manager/database/KpiDB.py b/src/kpi_manager/database/KpiDB.py new file mode 100644 index 
0000000000000000000000000000000000000000..d503f06f4cdeb57efd4c02701803f81fd31d3eea --- /dev/null +++ b/src/kpi_manager/database/KpiDB.py @@ -0,0 +1,66 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from common.method_wrappers.Decorator import MetricsPool +from common.tools.database.GenericDatabase import Database +from common.method_wrappers.ServiceExceptions import OperationFailedException + +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('KpiManager', 'Database') + +class KpiDB(Database): + def __init__(self, model) -> None: + LOGGER.info('Init KpiManagerService') + super().__init__(model) + + def select_with_filter(self, model, filter_object): + """ + Generic method to create filters dynamically based on filter_object attributes. + params: model: SQLAlchemy model class to query. + filter_object: Object that contains filtering criteria as attributes. 
+ return: SQLAlchemy session, query and Model + """ + session = self.Session() + try: + query = session.query(model) + # Apply filters based on the filter_object + if filter_object.kpi_id: + query = query.filter(model.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id])) + + if filter_object.kpi_sample_type: + query = query.filter(model.kpi_sample_type.in_(filter_object.kpi_sample_type)) + + if filter_object.device_id: + query = query.filter(model.device_id.in_([d.device_uuid.uuid for d in filter_object.device_id])) + + if filter_object.endpoint_id: + query = query.filter(model.endpoint_id.in_([e.endpoint_uuid.uuid for e in filter_object.endpoint_id])) + + if filter_object.service_id: + query = query.filter(model.service_id.in_([s.service_uuid.uuid for s in filter_object.service_id])) + + if filter_object.slice_id: + query = query.filter(model.slice_id.in_([s.slice_uuid.uuid for s in filter_object.slice_id])) + + if filter_object.connection_id: + query = query.filter(model.connection_id.in_([c.connection_uuid.uuid for c in filter_object.connection_id])) + + if filter_object.link_id: + query = query.filter(model.link_id.in_([l.link_uuid.uuid for l in filter_object.link_id])) + except Exception as e: + LOGGER.error(f"Error creating filter of {model.__name__} table. 
ERROR: {e}") + raise OperationFailedException ("CreateKpiDescriptorFilter", extra_details=["unable to create the filter {:}".format(e)]) + + return super().select_with_filter(query, session, model) diff --git a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py index fd22474829ea0dfb6b1a25e70bbb4d5440c0216b..3f9ae8492380e5e11cd3cbc926a2fce07620d8a7 100644 --- a/src/kpi_manager/service/KpiManagerServiceServicerImpl.py +++ b/src/kpi_manager/service/KpiManagerServiceServicerImpl.py @@ -18,7 +18,8 @@ from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_m from common.proto.context_pb2 import Empty from common.proto.kpi_manager_pb2_grpc import KpiManagerServiceServicer from common.proto.kpi_manager_pb2 import KpiId, KpiDescriptor, KpiDescriptorFilter, KpiDescriptorList -from kpi_manager.database.Kpi_DB import KpiDB +# from kpi_manager.database.Kpi_DB import KpiDB +from kpi_manager.database.KpiDB import KpiDB from kpi_manager.database.KpiModel import Kpi as KpiModel LOGGER = logging.getLogger(__name__) @@ -27,7 +28,7 @@ METRICS_POOL = MetricsPool('KpiManager', 'NBIgRPC') class KpiManagerServiceServicerImpl(KpiManagerServiceServicer): def __init__(self): LOGGER.info('Init KpiManagerService') - self.kpi_db_obj = KpiDB() + self.kpi_db_obj = KpiDB(KpiModel) @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) def SetKpiDescriptor(self, request: KpiDescriptor, grpc_context: grpc.ServicerContext # type: ignore diff --git a/src/kpi_manager/service/__main__.py b/src/kpi_manager/service/__main__.py index 244d5afa373a6462a0382a0ed26a588088a689a1..05e32bb58128975ea5d2a5f015d1e8b3977c9905 100644 --- a/src/kpi_manager/service/__main__.py +++ b/src/kpi_manager/service/__main__.py @@ -16,8 +16,11 @@ import logging, signal, sys, threading from common.Settings import get_log_level from .KpiManagerService import KpiManagerService +from kpi_manager.database.KpiModel import Kpi as Model +from 
common.tools.database.GenericDatabase import Database + terminate = threading.Event() -LOGGER = None +LOGGER = None def signal_handler(signal, frame): # pylint: disable=redefined-outer-name LOGGER.warning('Terminate signal received') @@ -35,6 +38,11 @@ def main(): LOGGER.debug('Starting...') + # To create DB + kpiDBobj = Database(Model) + kpiDBobj.create_database() + kpiDBobj.create_tables() + grpc_service = KpiManagerService() grpc_service.start() diff --git a/src/kpi_manager/tests/__init__.py b/src/kpi_manager/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3 --- /dev/null +++ b/src/kpi_manager/tests/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/kpi_manager/tests/test_kpi_db.py b/src/kpi_manager/tests/test_kpi_db.py index d4a57f83664f851504389b3bbe99d5c2a92542d9..b1513a83f7c6122a34553d5933aea8d8c438e2a6 100644 --- a/src/kpi_manager/tests/test_kpi_db.py +++ b/src/kpi_manager/tests/test_kpi_db.py @@ -14,7 +14,12 @@ import logging -from kpi_manager.database.Kpi_DB import KpiDB +# from kpi_manager.database.Kpi_DB import KpiDB +from common.proto.kpi_manager_pb2 import KpiDescriptorList +from .test_messages import create_kpi_filter_request +from kpi_manager.database.KpiModel import Kpi as KpiModel +from kpi_manager.database.KpiDB import KpiDB +# from common.tools.database.GenericDatabase import Database LOGGER = logging.getLogger(__name__) @@ -26,3 +31,22 @@ def test_verify_databases_and_Tables(): kpiDBobj.create_database() kpiDBobj.create_tables() kpiDBobj.verify_tables() + +# def test_generic_DB_select_method(): +# LOGGER.info("--> STARTED-test_generic_DB_select_method") +# kpi_obj = KpiDB() +# _filter = create_kpi_filter_request() +# # response = KpiDescriptorList() +# try: +# kpi_obj.select_with_filter(KpiModel, _filter) +# except Exception as e: +# LOGGER.error('Unable to apply filter on kpi descriptor. 
{:}'.format(e)) +# LOGGER.info("--> FINISHED-test_generic_DB_select_method") +# # try: +# # for row in rows: +# # kpiDescriptor_obj = KpiModel.convert_row_to_KpiDescriptor(row) +# # response.kpi_descriptor_list.append(kpiDescriptor_obj) +# # return response +# # except Exception as e: +# # LOGGER.info('Unable to process filter response {:}'.format(e)) +# # assert isinstance(r) diff --git a/src/kpi_manager/tests/test_kpi_manager.py b/src/kpi_manager/tests/test_kpi_manager.py index 219fdadee9e2f4ca9ea9ac0be040043d4edfbdbe..06e836b70963768b375ab04e29a640591b283108 100755 --- a/src/kpi_manager/tests/test_kpi_manager.py +++ b/src/kpi_manager/tests/test_kpi_manager.py @@ -139,9 +139,9 @@ def test_SelectKpiDescriptor(kpi_manager_client): LOGGER.info("Response gRPC message object: {:}".format(response)) assert isinstance(response, KpiDescriptorList) -def test_set_list_of_KPIs(kpi_manager_client): - LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ") - KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] - # adding KPI - for kpi in KPIs_TO_SEARCH: - kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(kpi)) +# def test_set_list_of_KPIs(kpi_manager_client): +# LOGGER.debug(" >>> test_set_list_of_KPIs: START <<< ") +# KPIs_TO_SEARCH = ["node_in_power_total", "node_in_current_total", "node_out_power_total"] +# # adding KPI +# for kpi in KPIs_TO_SEARCH: +# kpi_manager_client.SetKpiDescriptor(create_kpi_descriptor_request_a(kpi)) diff --git a/src/pathcomp/frontend/Config.py b/src/pathcomp/frontend/Config.py index 08de81b47dd05ce19ac8335b5b31df8ef4ee461e..ab431acb92ac3732ff98bf7228d825c92d279986 100644 --- a/src/pathcomp/frontend/Config.py +++ b/src/pathcomp/frontend/Config.py @@ -13,7 +13,7 @@ # limitations under the License. 
import os -from common.Settings import get_setting +from common.Settings import get_setting, is_deployed_forecaster DEFAULT_PATHCOMP_BACKEND_SCHEME = 'http' DEFAULT_PATHCOMP_BACKEND_HOST = '127.0.0.1' @@ -44,6 +44,7 @@ SETTING_NAME_ENABLE_FORECASTER = 'ENABLE_FORECASTER' TRUE_VALUES = {'Y', 'YES', 'TRUE', 'T', 'E', 'ENABLE', 'ENABLED'} def is_forecaster_enabled() -> bool: + if not is_deployed_forecaster(): return False is_enabled = get_setting(SETTING_NAME_ENABLE_FORECASTER, default=None) if is_enabled is None: return False str_is_enabled = str(is_enabled).upper() diff --git a/src/qkd_app/.gitlab-ci.yml b/src/qkd_app/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..85ee2b5e04bce3077086d0cc831043483a995aef --- /dev/null +++ b/src/qkd_app/.gitlab-ci.yml @@ -0,0 +1,80 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +build app: + variables: + IMAGE_NAME: 'qkd_app' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: build + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + script: + - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile . 
+ - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + after_script: + - docker images --filter="dangling=true" --quiet | xargs -r docker rmi + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + +# Apply unit test to the component +unit_test app: + variables: + IMAGE_NAME: 'qkd_app' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: unit_test + needs: + - build app + - unit_test service + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi + - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi + script: + - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker run --name $IMAGE_NAME -d -p 10070:10070 -p 8005:8005 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG + - sleep 5 + - docker ps -a + - docker logs $IMAGE_NAME + - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml" + - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" + coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' + after_script: + - docker rm 
-f $IMAGE_NAME + - docker network rm teraflowbridge + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - src/$IMAGE_NAME/tests/Dockerfile + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + artifacts: + when: always + reports: + junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml diff --git a/src/qkd_app/Config.py b/src/qkd_app/Config.py new file mode 100644 index 0000000000000000000000000000000000000000..07d08814021ef82220611ee21c01ba01806682e9 --- /dev/null +++ b/src/qkd_app/Config.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/Dockerfile b/src/qkd_app/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..4d2b8d61bd61ca98d62316b021d2486de3777977 --- /dev/null +++ b/src/qkd_app/Dockerfile @@ -0,0 +1,70 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM python:3.9-slim + +# Install dependencies +RUN apt-get --yes --quiet --quiet update && \ + apt-get --yes --quiet --quiet install wget g++ git && \ + rm -rf /var/lib/apt/lists/* + +# Set Python to show logs as they occur +ENV PYTHONUNBUFFERED=0 + +# Download the gRPC health probe +RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \ + wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ + chmod +x /bin/grpc_health_probe + +# Get generic Python packages +RUN python3 -m pip install --upgrade pip +RUN python3 -m pip install --upgrade setuptools wheel +RUN python3 -m pip install --upgrade pip-tools + +# Get common Python packages +# Note: this step enables sharing the previous Docker build steps among all the Python components +WORKDIR /var/teraflow +COPY common_requirements.in common_requirements.in +RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in +RUN python3 -m pip install -r common_requirements.txt + +# Add common files into working directory +WORKDIR /var/teraflow/common +COPY src/common/. ./ +RUN rm -rf proto + +# Create proto sub-folder, copy .proto files, and generate Python code +RUN mkdir -p /var/teraflow/common/proto +WORKDIR /var/teraflow/common/proto +RUN touch __init__.py +COPY proto/*.proto ./ +RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto +RUN rm *.proto +RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . 
\1_pb2/g' {} \; + +# Create component sub-folders, get specific Python packages +RUN mkdir -p /var/teraflow/qkd_app +WORKDIR /var/teraflow/qkd_app +COPY src/qkd_app/requirements.in requirements.in +RUN pip-compile --quiet --output-file=requirements.txt requirements.in +RUN python3 -m pip install -r requirements.txt + +# Add component files into working directory +WORKDIR /var/teraflow +COPY src/context/. context/ +COPY src/service/. service/ +COPY src/qkd_app/. qkd_app/ + +# Start the service +ENTRYPOINT ["python", "-m", "qkd_app.service"] diff --git a/src/qkd_app/__init__.py b/src/qkd_app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07d08814021ef82220611ee21c01ba01806682e9 --- /dev/null +++ b/src/qkd_app/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/client/QKDAppClient.py b/src/qkd_app/client/QKDAppClient.py new file mode 100644 index 0000000000000000000000000000000000000000..1a174df6adc69ab9ce88b0d8878c92b9b9e7820e --- /dev/null +++ b/src/qkd_app/client/QKDAppClient.py @@ -0,0 +1,64 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc, logging +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc +from common.proto.context_pb2 import Empty, ContextId +from common.proto.qkd_app_pb2 import App, AppId, AppList +from common.proto.qkd_app_pb2_grpc import AppServiceStub +from common.tools.client.RetryDecorator import retry, delay_exponential +from common.tools.grpc.Tools import grpc_message_to_json_string + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 15 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +class QKDAppClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.QKD_APP) + if not port: port = get_service_port_grpc(ServiceNameEnum.QKD_APP) + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.debug('Creating channel to {:s}...'.format(self.endpoint)) + self.channel = None + self.stub = None + self.connect() + LOGGER.debug('Channel created') + + def connect(self): + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = AppServiceStub(self.channel) + + def close(self): + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + + + @RETRY_DECORATOR + def RegisterApp(self, request : App) -> Empty: + LOGGER.debug('RegisterApp request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.RegisterApp(request) + LOGGER.debug('RegisterApp 
result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + + @RETRY_DECORATOR + def ListApps(self, request: ContextId) -> AppList: + LOGGER.debug('ListApps request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.ListApps(request) + LOGGER.debug('ListApps result: {:s}'.format(grpc_message_to_json_string(response))) + return response diff --git a/src/qkd_app/client/__init__.py b/src/qkd_app/client/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07d08814021ef82220611ee21c01ba01806682e9 --- /dev/null +++ b/src/qkd_app/client/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/requirements.in b/src/qkd_app/requirements.in new file mode 100644 index 0000000000000000000000000000000000000000..a9bce93b569792a75687811f08e03d398ae4aeb5 --- /dev/null +++ b/src/qkd_app/requirements.in @@ -0,0 +1,25 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Flask==2.1.3 +Flask-HTTPAuth==4.5.0 +Flask-RESTful==0.3.9 +jsonschema==4.4.0 +requests==2.27.1 +werkzeug==2.3.7 +nats-py==2.6.* +psycopg2-binary==2.9.* +SQLAlchemy==1.4.* +sqlalchemy-cockroachdb==1.4.* +SQLAlchemy-Utils==0.38.* diff --git a/src/qkd_app/service/QKDAppService.py b/src/qkd_app/service/QKDAppService.py new file mode 100644 index 0000000000000000000000000000000000000000..a6c93cd811a72594804fe8e8e86a9586533a1317 --- /dev/null +++ b/src/qkd_app/service/QKDAppService.py @@ -0,0 +1,37 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging, sqlalchemy +from common.Constants import ServiceNameEnum +from common.Settings import get_service_port_grpc +from common.message_broker.MessageBroker import MessageBroker +from common.proto.qkd_app_pb2_grpc import add_AppServiceServicer_to_server +from common.tools.service.GenericGrpcService import GenericGrpcService +from qkd_app.service.QKDAppServiceServicerImpl import AppServiceServicerImpl + +# Custom gRPC settings +GRPC_MAX_WORKERS = 200 # multiple clients might keep connections alive for Get*Events() RPC methods +LOGGER = logging.getLogger(__name__) + + +class AppService(GenericGrpcService): + def __init__( + self, db_engine : sqlalchemy.engine.Engine, messagebroker : MessageBroker, cls_name: str = __name__ + ) -> None: + port = get_service_port_grpc(ServiceNameEnum.QKD_APP) + super().__init__(port, max_workers=GRPC_MAX_WORKERS, cls_name=cls_name) + self.app_servicer = AppServiceServicerImpl(db_engine, messagebroker) + + def install_servicers(self): + add_AppServiceServicer_to_server(self.app_servicer, self.server) diff --git a/src/qkd_app/service/QKDAppServiceServicerImpl.py b/src/qkd_app/service/QKDAppServiceServicerImpl.py new file mode 100644 index 0000000000000000000000000000000000000000..df7a885c47eda9d7a6137c9905388da49c698e7e --- /dev/null +++ b/src/qkd_app/service/QKDAppServiceServicerImpl.py @@ -0,0 +1,73 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc, logging, sqlalchemy +#from typing import Iterator, Optional +from common.message_broker.MessageBroker import MessageBroker +import grpc, json, logging #, deepdiff +from common.proto.context_pb2 import ( + Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum, ContextId) +from common.proto.qkd_app_pb2 import (App, AppId, AppList, QKDAppTypesEnum) +from common.proto.qkd_app_pb2_grpc import AppServiceServicer +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method +#from common.tools.context_queries.InterDomain import is_inter_domain #, is_multi_domain +#from common.tools.grpc.ConfigRules import copy_config_rules +#from common.tools.grpc.Constraints import copy_constraints +#from common.tools.grpc.EndPointIds import copy_endpoint_ids +#from common.tools.grpc.ServiceIds import update_service_ids +#from common.tools.grpc.Tools import grpc_message_to_json_string +#from context.client.ContextClient import ContextClient +#from qkd_app.client.QKDAppClient import QKDAppClient +from .database.QKDApp import app_set, app_list_objs, app_get, app_get_by_server +from common.method_wrappers.ServiceExceptions import NotFoundException + +LOGGER = logging.getLogger(__name__) + +METRICS_POOL = MetricsPool('QkdApp', 'RPC') + +# Optare: This file must be edited based on app's logic + +class AppServiceServicerImpl(AppServiceServicer): + def __init__(self, db_engine : sqlalchemy.engine.Engine, messagebroker : MessageBroker): + LOGGER.debug('Creating Servicer...') + self.db_engine = db_engine + self.messagebroker = messagebroker + LOGGER.debug('Servicer Created') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def RegisterApp(self, request : App, context : grpc.ServicerContext) -> Empty: + # Optare: This is the main function required for the project. + # Optare: If it's an internal it will save it directly. 
If it's an external one it will save it as pending by not providing the remote until the other party requests it too + # Optare: Ideally, the only thing needed to change is the code inside the try block. Currently it just searches by a pending app with the same server_id but you can put more restrictions or different search and raise the NotFoundException + + if request.app_type == QKDAppTypesEnum.QKDAPPTYPES_INTERNAL: + app_set(self.db_engine, self.messagebroker, request) + + else: + try: + app = app_get_by_server(self.db_engine, request.server_app_id) + except NotFoundException: + app = request + app_set(self.db_engine, self.messagebroker, app) + else: + app.remote_device_id.device_uuid.uuid = request.local_device_id.device_uuid.uuid + app_set(self.db_engine, self.messagebroker, app) + + + return Empty() + + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def ListApps(self, request: ContextId, context : grpc.ServicerContext) -> AppList: + return app_list_objs(self.db_engine) diff --git a/src/qkd_app/service/__init__.py b/src/qkd_app/service/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07d08814021ef82220611ee21c01ba01806682e9 --- /dev/null +++ b/src/qkd_app/service/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/src/qkd_app/service/__main__.py b/src/qkd_app/service/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..ed7e554728eb2de6240dd4facb7f084337a026a4 --- /dev/null +++ b/src/qkd_app/service/__main__.py @@ -0,0 +1,94 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, signal, sys, threading +from prometheus_client import start_http_server +#from common.Constants import ServiceNameEnum +from common.Settings import ( + #ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, + get_log_level, get_metrics_port, wait_for_environment_variables) +from qkd_app.service.QKDAppService import AppService +from qkd_app.service.rest_server.RestServer import RestServer +from qkd_app.service.rest_server.qkd_app import register_qkd_app +#from common.message_broker.Factory import get_messagebroker_backend +#from common.message_broker.MessageBroker import MessageBroker +from qkd_app.service.database.Engine import Engine +from qkd_app.service.database.models._Base import rebuild_database + +terminate = threading.Event() +LOGGER : logging.Logger = None + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + terminate.set() + +def main(): + global LOGGER # pylint: disable=global-statement + + log_level = get_log_level() + logging.basicConfig(level=log_level, 
format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + + wait_for_environment_variables([ + #get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), + #get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), + ]) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting...') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + # Get Database Engine instance and initialize database, if needed + LOGGER.info('Getting SQLAlchemy DB Engine...') + db_engine = Engine.get_engine() + if db_engine is None: + LOGGER.error('Unable to get SQLAlchemy DB Engine...') + return -1 + + try: + Engine.create_database(db_engine) + except: # pylint: disable=bare-except # pragma: no cover + LOGGER.exception('Failed to check/create the database: {:s}'.format(str(db_engine.url))) + + rebuild_database(db_engine) + + # Get message broker instance + messagebroker = None #MessageBroker(get_messagebroker_backend()) + + # Starting context service + grpc_service = AppService(db_engine, messagebroker) + grpc_service.start() + + rest_server = RestServer() + register_qkd_app(rest_server) + rest_server.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=1.0): pass + + LOGGER.info('Terminating...') + grpc_service.stop() + rest_server.shutdown() + rest_server.join() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/kpi_manager/database/KpiEngine.py b/src/qkd_app/service/database/Engine.py similarity index 67% rename from src/kpi_manager/database/KpiEngine.py rename to src/qkd_app/service/database/Engine.py index 0fce7e3d36cf2f03a18f311c815719a4f17b2869..8f528f9a1b3cacca2ea260901ab808461dd3183d 100644 --- a/src/kpi_manager/database/KpiEngine.py +++ b/src/qkd_app/service/database/Engine.py @@ -12,29 +12,44 @@ # See the License for 
the specific language governing permissions and # limitations under the License. -import logging, sqlalchemy +import logging, sqlalchemy, sqlalchemy_utils from common.Settings import get_setting LOGGER = logging.getLogger(__name__) + +APP_NAME = 'tfs' +ECHO = False # true: dump SQL commands and transactions executed CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' -class KpiEngine: +class Engine: @staticmethod def get_engine() -> sqlalchemy.engine.Engine: crdb_uri = get_setting('CRDB_URI', default=None) if crdb_uri is None: CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE') CRDB_SQL_PORT = get_setting('CRDB_SQL_PORT') - CRDB_DATABASE = 'tfs_kpi_mgmt' # TODO: define variable get_setting('CRDB_DATABASE_KPI_MGMT') + CRDB_DATABASE = get_setting('CRDB_DATABASE_APP') CRDB_USERNAME = get_setting('CRDB_USERNAME') CRDB_PASSWORD = get_setting('CRDB_PASSWORD') CRDB_SSLMODE = get_setting('CRDB_SSLMODE') crdb_uri = CRDB_URI_TEMPLATE.format( CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) + try: - engine = sqlalchemy.create_engine(crdb_uri, echo=False) - LOGGER.info(' KpiDBmanager initalized with DB URL: {:}'.format(crdb_uri)) + engine = sqlalchemy.create_engine( + crdb_uri, connect_args={'application_name': APP_NAME}, echo=ECHO, future=True) except: # pylint: disable=bare-except # pragma: no cover LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) - return None # type: ignore + return None + return engine + + @staticmethod + def create_database(engine : sqlalchemy.engine.Engine) -> None: + if not sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.create_database(engine.url) + + @staticmethod + def drop_database(engine : sqlalchemy.engine.Engine) -> None: + if sqlalchemy_utils.database_exists(engine.url): + sqlalchemy_utils.drop_database(engine.url) diff --git a/src/qkd_app/service/database/QKDApp.py b/src/qkd_app/service/database/QKDApp.py 
# File: src/qkd_app/service/database/QKDApp.py (reconstructed from mangled diff)
import datetime, logging, uuid
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session, selectinload, sessionmaker
from sqlalchemy_cockroachdb import run_transaction
from typing import Dict, List, Optional, Set, Tuple
from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException
from common.message_broker.MessageBroker import MessageBroker
from common.proto.context_pb2 import Empty
from common.proto.qkd_app_pb2 import (
    AppList, App, AppId)
from common.tools.grpc.Tools import grpc_message_to_json_string
from .models.QKDAppModel import AppModel
from .models.enums.QKDAppStatus import grpc_to_enum__qkd_app_status
from .models.enums.QKDAppTypes import grpc_to_enum__qkd_app_types
from .uuids.QKDApp import app_get_uuid
from common.tools.object_factory.Context import json_context_id
from common.tools.object_factory.QKDApp import json_app_id
from context.service.database.uuids.Context import context_get_uuid

#from .Events import notify_event_context, notify_event_device, notify_event_topology

LOGGER = logging.getLogger(__name__)


def app_list_objs(db_engine : Engine) -> AppList:
    """Return all Apps stored in the database as an AppList message."""
    def callback(session : Session) -> List[Dict]:
        obj_list : List[AppModel] = session.query(AppModel).all()
        return [obj.dump() for obj in obj_list]
    apps = run_transaction(sessionmaker(bind=db_engine), callback)
    return AppList(apps=apps)

def app_get(db_engine : Engine, request : AppId) -> App:
    """Fetch a single App by its AppId.

    Raises:
        NotFoundException: when no row matches the derived app_uuid.
    """
    app_uuid = app_get_uuid(request, allow_random=False)
    def callback(session : Session) -> Optional[Dict]:
        obj : Optional[AppModel] = session.query(AppModel)\
            .filter_by(app_uuid=app_uuid).one_or_none()
        return None if obj is None else obj.dump()
    obj = run_transaction(sessionmaker(bind=db_engine), callback)
    if obj is None:
        raw_app_uuid = request.app_uuid.uuid
        raise NotFoundException('App', raw_app_uuid, extra_details=[
            'app_uuid generated was: {:s}'.format(app_uuid)
        ])
    return App(**obj)

def app_set(db_engine : Engine, messagebroker : MessageBroker, request : App) -> AppId:
    """Create or update (upsert, keyed on app_uuid) an App and return its AppId.

    An update is detected by comparing the row's created_at and updated_at
    timestamps returned by the upsert statement.
    """
    context_uuid = context_get_uuid(request.app_id.context_id, allow_random=False)
    raw_app_uuid = request.app_id.app_uuid.uuid
    app_uuid = app_get_uuid(request.app_id, allow_random=True)

    # BUGFIX: the original assigned `app_type = request.app_type` and then
    # immediately overwrote it with the enum-mapped value; the first (dead)
    # assignment is removed. Only the ORM enum values are stored.
    app_status = grpc_to_enum__qkd_app_status(request.app_status)
    app_type   = grpc_to_enum__qkd_app_types(request.app_type)

    now = datetime.datetime.utcnow()

    app_data = [{
        'context_uuid'       : context_uuid,
        'app_uuid'           : app_uuid,
        'app_status'         : app_status,
        'app_type'           : app_type,
        'server_app_id'      : request.server_app_id,
        'client_app_id'      : request.client_app_id,
        'backing_qkdl_uuid'  : [qkdl_id.qkdl_uuid.uuid for qkdl_id in request.backing_qkdl_id],
        'local_device_uuid'  : request.local_device_id.device_uuid.uuid,
        # empty remote device uuid is normalized to NULL
        'remote_device_uuid' : request.remote_device_id.device_uuid.uuid or None,
        'created_at'         : now,
        'updated_at'         : now,
    }]

    # BUGFIX: the callback was annotated `Tuple[bool, List[Dict]]` but returns
    # only the `updated` flag; annotation corrected to `bool`.
    def callback(session : Session) -> bool:
        stmt = insert(AppModel).values(app_data)
        stmt = stmt.on_conflict_do_update(
            index_elements=[AppModel.app_uuid],
            set_=dict(
                app_status         = stmt.excluded.app_status,
                app_type           = stmt.excluded.app_type,
                server_app_id      = stmt.excluded.server_app_id,
                client_app_id      = stmt.excluded.client_app_id,
                backing_qkdl_uuid  = stmt.excluded.backing_qkdl_uuid,
                local_device_uuid  = stmt.excluded.local_device_uuid,
                remote_device_uuid = stmt.excluded.remote_device_uuid,
                updated_at         = stmt.excluded.updated_at,
            )
        )
        stmt = stmt.returning(AppModel.created_at, AppModel.updated_at)
        created_at, updated_at = session.execute(stmt).fetchone()
        # row was updated (not created) when updated_at moved past created_at
        return updated_at > created_at

    updated = run_transaction(sessionmaker(bind=db_engine), callback)
    context_id = json_context_id(context_uuid)
    app_id = json_app_id(app_uuid, context_id=context_id)
    #event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
    #notify_event_app(messagebroker, event_type, app_id)
    #notify_event_context(messagebroker, EventTypeEnum.EVENTTYPE_UPDATE, context_id)
    return AppId(**app_id)

def app_get_by_server(db_engine : Engine, request : str) -> App:
    """Fetch the App whose server_app_id equals `request`.

    Raises:
        NotFoundException: when no row matches.
    """
    def callback(session : Session) -> Optional[Dict]:
        obj : Optional[AppModel] = session.query(AppModel)\
            .filter_by(server_app_id=request).one_or_none()
        return None if obj is None else obj.dump()
    obj = run_transaction(sessionmaker(bind=db_engine), callback)
    if obj is None:
        raise NotFoundException('No app match found for', request)
    return App(**obj)

# NOTE(review): the original file carried a large commented-out copy of
# device_delete/device_select from the context service as a template for
# future delete/select operations; removed here as dead code — recover it
# from context.service.database.Device if needed.
a/src/qkd_app/service/database/__init__.py b/src/qkd_app/service/database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..07d08814021ef82220611ee21c01ba01806682e9 --- /dev/null +++ b/src/qkd_app/service/database/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/service/database/models/QKDAppModel.py b/src/qkd_app/service/database/models/QKDAppModel.py new file mode 100644 index 0000000000000000000000000000000000000000..c32b4e28c95105d8659cb52790f51b330764c2cf --- /dev/null +++ b/src/qkd_app/service/database/models/QKDAppModel.py @@ -0,0 +1,63 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# File: src/qkd_app/service/database/models/QKDAppModel.py (reconstructed)
import operator
from sqlalchemy import CheckConstraint, Column, DateTime, Float, Enum, ForeignKey, Integer, String
from sqlalchemy.dialects.postgresql import UUID, ARRAY
from sqlalchemy.orm import relationship
from typing import Dict
from ._Base import _Base
from .enums.QKDAppStatus import ORM_QKDAppStatusEnum
from .enums.QKDAppTypes import ORM_QKDAppTypesEnum

class AppModel(_Base):
    """ORM mapping for a QKD application registered in TFS."""
    __tablename__ = 'qkd_app'

    app_uuid           = Column(UUID(as_uuid=False), primary_key=True)
    # NOTE(review): supposed to be a ForeignKey to the context table — confirm
    context_uuid       = Column(UUID(as_uuid=False), nullable=False)
    app_status         = Column(Enum(ORM_QKDAppStatusEnum), nullable=False)
    app_type           = Column(Enum(ORM_QKDAppTypesEnum), nullable=False)
    server_app_id      = Column(String, nullable=False)
    client_app_id      = Column(ARRAY(String), nullable=False)
    backing_qkdl_uuid  = Column(ARRAY(UUID(as_uuid=False)), nullable=False)
    local_device_uuid  = Column(UUID(as_uuid=False), nullable=False)
    remote_device_uuid = Column(UUID(as_uuid=False), nullable=True)

    # Optare: created_at/updated_at are compared to detect whether an upsert
    # updated an existing row (see app_set). Don't change their semantics.
    created_at         = Column(DateTime, nullable=False)
    updated_at         = Column(DateTime, nullable=False)

    #__table_args__ = (
    #    CheckConstraint(... >= 0, name='name_value_...'),
    #)

    def dump_id(self) -> Dict:
        """Return the protobuf-shaped AppId dict for this row."""
        return {
            'context_id': {'context_uuid': {'uuid': self.context_uuid}},
            'app_uuid': {'uuid': self.app_uuid}
        }

    def dump(self) -> Dict:
        """Return the protobuf-shaped App dict for this row."""
        return {
            'app_id'           : self.dump_id(),
            'app_status'       : self.app_status.value,
            'app_type'         : self.app_type.value,
            'server_app_id'    : self.server_app_id,
            'client_app_id'    : self.client_app_id,
            'backing_qkdl_id'  : [{'qkdl_uuid': {'uuid': one_uuid}} for one_uuid in self.backing_qkdl_uuid],
            'local_device_id'  : {'device_uuid': {'uuid': self.local_device_uuid}},
            'remote_device_id' : {'device_uuid': {'uuid': self.remote_device_uuid}},
        }
# File: src/qkd_app/service/database/models/_Base.py (reconstructed)
import sqlalchemy
from typing import Any, List
from sqlalchemy.orm import Session, sessionmaker, declarative_base
from sqlalchemy.sql import text
from sqlalchemy_cockroachdb import run_transaction

# Shared declarative base for all qkd_app ORM models.
_Base = declarative_base()

'''
def create_performance_enhancers(db_engine : sqlalchemy.engine.Engine) -> None:
    def index_storing(
        index_name : str, table_name : str, index_fields : List[str], storing_fields : List[str]
    ) -> Any:
        str_index_fields = ','.join(['"{:s}"'.format(index_field) for index_field in index_fields])
        str_storing_fields = ','.join(['"{:s}"'.format(storing_field) for storing_field in storing_fields])
        INDEX_STORING = 'CREATE INDEX IF NOT EXISTS {:s} ON "{:s}" ({:s}) STORING ({:s});'
        return text(INDEX_STORING.format(index_name, table_name, str_index_fields, str_storing_fields))

    statements = [
        # In case of relations
    ]
    def callback(session : Session) -> bool:
        for stmt in statements: session.execute(stmt)
    run_transaction(sessionmaker(bind=db_engine), callback)
'''

def rebuild_database(db_engine : sqlalchemy.engine.Engine, drop_if_exists : bool = False):
    """(Re)create all ORM tables on db_engine; optionally drop existing ones first."""
    if drop_if_exists:
        _Base.metadata.drop_all(db_engine)
    _Base.metadata.create_all(db_engine)
    #create_performance_enhancers(db_engine)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/service/database/models/enums/QKDAppStatus.py b/src/qkd_app/service/database/models/enums/QKDAppStatus.py new file mode 100644 index 0000000000000000000000000000000000000000..d3063ef56704ce1bdd48d15ea8c6486ed7c8cfae --- /dev/null +++ b/src/qkd_app/service/database/models/enums/QKDAppStatus.py @@ -0,0 +1,27 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# File: src/qkd_app/service/database/models/enums/QKDAppStatus.py (reconstructed)
import enum, functools
from common.proto.qkd_app_pb2 import QKDAppStatusEnum
from ._GrpcToEnum import grpc_to_enum

class ORM_QKDAppStatusEnum(enum.Enum):
    """ORM counterpart of the gRPC QKDAppStatusEnum (values are the proto ints)."""
    ON           = QKDAppStatusEnum.QKDAPPSTATUS_ON
    DISCONNECTED = QKDAppStatusEnum.QKDAPPSTATUS_DISCONNECTED
    OUT_OF_TIME  = QKDAppStatusEnum.QKDAPPSTATUS_OUT_OF_TIME
    ZOMBIE       = QKDAppStatusEnum.QKDAPPSTATUS_ZOMBIE

# Maps a gRPC status value to its ORM enum member.
grpc_to_enum__qkd_app_status = functools.partial(
    grpc_to_enum, QKDAppStatusEnum, ORM_QKDAppStatusEnum)

# File: src/qkd_app/service/database/models/enums/QKDAppTypes.py (reconstructed)
from common.proto.qkd_app_pb2 import QKDAppTypesEnum

class ORM_QKDAppTypesEnum(enum.Enum):
    """ORM counterpart of the gRPC QKDAppTypesEnum (values are the proto ints)."""
    INTERNAL = QKDAppTypesEnum.QKDAPPTYPES_INTERNAL
    CLIENT   = QKDAppTypesEnum.QKDAPPTYPES_CLIENT

# Maps a gRPC app-type value to its ORM enum member.
grpc_to_enum__qkd_app_types = functools.partial(
    grpc_to_enum, QKDAppTypesEnum, ORM_QKDAppTypesEnum)
def grpc_to_enum(grpc_enum_class, orm_enum_class : Enum, grpc_enum_value, grpc_enum_prefix : Optional[str] = None):
    """Map a gRPC enum value onto the corresponding ORM Enum member.

    gRPC symbols look like 'QKDAPPSTATUS_ON' while ORM members are named 'ON';
    the prefix to strip is derived from the ORM class name (ORM_<Name>Enum ->
    '<NAME>_') unless grpc_enum_prefix is given explicitly.

    Returns the ORM member, or None when no member matches the stripped name.
    """
    symbol = grpc_enum_class.Name(grpc_enum_value)
    if grpc_enum_prefix is None:
        # e.g. ORM_QKDAppStatusEnum -> 'QKDAPPSTATUS_'
        grpc_enum_prefix = re.sub(r'^ORM_(.+)ENUM$', r'\1_', orm_enum_class.__name__.upper())
    if len(grpc_enum_prefix) > 0:
        symbol = symbol.replace(grpc_enum_prefix, '')
    return orm_enum_class._member_map_.get(symbol)
# File: src/qkd_app/service/database/uuids/QKDApp.py (reconstructed)
from common.proto.qkd_app_pb2 import AppId
from common.method_wrappers.ServiceExceptions import InvalidArgumentsException
from ._Builder import get_uuid_from_string, get_uuid_random

def app_get_uuid(
    app_id : AppId, allow_random : bool = False
) -> str:
    """Derive the database UUID for an App from its AppId.

    Uses the embedded app_uuid when present; otherwise generates a random
    UUID when allow_random is True.

    Raises:
        InvalidArgumentsException: when app_uuid is empty and allow_random is False.
    """
    raw_uuid = app_id.app_uuid.uuid
    if len(raw_uuid) > 0:
        return get_uuid_from_string(raw_uuid)
    if allow_random:
        return get_uuid_random()
    raise InvalidArgumentsException([
        ('app_id.app_uuid.uuid', raw_uuid),
    ], extra_details=['At least one is required to produce a App UUID'])
# File: src/qkd_app/service/database/uuids/_Builder.py (reconstructed)
from typing import Optional, Union
from uuid import UUID, uuid4, uuid5

# Generate a UUIDv5-like value from the SHA-1 of "TFS" and no namespace to be used as the
# NAMESPACE for all the context UUIDs generated. For efficiency purposes, the UUID is
# hardcoded; however, it is produced using the following code:
#   from hashlib import sha1
#   from uuid import UUID
#   hash = sha1(bytes('TFS', 'utf-8')).digest()
#   NAMESPACE_TFS = UUID(bytes=hash[:16], version=5)
NAMESPACE_TFS = UUID('200e3a1f-2223-534f-a100-758e29c37f40')

def get_uuid_from_string(str_uuid_or_name : Union[str, UUID], prefix_for_name : Optional[str] = None) -> str:
    """Normalize an identifier to a canonical UUID string.

    - UUID instance: returned as its string form.
    - String parseable as a UUID: returned normalized (lowercase, hyphenated).
    - Any other string: deterministically hashed into the TFS namespace
      (optionally prefixed with '<prefix_for_name>/').

    Raises:
        Exception: when the parameter is neither a str nor a UUID.
    """
    # BUGFIX: the original returned the UUID object itself here, violating the
    # declared '-> str' contract; callers comparing/joining strings would break.
    if isinstance(str_uuid_or_name, UUID): return str(str_uuid_or_name)
    if not isinstance(str_uuid_or_name, str):
        MSG = 'Parameter({:s}) cannot be used to produce a UUID'
        raise Exception(MSG.format(str(repr(str_uuid_or_name))))
    try:
        # try to parse as UUID
        return str(UUID(str_uuid_or_name))
    except: # pylint: disable=bare-except
        # produce a deterministic UUID within the TFS namespace from the parameter
        if prefix_for_name is not None:
            str_uuid_or_name = '{:s}/{:s}'.format(prefix_for_name, str_uuid_or_name)
        return str(uuid5(NAMESPACE_TFS, str_uuid_or_name))

def get_uuid_random() -> str:
    """Return a fresh random UUIDv4 as a string (no namespacing needed)."""
    return str(uuid4())
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/qkd_app/service/rest_server/qkd_app/Resources.py b/src/qkd_app/service/rest_server/qkd_app/Resources.py new file mode 100644 index 0000000000000000000000000000000000000000..6ba79d3940da91dfebc1a1c666893548caccbe6c --- /dev/null +++ b/src/qkd_app/service/rest_server/qkd_app/Resources.py @@ -0,0 +1,86 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# File: src/qkd_app/service/rest_server/qkd_app/Resources.py (reconstructed)
import uuid, json
from flask import request
from flask_restful import Resource
from common.proto.context_pb2 import Empty
from common.proto.qkd_app_pb2 import App, QKDAppTypesEnum
from common.Constants import DEFAULT_CONTEXT_NAME
from context.client.ContextClient import ContextClient
from qkd_app.client.QKDAppClient import QKDAppClient


class _Resource(Resource):
    """Base REST resource holding the shared TFS gRPC clients."""
    def __init__(self) -> None:
        super().__init__()
        self.context_client = ContextClient()
        self.qkd_app_client = QKDAppClient()

class Index(_Resource):
    """Liveness endpoint."""
    def get(self):
        return {'hello': 'world'}

class CreateQKDApp(_Resource):
    # Optare: POST endpoint for a QKD node to register an app with TeraFlowSDN.
    def post(self):
        """Register an external (client-type) QKD app reported by a QKD node."""
        app = request.get_json()['app']

        devices = self.context_client.ListDevices(Empty()).devices

        # This lookup won't be necessary if we can guarantee Device ID is the
        # same as the QKDN Id.
        local_device = self._find_device_by_qkdn_id(devices, app['local_qkdn_id'])

        # Optare: Todo: Verify that a service is present for this app
        if local_device is None:
            return {"status": "fail"}

        external_app_src_dst = {
            'app_id': {'context_id': {'context_uuid': {'uuid': DEFAULT_CONTEXT_NAME}}, 'app_uuid': {'uuid': ''}},
            'app_status': 'QKDAPPSTATUS_' + app['app_status'],
            'app_type': QKDAppTypesEnum.QKDAPPTYPES_CLIENT,
            'server_app_id': app['server_app_id'],
            'client_app_id': app['client_app_id'],
            'backing_qkdl_id': [{'qkdl_uuid': {'uuid': qkdl_id}} for qkdl_id in app['backing_qkdl_id']],
            'local_device_id': local_device.device_id,
            'remote_device_id': {'device_uuid': {'uuid': ''}},
        }

        # Optare: RegisterApp supports both internal and external apps; it also
        # verifies whether two parties are requesting the same app.
        self.qkd_app_client.RegisterApp(App(**external_app_src_dst))

        # Optare: Todo: Communicate by SBI with both Nodes of the new App
        return {"status": "success"}

    @staticmethod
    def _find_device_by_qkdn_id(devices, qkdn_id):
        """Return the device whose '__node__' config rule declares qkdn_id, else None.

        BUGFIX: the original inner 'break' only exited the config-rule loop, so
        the device scan kept iterating after a match (and a later duplicate
        match would silently overwrite the first). Early return fixes both.
        """
        for device in devices:
            for config_rule in device.device_config.config_rules:
                if config_rule.custom.resource_key != '__node__':
                    continue
                value = json.loads(config_rule.custom.resource_value)
                if value['qkdn_id'] == qkdn_id:
                    return device
                break  # a device has at most one '__node__' rule; move on
        return None
# File: src/qkd_app/service/rest_server/qkd_app/__init__.py (reconstructed)
from qkd_app.service.rest_server.RestServer import RestServer
from .Resources import (
    CreateQKDApp, Index)

URL_PREFIX = '/qkd_app'

# Use 'path' type since some identifiers might contain char '/' and Flask is unable to recognize them in 'string' type.
RESOURCES = [
    # (endpoint_name, resource_class, resource_url)
    ('api.index', Index, '/'),
    ('api.register_qkd_app', CreateQKDApp, '/create_qkd_app'),
]

def register_qkd_app(app_server : RestServer):
    """Attach every qkd_app REST resource to the given server under URL_PREFIX."""
    for endpoint_name, resource_class, resource_url in RESOURCES:
        full_url = URL_PREFIX + resource_url
        app_server.add_resource(resource_class, full_url, endpoint=endpoint_name)
service/ # Start the service diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py index fd265c8e05ed068e516cadad260864315ec81d36..085b6f0d1b4d8a9ee8e0503ad8e8058c4f6ec170 100644 --- a/src/service/service/ServiceServiceServicerImpl.py +++ b/src/service/service/ServiceServiceServicerImpl.py @@ -30,6 +30,9 @@ from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_s from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Topology import json_topology_id from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME +from common.Settings import ( + is_deployed_e2e_orch, is_deployed_optical, is_deployed_te +) from context.client.ContextClient import ContextClient from e2e_orchestrator.client.E2EOrchestratorClient import E2EOrchestratorClient from pathcomp.frontend.client.PathCompClient import PathCompClient @@ -143,7 +146,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): service.service_type = request.service_type # pylint: disable=no-member service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED # pylint: disable=no-member - if service.service_type == ServiceTypeEnum.SERVICETYPE_TE: + if is_deployed_te() and service.service_type == ServiceTypeEnum.SERVICETYPE_TE: # TE service: context_client.SetService(request) @@ -165,7 +168,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): str_service_status = ServiceStatusEnum.Name(service_status.service_status) raise Exception(MSG.format(service_key, str_service_status)) - if service.service_type == ServiceTypeEnum.SERVICETYPE_E2E: + if is_deployed_e2e_orch() and service.service_type == ServiceTypeEnum.SERVICETYPE_E2E: # End-to-End service: service_id_with_uuids = context_client.SetService(request) @@ -249,7 +252,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): tasks_scheduler = TasksScheduler(self.service_handler_factory) - if 
service.service_type == ServiceTypeEnum.SERVICETYPE_OPTICAL_CONNECTIVITY: + if is_deployed_optical() and service.service_type == ServiceTypeEnum.SERVICETYPE_OPTICAL_CONNECTIVITY: context_id_x = json_context_id(DEFAULT_CONTEXT_NAME) topology_id_x = json_topology_id( DEFAULT_TOPOLOGY_NAME, context_id_x) @@ -349,7 +352,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer): context_client.SetService(service) - if service.service_type == ServiceTypeEnum.SERVICETYPE_TE: + if is_deployed_te() and service.service_type == ServiceTypeEnum.SERVICETYPE_TE: # TE service te_service_client = TEServiceClient() te_service_client.DeleteLSP(request) diff --git a/src/service/service/service_handlers/qkd/qkd_service_handler.py b/src/service/service/service_handlers/qkd/qkd_service_handler.py index 76c67867ee2f4bae60b8dd6e187f221f2efc1eb0..0977388005ef72fe036de93de2dc73438f0c6163 100644 --- a/src/service/service/service_handlers/qkd/qkd_service_handler.py +++ b/src/service/service/service_handlers/qkd/qkd_service_handler.py @@ -17,7 +17,7 @@ import json, logging, uuid from typing import Any, Dict, List, Optional, Tuple, Union from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method from common.proto.context_pb2 import ConfigRule, DeviceId, Service -from common.proto.app_pb2 import App, QKDAppStatusEnum, QKDAppTypesEnum +from common.proto.qkd_app_pb2 import App, QKDAppStatusEnum, QKDAppTypesEnum from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set from common.tools.object_factory.Device import json_device_id from common.type_checkers.Checkers import chk_type diff --git a/src/service/service/task_scheduler/TaskExecutor.py b/src/service/service/task_scheduler/TaskExecutor.py index 610a518a17ed4fcb33de2eb0d15fc5adbccd6434..0e908277b2f3d7e93065ff9a4ba93bf099bc60f7 100644 --- a/src/service/service/task_scheduler/TaskExecutor.py +++ b/src/service/service/task_scheduler/TaskExecutor.py @@ -20,6 +20,7 @@ from 
common.proto.context_pb2 import ( Connection, ConnectionId, Device, DeviceDriverEnum, DeviceId, Service, ServiceId, OpticalConfig, OpticalConfigId,ConnectionList,ServiceConfigRule ) +from common.proto.qkd_app_pb2 import App from common.tools.context_queries.Connection import get_connection_by_id from common.tools.context_queries.Device import get_device from common.tools.context_queries.Service import get_service_by_id @@ -27,6 +28,7 @@ from common.tools.grpc.Tools import grpc_message_to_json_string from common.tools.object_factory.Device import json_device_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient +from qkd_app.client.QKDAppClient import QKDAppClient from service.service.service_handler_api.Exceptions import ( UnsatisfiedFilterException, UnsupportedFilterFieldException, UnsupportedFilterFieldValueException ) @@ -46,11 +48,14 @@ class CacheableObjectType(Enum): CONNECTION = 'connection' DEVICE = 'device' SERVICE = 'service' + QKD_APP = 'qkd-app' class TaskExecutor: def __init__(self, service_handler_factory : ServiceHandlerFactory) -> None: self._service_handler_factory = service_handler_factory self._context_client = ContextClient() + # DEPENDENCY QKD + self._qkd_app_client = QKDAppClient() self._device_client = DeviceClient() self._grpc_objects_cache : Dict[str, CacheableObject] = dict() @@ -327,3 +332,12 @@ class TaskExecutor: str(dict_connection_devices) ) ) + + + # ----- QkdApp-related methods ------------------------------------------------------------------------------------- + + def register_app(self, app: App) -> None: + app_key = get_app_key(app.app_id) + self._qkd_app_client.RegisterApp(app) + LOGGER.info("reg registered") + self._store_grpc_object(CacheableObjectType.QKD_APP, app_key, app) diff --git a/src/service/service/tools/ObjectKeys.py b/src/service/service/tools/ObjectKeys.py index 47afc88edaad49b417417d79711eaebb8f49e026..940633eb978d60d01638075499d447a30667ba96 100644 --- 
a/src/service/service/tools/ObjectKeys.py +++ b/src/service/service/tools/ObjectKeys.py @@ -13,7 +13,7 @@ # limitations under the License. from common.proto.context_pb2 import ConnectionId, DeviceId, ServiceId -import logging +from common.proto.qkd_app_pb2 import AppId def get_connection_key(connection_id : ConnectionId) -> str: return connection_id.connection_uuid.uuid @@ -26,3 +26,7 @@ def get_service_key(service_id : ServiceId) -> str: context_uuid = service_id.context_id.context_uuid.uuid service_uuid = service_id.service_uuid.uuid return '{:s}/{:s}'.format(context_uuid, service_uuid) + +def get_app_key(app_id : AppId) -> str: + return app_id.app_uuid.uuid + diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py deleted file mode 100644 index 7c8620faf25e695e7f971bce78be9ad208a7701b..0000000000000000000000000000000000000000 --- a/src/telemetry/database/TelemetryEngine.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging, sqlalchemy -from common.Settings import get_setting - -LOGGER = logging.getLogger(__name__) -CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster.local:{:s}/{:s}?sslmode={:s}' - -class TelemetryEngine: - @staticmethod - def get_engine() -> sqlalchemy.engine.Engine: - crdb_uri = get_setting('CRDB_URI', default=None) - if crdb_uri is None: - CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE') - CRDB_SQL_PORT = get_setting('CRDB_SQL_PORT') - CRDB_DATABASE = "tfs-telemetry" # TODO: define variable get_setting('CRDB_DATABASE_KPI_MGMT') - CRDB_USERNAME = get_setting('CRDB_USERNAME') - CRDB_PASSWORD = get_setting('CRDB_PASSWORD') - CRDB_SSLMODE = get_setting('CRDB_SSLMODE') - crdb_uri = CRDB_URI_TEMPLATE.format( - CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE) - try: - engine = sqlalchemy.create_engine(crdb_uri, echo=False) - LOGGER.info(' TelemetryDB initalized with DB URL: {:}'.format(crdb_uri)) - except: # pylint: disable=bare-except # pragma: no cover - LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri))) - return None # type: ignore - return engine diff --git a/src/telemetry/database/Telemetry_DB.py b/src/telemetry/database/Telemetry_DB.py index 32acfd73a410a7bfddd6b487d0b1962afadb3842..110c7e80a4c36eed15417bfa05c4057ccb7fe292 100644 --- a/src/telemetry/database/Telemetry_DB.py +++ b/src/telemetry/database/Telemetry_DB.py @@ -13,125 +13,32 @@ # limitations under the License. 
import logging -import sqlalchemy_utils -from sqlalchemy import inspect -from sqlalchemy.orm import sessionmaker -from telemetry.database.TelemetryModel import Collector as CollectorModel -from telemetry.database.TelemetryEngine import TelemetryEngine -from common.method_wrappers.ServiceExceptions import ( - OperationFailedException, AlreadyExistsException ) +from common.method_wrappers.Decorator import MetricsPool +from common.tools.database.GenericDatabase import Database +from common.method_wrappers.ServiceExceptions import OperationFailedException -LOGGER = logging.getLogger(__name__) -DB_NAME = "tfs_telemetry" +LOGGER = logging.getLogger(__name__) +METRICS_POOL = MetricsPool('TelemteryFrontend', 'Database') -class TelemetryDB: - def __init__(self): - self.db_engine = TelemetryEngine.get_engine() - if self.db_engine is None: - LOGGER.error('Unable to get SQLAlchemy DB Engine...') - return False - self.db_name = DB_NAME - self.Session = sessionmaker(bind=self.db_engine) - - def create_database(self): - if not sqlalchemy_utils.database_exists(self.db_engine.url): - LOGGER.debug("Database created. {:}".format(self.db_engine.url)) - sqlalchemy_utils.create_database(self.db_engine.url) - - def drop_database(self) -> None: - if sqlalchemy_utils.database_exists(self.db_engine.url): - sqlalchemy_utils.drop_database(self.db_engine.url) - - def create_tables(self): - try: - CollectorModel.metadata.create_all(self.db_engine) # type: ignore - LOGGER.debug("Tables created in the database: {:}".format(self.db_name)) - except Exception as e: - LOGGER.debug("Tables cannot be created in the database. 
{:s}".format(str(e))) - raise OperationFailedException ("Tables can't be created", extra_details=["unable to create table {:}".format(e)]) - - def verify_tables(self): - try: - inspect_object = inspect(self.db_engine) - if(inspect_object.has_table('collector', None)): - LOGGER.info("Table exists in DB: {:}".format(self.db_name)) - except Exception as e: - LOGGER.info("Unable to fetch Table names. {:s}".format(str(e))) - -# ----------------- CURD METHODs --------------------- - - def add_row_to_db(self, row): - session = self.Session() - try: - session.add(row) - session.commit() - LOGGER.debug(f"Row inserted into {row.__class__.__name__} table.") - return True - except Exception as e: - session.rollback() - if "psycopg2.errors.UniqueViolation" in str(e): - LOGGER.error(f"Unique key voilation: {row.__class__.__name__} table. {str(e)}") - raise AlreadyExistsException(row.__class__.__name__, row, - extra_details=["Unique key voilation: {:}".format(e)] ) - else: - LOGGER.error(f"Failed to insert new row into {row.__class__.__name__} table. {str(e)}") - raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) - finally: - session.close() - - def search_db_row_by_id(self, model, col_name, id_to_search): - session = self.Session() - try: - entity = session.query(model).filter_by(**{col_name: id_to_search}).first() - if entity: - # LOGGER.debug(f"{model.__name__} ID found: {str(entity)}") - return entity - else: - LOGGER.debug(f"{model.__name__} ID not found, No matching row: {str(id_to_search)}") - print("{:} ID not found, No matching row: {:}".format(model.__name__, id_to_search)) - return None - except Exception as e: - session.rollback() - LOGGER.debug(f"Failed to retrieve {model.__name__} ID. 
{str(e)}") - raise OperationFailedException ("search by column id", extra_details=["unable to search row {:}".format(e)]) - finally: - session.close() - - def delete_db_row_by_id(self, model, col_name, id_to_search): - session = self.Session() - try: - record = session.query(model).filter_by(**{col_name: id_to_search}).first() - if record: - session.delete(record) - session.commit() - LOGGER.debug("Deleted %s with %s: %s", model.__name__, col_name, id_to_search) - else: - LOGGER.debug("%s with %s %s not found", model.__name__, col_name, id_to_search) - return None - except Exception as e: - session.rollback() - LOGGER.error("Error deleting %s with %s %s: %s", model.__name__, col_name, id_to_search, e) - raise OperationFailedException ("Deletion by column id", extra_details=["unable to delete row {:}".format(e)]) - finally: - session.close() +class TelemetryDB(Database): + def __init__(self, model) -> None: + LOGGER.info('Init KpiManagerService') + super().__init__(model) def select_with_filter(self, model, filter_object): + """ + Generic method to create filters dynamically based on filter_object attributes. + params: model: SQLAlchemy model class to query. + filter_object: Object that contains filtering criteria as attributes. 
+ return: SQLAlchemy session, query and Model + """ session = self.Session() try: - query = session.query(CollectorModel) - # Apply filters based on the filter_object + query = session.query(model) if filter_object.kpi_id: - query = query.filter(CollectorModel.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id])) - result = query.all() - # query should be added to return all rows - if result: - LOGGER.debug(f"Fetched filtered rows from {model.__name__} table with filters: {filter_object}") # - Results: {result} - else: - LOGGER.warning(f"No matching row found in {model.__name__} table with filters: {filter_object}") - return result + query = query.filter(model.kpi_id.in_([k.kpi_id.uuid for k in filter_object.kpi_id])) except Exception as e: - LOGGER.error(f"Error fetching filtered rows from {model.__name__} table with filters {filter_object} ::: {e}") - raise OperationFailedException ("Select by filter", extra_details=["unable to apply the filter {:}".format(e)]) - finally: - session.close() - + LOGGER.error(f"Error creating filter of {model.__name__} table. 
ERROR: {e}") + raise OperationFailedException ("CreateKpiDescriptorFilter", extra_details=["unable to create the filter {:}".format(e)]) + + return super().select_with_filter(query, session, model) diff --git a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py index b73d9fa952ee42aeb7adb8f3c0b2e4a3ba7f3e09..c72e66bdd53f165ebae131e07f51d23e609dd8be 100644 --- a/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py +++ b/src/telemetry/frontend/service/TelemetryFrontendServiceServicerImpl.py @@ -40,7 +40,7 @@ ACTIVE_COLLECTORS = [] # keep and can be populated from DB class TelemetryFrontendServiceServicerImpl(TelemetryFrontendServiceServicer): def __init__(self): LOGGER.info('Init TelemetryFrontendService') - self.tele_db_obj = TelemetryDB() + self.tele_db_obj = TelemetryDB(CollectorModel) self.kafka_producer = KafkaProducer({'bootstrap.servers' : KafkaConfig.get_kafka_address()}) self.kafka_consumer = KafkaConsumer({'bootstrap.servers' : KafkaConfig.get_kafka_address(), 'group.id' : 'frontend', diff --git a/src/telemetry/frontend/service/__main__.py b/src/telemetry/frontend/service/__main__.py index 2a6c5dbcf2da6b6a074c2b8ee23791bc4896442f..6697ff5f10e58b494736738e631a29a20691732d 100644 --- a/src/telemetry/frontend/service/__main__.py +++ b/src/telemetry/frontend/service/__main__.py @@ -16,6 +16,8 @@ import logging, signal, sys, threading from prometheus_client import start_http_server from common.Settings import get_log_level, get_metrics_port from .TelemetryFrontendService import TelemetryFrontendService +from telemetry.database.TelemetryModel import Collector as Model +from common.tools.database.GenericDatabase import Database terminate = threading.Event() LOGGER = None @@ -36,6 +38,11 @@ def main(): LOGGER.info('Starting...') + # To create DB + kpiDBobj = Database(Model) + kpiDBobj.create_database() + kpiDBobj.create_tables() + # Start metrics server 
metrics_port = get_metrics_port() start_http_server(metrics_port) diff --git a/src/telemetry/tests/test_telemetryDB.py b/src/telemetry/tests/test_telemetryDB.py index c4976f8c2144fcdcad43a3e25d43091010de0d18..1b122e4bca266018c01044e2eb8a1ab277b3e3c3 100644 --- a/src/telemetry/tests/test_telemetryDB.py +++ b/src/telemetry/tests/test_telemetryDB.py @@ -21,8 +21,8 @@ LOGGER = logging.getLogger(__name__) def test_verify_databases_and_tables(): LOGGER.info('>>> test_verify_databases_and_tables : START <<< ') TelemetryDBobj = TelemetryDB() - TelemetryDBobj.drop_database() - TelemetryDBobj.verify_tables() + # TelemetryDBobj.drop_database() + # TelemetryDBobj.verify_tables() TelemetryDBobj.create_database() TelemetryDBobj.create_tables() - TelemetryDBobj.verify_tables() \ No newline at end of file + TelemetryDBobj.verify_tables() diff --git a/src/tests/tools/mock_qkd_nodes/start.sh b/src/tests/tools/mock_qkd_nodes/start.sh index b1bc56d5a7f90809e81c73a54803fb2dc11bacd9..faf2f84baf61f16565b497b53bf5f41f45007c00 100755 --- a/src/tests/tools/mock_qkd_nodes/start.sh +++ b/src/tests/tools/mock_qkd_nodes/start.sh @@ -23,8 +23,8 @@ killbg() { trap killbg EXIT pids=() -flask --app mock run --host 0.0.0.0 --port 11111 & +flask run --host 0.0.0.0 --port 11111 & pids+=($!) -flask --app mock run --host 0.0.0.0 --port 22222 & +flask run --host 0.0.0.0 --port 22222 & pids+=($!) 
-flask --app mock run --host 0.0.0.0 --port 33333 +flask run --host 0.0.0.0 --port 33333 diff --git a/src/tests/tools/mock_qkd_nodes/mock.py b/src/tests/tools/mock_qkd_nodes/wsgi.py similarity index 97% rename from src/tests/tools/mock_qkd_nodes/mock.py rename to src/tests/tools/mock_qkd_nodes/wsgi.py index 7a606f6cac855fee9852f620c595908fbb3d36da..3f8847849337fbfb1a9f84c783786218db4fb04d 100644 --- a/src/tests/tools/mock_qkd_nodes/mock.py +++ b/src/tests/tools/mock_qkd_nodes/wsgi.py @@ -23,7 +23,7 @@ yang_validator = YangValidator('etsi-qkd-sdn-node', ['etsi-qkd-node-types']) nodes = { - '127.0.0.1:11111': {'node': { + '10.0.2.10:11111': {'node': { 'qkdn_id': '00000001-0000-0000-0000-000000000000', }, 'qkdn_capabilities': { @@ -54,7 +54,7 @@ nodes = { { 'qkdi_id': '101', 'qkdi_att_point': { - 'device':'127.0.0.1', + 'device':'10.0.2.10', 'port':'1001' }, 'qkdi_capabilities': { @@ -69,7 +69,7 @@ nodes = { } }, - '127.0.0.1:22222': {'node': { + '10.0.2.10:22222': {'node': { 'qkdn_id': '00000002-0000-0000-0000-000000000000', }, 'qkdn_capabilities': { @@ -100,7 +100,7 @@ nodes = { { 'qkdi_id': '201', 'qkdi_att_point': { - 'device':'127.0.0.1', + 'device':'10.0.2.10', 'port':'2001' }, 'qkdi_capabilities': { @@ -109,7 +109,7 @@ nodes = { { 'qkdi_id': '202', 'qkdi_att_point': { - 'device':'127.0.0.1', + 'device':'10.0.2.10', 'port':'2002' }, 'qkdi_capabilities': { @@ -124,7 +124,7 @@ nodes = { } }, - '127.0.0.1:33333': {'node': { + '10.0.2.10:33333': {'node': { 'qkdn_id': '00000003-0000-0000-0000-000000000000', }, 'qkdn_capabilities': { @@ -155,7 +155,7 @@ nodes = { { 'qkdi_id': '301', 'qkdi_att_point': { - 'device':'127.0.0.1', + 'device':'10.0.2.10', 'port':'3001' }, 'qkdi_capabilities': { diff --git a/src/webui/Dockerfile b/src/webui/Dockerfile index 55e67b670f36812a55cf60e411cf137bc5b8a2ee..8295087667a6a25b209c584fc9627a5457f47f4d 100644 --- a/src/webui/Dockerfile +++ b/src/webui/Dockerfile @@ -84,9 +84,11 @@ COPY --chown=webui:webui src/service/__init__.py 
service/__init__.py COPY --chown=webui:webui src/service/client/. service/client/ COPY --chown=webui:webui src/slice/__init__.py slice/__init__.py COPY --chown=webui:webui src/slice/client/. slice/client/ -COPY --chown=webui:webui src/webui/. webui/ +COPY --chown=webui:webui src/qkd_app/__init__.py qkd_app/__init__.py +COPY --chown=webui:webui src/qkd_app/client/. qkd_app/client/ COPY --chown=webui:webui src/bgpls_speaker/__init__.py bgpls_speaker/__init__.py COPY --chown=webui:webui src/bgpls_speaker/client/. bgpls_speaker/client/ +COPY --chown=webui:webui src/webui/. webui/ # Start the service ENTRYPOINT ["python", "-m", "webui.service"] diff --git a/src/webui/service/__init__.py b/src/webui/service/__init__.py index bbb5fa4fc6eccb3d15f596db28239b9fed9258ae..ff333a78ce298a8e6c578c876a003d08b11a084d 100644 --- a/src/webui/service/__init__.py +++ b/src/webui/service/__init__.py @@ -19,6 +19,10 @@ from flask_healthz import healthz, HealthError from common.tools.grpc.Tools import grpc_message_to_json from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient +from qkd_app.client.QKDAppClient import QKDAppClient +from common.Settings import ( + is_deployed_bgpls, is_deployed_load_gen, is_deployed_policy, is_deployed_qkd_app, is_deployed_slice +) def get_working_context() -> str: return session['context_uuid'] if 'context_uuid' in session else '---' @@ -37,6 +41,10 @@ def readiness(): device_client = DeviceClient() device_client.connect() device_client.close() + # DEPENDENCY QKD + qkd_app_client = QKDAppClient() + qkd_app_client.connect() + qkd_app_client.close() except Exception as e: raise HealthError("Can't connect with the service: {:s}".format(str(e))) from e @@ -78,13 +86,13 @@ def create_app(use_config=None, web_app_root=None): app.register_blueprint(healthz, url_prefix='/healthz') - from webui.service.js.routes import js # pylint: disable=import-outside-toplevel + from webui.service.js.routes import js # pylint: 
disable=import-outside-toplevel app.register_blueprint(js) - from webui.service.main.routes import main # pylint: disable=import-outside-toplevel + from webui.service.main.routes import main # pylint: disable=import-outside-toplevel app.register_blueprint(main) - from webui.service.load_gen.routes import load_gen # pylint: disable=import-outside-toplevel + from webui.service.load_gen.routes import load_gen # pylint: disable=import-outside-toplevel app.register_blueprint(load_gen) from webui.service.base_optical.route import base_optical # pylint: disable=import-outside-toplevel @@ -96,21 +104,24 @@ def create_app(use_config=None, web_app_root=None): from webui.service.optical_link.routes import optical_link # pylint: disable=import-outside-toplevel app.register_blueprint(optical_link) - from webui.service.service.routes import service # pylint: disable=import-outside-toplevel + from webui.service.service.routes import service # pylint: disable=import-outside-toplevel app.register_blueprint(service) - from webui.service.slice.routes import slice # pylint: disable=import-outside-toplevel,redefined-builtin + from webui.service.slice.routes import slice # pylint: disable=import-outside-toplevel,redefined-builtin app.register_blueprint(slice) - from webui.service.device.routes import device # pylint: disable=import-outside-toplevel + from webui.service.device.routes import device # pylint: disable=import-outside-toplevel app.register_blueprint(device) - from webui.service.bgpls.routes import bgpls # pylint: disable=import-outside-toplevel + from webui.service.bgpls.routes import bgpls # pylint: disable=import-outside-toplevel app.register_blueprint(bgpls) - from webui.service.link.routes import link # pylint: disable=import-outside-toplevel + from webui.service.link.routes import link # pylint: disable=import-outside-toplevel app.register_blueprint(link) + from webui.service.qkd_app.routes import qkd_app # pylint: disable=import-outside-toplevel + 
app.register_blueprint(qkd_app) + from webui.service.policy_rule.routes import policy_rule # pylint: disable=import-outside-toplevel app.register_blueprint(policy_rule) @@ -121,6 +132,12 @@ def create_app(use_config=None, web_app_root=None): 'round' : round, 'get_working_context' : get_working_context, 'get_working_topology': get_working_topology, + + 'is_deployed_bgpls' : is_deployed_bgpls, + 'is_deployed_load_gen': is_deployed_load_gen, + 'is_deployed_policy' : is_deployed_policy, + 'is_deployed_qkd_app' : is_deployed_qkd_app, + 'is_deployed_slice' : is_deployed_slice, }) if web_app_root is not None: diff --git a/src/webui/service/__main__.py b/src/webui/service/__main__.py index e9a906e8a431e287911547abc4065d9d9364ccb4..bb6b8bfc7187dec41cda3e8bcf7b5a1b9f8a601d 100644 --- a/src/webui/service/__main__.py +++ b/src/webui/service/__main__.py @@ -33,6 +33,7 @@ def main(): logging.basicConfig(level=log_level) logger = logging.getLogger(__name__) + # DEPENDENCY QKD wait_for_environment_variables([ get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ), get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC), diff --git a/src/webui/service/device/forms.py b/src/webui/service/device/forms.py index e4c71d92170dc9fe46996a1c93978647800aa300..eebc06755f204cd270ff8feca21733cb4426493a 100644 --- a/src/webui/service/device/forms.py +++ b/src/webui/service/device/forms.py @@ -33,6 +33,7 @@ class AddDeviceForm(FlaskForm): device_drivers_gnmi_openconfig = BooleanField('GNMI OPENCONFIG') device_drivers_optical_tfs = BooleanField('OPTICAL TFS') device_drivers_ietf_actn = BooleanField('IETF ACTN') + device_drivers_qkd = BooleanField('QKD') device_config_address = StringField('connect/address',default='127.0.0.1',validators=[DataRequired(), Length(min=5)]) device_config_port = StringField('connect/port',default='0',validators=[DataRequired(), Length(min=1)]) @@ -57,3 +58,4 @@ class UpdateDeviceForm(FlaskForm): validators=[NumberRange(min=0)]) submit = 
SubmitField('Update') + diff --git a/src/webui/service/device/routes.py b/src/webui/service/device/routes.py index b7fdb78e85dc634627de02947c0861a7f13bdae9..429f4a2ea8539b7b12baf5e20eb30760694ede64 100644 --- a/src/webui/service/device/routes.py +++ b/src/webui/service/device/routes.py @@ -129,6 +129,8 @@ def add(): device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_OPTICAL_TFS) if form.device_drivers_ietf_actn.data: device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_IETF_ACTN) + if form.device_drivers_qkd.data: + device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_QKD) device_obj.device_drivers.extend(device_drivers) # pylint: disable=no-member try: diff --git a/src/webui/service/qkd_app/__init__.py b/src/webui/service/qkd_app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bbfc943b68af13a11e562abbc8680ade71db8f02 --- /dev/null +++ b/src/webui/service/qkd_app/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/src/webui/service/qkd_app/routes.py b/src/webui/service/qkd_app/routes.py new file mode 100644 index 0000000000000000000000000000000000000000..71243fb75e552ec5568eedacdcadabbc39516b4e --- /dev/null +++ b/src/webui/service/qkd_app/routes.py @@ -0,0 +1,113 @@ +# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc, json, logging + +from flask import current_app, render_template, Blueprint, flash, session, redirect, url_for +from common.proto.context_pb2 import Empty, Link, LinkId, LinkList +from common.proto.qkd_app_pb2 import App, QKDAppStatusEnum, QKDAppTypesEnum +from common.tools.context_queries.Context import get_context +from common.tools.context_queries.Device import get_device +from common.tools.context_queries.Topology import get_topology +from context.client.ContextClient import ContextClient +from qkd_app.client.QKDAppClient import QKDAppClient + + +LOGGER = logging.getLogger(__name__) +qkd_app = Blueprint('qkd_app', __name__, url_prefix='/qkd_app') + +qkd_app_client = QKDAppClient() +context_client = ContextClient() + +@qkd_app.get('/') +def home(): + if 'context_uuid' not in session or 'topology_uuid' not in session: + flash("Please select a context!", "warning") + return redirect(url_for("main.home")) + context_uuid = session['context_uuid'] + topology_uuid = session['topology_uuid'] + + context_client.connect() + device_names = dict() + + context_obj = 
get_context(context_client, context_uuid, rw_copy=False) + if context_obj is None: + flash('Context({:s}) not found'.format(str(context_uuid)), 'danger') + apps = list() + else: + try: + apps = qkd_app_client.ListApps(context_obj.context_id) + apps = apps.apps + except grpc.RpcError as e: + if e.code() != grpc.StatusCode.NOT_FOUND: raise + if e.details() != 'Context({:s}) not found'.format(context_uuid): raise + apps = list() + else: + # Too many requests to context_client if it has too many apps (update in the future) + for app in apps: + if app.local_device_id.device_uuid.uuid not in device_names: + device = get_device(context_client, app.local_device_id.device_uuid.uuid) + if device is not None: + device_names[app.local_device_id.device_uuid.uuid] = device.name + + if app.remote_device_id.device_uuid.uuid and app.remote_device_id.device_uuid.uuid not in device_names: + device = get_device(context_client, app.remote_device_id.device_uuid.uuid) + if device is not None: + device_names[app.remote_device_id.device_uuid.uuid] = device.name + + context_client.close() + return render_template( + 'qkd_app/home.html', apps=apps, device_names=device_names, ate=QKDAppTypesEnum, ase=QKDAppStatusEnum) + + +@qkd_app.route('detail/<path:app_uuid>', methods=('GET', 'POST')) +def detail(app_uuid: str): + ''' + context_client.connect() + link_obj = get_link(context_client, link_uuid, rw_copy=False) + if link_obj is None: + flash('Link({:s}) not found'.format(str(link_uuid)), 'danger') + link_obj = Link() + device_names, endpoints_data = dict(), dict() + else: + device_names, endpoints_data = get_endpoint_names(context_client, link_obj.link_endpoint_ids) + context_client.close() + return render_template('link/detail.html',link=link_obj, device_names=device_names, endpoints_data=endpoints_data) + ''' + pass + +@qkd_app.get('<path:app_uuid>/delete') +def delete(app_uuid): + ''' + try: + + # first, check if link exists! 
+ # request: LinkId = LinkId() + # request.link_uuid.uuid = link_uuid + # response: Link = client.GetLink(request) + # TODO: finalize implementation + + request = LinkId() + request.link_uuid.uuid = link_uuid # pylint: disable=no-member + context_client.connect() + context_client.RemoveLink(request) + context_client.close() + + flash(f'Link "{link_uuid}" deleted successfully!', 'success') + except Exception as e: # pylint: disable=broad-except + flash(f'Problem deleting link "{link_uuid}": {e.details()}', 'danger') + current_app.logger.exception(e) + return redirect(url_for('link.home')) + ''' + pass diff --git a/src/webui/service/service/__init__.py b/src/webui/service/service/__init__.py index 3ee6f7071f145e06c3aeaefc09a43ccd88e619e3..5cf553eaaec41de7599b6723e31e4ca3f82cbcae 100644 --- a/src/webui/service/service/__init__.py +++ b/src/webui/service/service/__init__.py @@ -12,3 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ diff --git a/src/webui/service/service/forms.py b/src/webui/service/service/forms.py index f07acf54365b79245583e7f9567b8bc4a5cfd89d..dad15f1c2dbef3a5d1c9a3ecdc6f96c00b883aa2 100644 --- a/src/webui/service/service/forms.py +++ b/src/webui/service/service/forms.py @@ -17,6 +17,11 @@ from flask_wtf import FlaskForm from wtforms import StringField, SelectField, IntegerField, DecimalField from wtforms.validators import InputRequired, Optional, NumberRange, ValidationError, StopValidation +# Custom uuid validator +def validate_uuid_address(form, field): + if not re.match(r'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$', field.data): + raise ValidationError('Invalid uuid format') + # Custom IPv4 address validator def validate_ipv4_address(form, field): try: @@ -60,7 +65,25 @@ class CustomInputRequired(): raise StopValidation(self.message) class AddServiceForm_1(FlaskForm): - service_type = SelectField('Type of service', choices=[('', 'Select a type of service to add'), ('ACL_L2', 'ACL_L2'), ('ACL_IPV4', 'ACL_IPV4'), ('ACL_IPV6', 'ACL_IPV6'), ('L2VPN', 'L2VPN'), ('L3VPN', 'L3VPN')], validators=[InputRequired()]) + service_type = SelectField('Type of service', choices=[('', 'Select a type of service to add'), ('ACL_L2', 'ACL_L2'), ('ACL_IPV4', 'ACL_IPV4'), ('ACL_IPV6', 'ACL_IPV6'), ('L2VPN', 'L2VPN'), ('L3VPN', 'L3VPN'), ('QKD', 'QKD')], validators=[InputRequired()]) + +class AddServiceForm_QKD(FlaskForm): + #GENERIC SERVICE PARAMETERS (COMMON & MANDATORY) + service_name = StringField('Service Name', validators=[CustomInputRequired()]) + service_type = SelectField('Service Type', choices=[(6, '6 (QKD)')], validators=[CustomInputRequired()]) + service_device_1 = SelectField('Device_1', choices=[('', 'Select a device (Mandatory)')], validators=[CustomInputRequired()]) + service_device_2 = SelectField('Device_2', choices=[('', 'Select a device (Mandatory)')], validators=[CustomInputRequired()]) + service_endpoint_1 = StringField('Device_1 
Endpoint', validators=[CustomInputRequired()]) + service_endpoint_2 = StringField('Device_2 Endpoint', validators=[CustomInputRequired()]) + + #GENERIC SERVICE CONSTRAINT PARAMETERS (ALL OPTIONAL) + service_capacity = DecimalField('Service Capacity', places=2, default=10.00, validators=[Optional(), NumberRange(min=0)]) + service_latency = DecimalField('Service Latency', places=2, default=15.20, validators=[Optional(), NumberRange(min=0)]) + service_availability= DecimalField('Service Availability', places=2, validators=[Optional(), NumberRange(min=0)]) + service_isolation = SelectField('Service Isolation', choices=[('', 'Select (Optional)'), ('NO_ISOLATION', 'NO_ISOLATION'), ('PHYSICAL_ISOLATION', 'PHYSICAL_ISOLATION'), + ('LOGICAL_ISOLATION', 'LOGICAL_ISOLATION'), ('PROCESS_ISOLATION', 'PROCESS_ISOLATION'), ('PHYSICAL_MEMORY_ISOLATION', 'PHYSICAL_MEMORY_ISOLATION'), + ('PHYSICAL_NETWORK_ISOLATION', 'PHYSICAL_NETWORK_ISOLATION'), ('VIRTUAL_RESOURCE_ISOLATION', 'VIRTUAL_RESOURCE_ISOLATION'), + ('NETWORK_FUNCTIONS_ISOLATION', 'NETWORK_FUNCTIONS_ISOLATION'), ('SERVICE_ISOLATION', 'SERVICE_ISOLATION')], validators=[Optional()]) class AddServiceForm_ACL_L2(FlaskForm): #GENERIC SERVICE PARAMETERS (COMMON & MANDATORY) @@ -259,3 +282,4 @@ class AddServiceForm_L3VPN(FlaskForm): Device_2_IF_address_ip = StringField('Device_2 IP Address', validators=[CustomInputRequired(), validate_ipv4_address]) Device_2_IF_address_prefix = IntegerField('Device_2 IP Prefix length', validators=[CustomInputRequired(), validate_uint32]) Device_2_IF_description = StringField ('Device_2 SubIF Description', validators=[Optional()]) + diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index 92025b2bec4f7c70b446a8c422e2cdb166c95466..c164b41773e15ac4e9746753e1fdc3b56a51b0d2 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -35,14 +35,14 @@ from common.tools.object_factory.Constraint import ( from 
common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Device import json_device_id from common.tools.object_factory.EndPoint import json_endpoint_id -from common.tools.object_factory.Service import json_service_l2nm_planned, json_service_l3nm_planned +from common.tools.object_factory.Service import json_service_l2nm_planned, json_service_l3nm_planned, json_service_qkd_planned from common.tools.object_factory.Topology import json_topology_id from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient from service.client.ServiceClient import ServiceClient from webui.service.service.forms import ( AddServiceForm_1, AddServiceForm_ACL_L2, AddServiceForm_ACL_IPV4, AddServiceForm_ACL_IPV6, - AddServiceForm_L2VPN, AddServiceForm_L3VPN + AddServiceForm_L2VPN, AddServiceForm_L3VPN, AddServiceForm_QKD ) LOGGER = logging.getLogger(__name__) @@ -329,10 +329,83 @@ def add_configure(): form_1 = AddServiceForm_1() if form_1.validate_on_submit(): service_type = str(form_1.service_type.data) - if service_type in {'ACL_L2', 'ACL_IPV4', 'ACL_IPV6', 'L2VPN', 'L3VPN'}: + if service_type in {'ACL_L2', 'ACL_IPV4', 'ACL_IPV6', 'L2VPN', 'L3VPN', 'QKD'}: return redirect(url_for('service.add_configure_{:s}'.format(service_type))) return render_template('service/add.html', form_1=form_1, submit_text='Continue to configuraton') +@service.route('add/configure/QKD', methods=['GET', 'POST']) +def add_configure_QKD(): + form_qkd = AddServiceForm_QKD() + service_obj = Service() + + context_uuid, topology_uuid = get_context_and_topology_uuids() + if context_uuid and topology_uuid: + context_client.connect() + grpc_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid, rw_copy=False) + if grpc_topology: + topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} + devices = get_filtered_devices(context_client, topo_device_uuids) + grpc_devices = 
context_client.ListDevices(Empty()) + devices = [ + device for device in grpc_devices.devices + if device.device_id.device_uuid.uuid in topo_device_uuids and DeviceDriverEnum.DEVICEDRIVER_QKD in device.device_drivers + ] + choices = get_device_choices(devices) + add_device_choices_to_form(choices, form_qkd.service_device_1) + add_device_choices_to_form(choices, form_qkd.service_device_2) + else: + flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger') + else: + flash('Missing context or topology UUID', 'danger') + + if form_qkd.validate_on_submit(): + try: + [selected_device_1, selected_device_2, selected_endpoint_1, selected_endpoint_2] = validate_selected_devices_and_endpoints(form_qkd, devices) + except Exception as e: + flash('{:s}'.format(str(e.args[0])), 'danger') + current_app.logger.exception(e) + return render_template('service/configure_QKD.html', form_qkd=form_qkd, submit_text='Add New Service') + + service_uuid, service_type, endpoint_ids = set_service_parameters(service_obj, form_qkd, selected_device_1, selected_device_2, selected_endpoint_1, selected_endpoint_2) + constraints = add_constraints(form_qkd) + params_device_1_with_data = get_device_params(form_qkd, 1, service_type) + params_device_2_with_data = get_device_params(form_qkd, 2, service_type) + print(params_device_1_with_data) + print(params_device_2_with_data) + params_settings = {} + config_rules = [ + json_config_rule_set( + '/settings', params_settings + ), + json_config_rule_set( + '/device[{:s}]/endpoint[{:s}]/settings'.format(str(selected_device_1.name), str(selected_endpoint_1)), params_device_1_with_data + ), + json_config_rule_set( + '/device[{:s}]/endpoint[{:s}]/settings'.format(str(selected_device_2.name), str(selected_endpoint_2)), params_device_2_with_data + ) + ] + + service_client.connect() + context_client.connect() + device_client.connect() + descriptor_json = json_service_qkd_planned(service_uuid = service_uuid, endpoint_ids = 
endpoint_ids, constraints = constraints, config_rules = config_rules, context_uuid= context_uuid) + descriptor_json = {"services": [descriptor_json]} + try: + process_descriptors(descriptor_json) + flash('Service "{:s}" added successfully!'.format(service_obj.service_id.service_uuid.uuid), 'success') + return redirect(url_for('service.home', service_uuid=service_obj.service_id.service_uuid.uuid)) + except Exception as e: + flash('Problem adding service: {:s}'.format((str(e.args[0]))), 'danger') + current_app.logger.exception(e) + finally: + context_client.close() + device_client.close() + service_client.close() + + + return render_template('service/configure_QKD.html', form_qkd=form_qkd, submit_text='Add New Service') + + @service.route('add/configure/ACL_L2', methods=['GET', 'POST']) def add_configure_ACL_L2(): form_acl = AddServiceForm_ACL_L2() @@ -666,6 +739,9 @@ def get_device_params(form, device_num, form_type): 'ni_description': str(getattr(form, 'NI_description').data), 'subif_description': str(getattr(form, f'Device_{device_num}_IF_description').data), } + elif form_type == 6: + device_params = { + } else: raise ValueError(f'Unsupported form type: {form_type}') diff --git a/src/webui/service/static/topology_icons/emu-qkd-node.png b/src/webui/service/static/topology_icons/emu-qkd-node.png new file mode 100644 index 0000000000000000000000000000000000000000..d4dc1abaf42a56ff07d1f4a2c5d250b56486584d Binary files /dev/null and b/src/webui/service/static/topology_icons/emu-qkd-node.png differ diff --git a/src/webui/service/static/topology_icons/qkd-node.png b/src/webui/service/static/topology_icons/qkd-node.png new file mode 100644 index 0000000000000000000000000000000000000000..79f40d2a600bd7f9e55d0360a132800c09a8ac85 Binary files /dev/null and b/src/webui/service/static/topology_icons/qkd-node.png differ diff --git a/src/webui/service/templates/base.html b/src/webui/service/templates/base.html index 
fcd1bdebeec3ceb63add5654098809b9ef0069e5..aeba9e7048d6506b59688521bda3f3fee6963406 100644 --- a/src/webui/service/templates/base.html +++ b/src/webui/service/templates/base.html @@ -55,6 +55,7 @@ <a class="nav-link" href="{{ url_for('main.home') }}">Home</a> {% endif %} </li> + <li class="nav-item"> {% if '/device/' in request.path %} <a class="nav-link active" aria-current="page" href="{{ url_for('device.home') }}">Device</a> @@ -62,6 +63,7 @@ <a class="nav-link" href="{{ url_for('device.home') }}">Device</a> {% endif %} </li> + <li class="nav-item"> {% if '/link/' in request.path %} <a class="nav-link active" aria-current="page" href="{{ url_for('link.home') }}">Link</a> @@ -69,6 +71,7 @@ <a class="nav-link" href="{{ url_for('link.home') }}">Link</a> {% endif %} </li> + <li class="nav-item"> {% if '/service/' in request.path %} <a class="nav-link active" aria-current="page" href="{{ url_for('service.home') }}">Service</a> @@ -97,26 +100,63 @@ <a class="nav-link" href="{{ url_for('policy_rule.home') }}">Policy Rules</a> {% endif %} </li> + + {% if is_deployed_slice() %} + <li class="nav-item"> + {% if '/slice/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('slice.home') }}">Slice</a> + {% else %} + <a class="nav-link" href="{{ url_for('slice.home') }}">Slice</a> + {% endif %} + </li> + {% endif %} + + {% if is_deployed_policy() %} + <li class="nav-item"> + {% if '/policy_rule/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('policy_rule.home') }}">Policy Rules</a> + {% else %} + <a class="nav-link" href="{{ url_for('policy_rule.home') }}">Policy Rules</a> + {% endif %} + </li> + {% endif %} + + {% if is_deployed_qkd_app() %} + <li class="nav-item"> + {% if '/qkd_app/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('qkd_app.home') }}">QKD Apps</a> + {% else %} + <a class="nav-link" href="{{ url_for('qkd_app.home') }}">QKD Apps</a> + {% endif %} + </li> 
+ {% endif %} + + {% if is_deployed_bgpls() %} + <li class="nav-item"> + {% if '/bgpls/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('bgpls.home') }}">BGPLS</a> + {% else %} + <a class="nav-link" href="{{ url_for('bgpls.home') }}">BGPLS</a> + {% endif %} + </li> + {% endif %} + + {% if is_deployed_load_gen() %} + <li class="nav-item"> + {% if '/load-gen/' in request.path %} + <a class="nav-link active" aria-current="page" href="{{ url_for('load_gen.home') }}">Load Generator</a> + {% else %} + <a class="nav-link" href="{{ url_for('load_gen.home') }}">Load Generator</a> + {% endif %} + </li> + {% endif %} + <li class="nav-item"> <a class="nav-link" href="/grafana" id="grafana_link" target="grafana">Grafana</a> </li> <li class="nav-item"> <a class="nav-link" href="{{ url_for('main.debug') }}">Debug</a> </li> - <li class="nav-item"> - {% if '/load-gen/' in request.path %} - <a class="nav-link active" aria-current="page" href="{{ url_for('load_gen.home') }}">Load Generator</a> - {% else %} - <a class="nav-link" href="{{ url_for('load_gen.home') }}">Load Generator</a> - {% endif %} - </li> - <li class="nav-item"> - {% if '/bgpls/' in request.path %} - <a class="nav-link active" aria-current="page" href="{{ url_for('bgpls.home') }}">BGPLS</a> - {% else %} - <a class="nav-link" href="{{ url_for('bgpls.home') }}">BGPLS</a> - {% endif %} - </li> <!-- <li class="nav-item"> <a class="nav-link" href="#">Context</a> diff --git a/src/webui/service/templates/device/add.html b/src/webui/service/templates/device/add.html index 3bea6ae719a75c91835ceb35f50b5bbeba2c7940..e11c37688c09b96849c63a5d51cd7e546468d558 100644 --- a/src/webui/service/templates/device/add.html +++ b/src/webui/service/templates/device/add.html @@ -95,6 +95,7 @@ <br /> {{ form.device_drivers_optical_tfs }} {{ form.device_drivers_optical_tfs.label(class="col-sm-3 col-form-label") }} {{ form.device_drivers_ietf_actn }} {{ form.device_drivers_ietf_actn.label(class="col-sm-3 
col-form-label") }} + {{ form.device_drivers_qkd }} {{ form.device_drivers_qkd.label(class="col-sm-3 col-form-label") }} {% endif %} </div> </div> diff --git a/src/webui/service/templates/qkd_app/home.html b/src/webui/service/templates/qkd_app/home.html new file mode 100644 index 0000000000000000000000000000000000000000..9573013f41410a5d8560e71c174ce6a85237089f --- /dev/null +++ b/src/webui/service/templates/qkd_app/home.html @@ -0,0 +1,96 @@ +<!-- + Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +{% extends 'base.html' %} + +{% block content %} + <h1>Apps</h1> + + <div class="row"> + <div class="col"> + {{ apps | length }} apps found in context <i>{{ session['context_uuid'] }}</i> + </div> + </div> + + <table class="table table-striped table-hover"> + <thead> + <tr> + <th scope="col">UUID</th> + <th scope="col">Status</th> + <th scope="col">Type</th> + <th scope="col">Device 1</th> + <th scope="col">Device 2</th> + <th scope="col"></th> + </tr> + </thead> + <tbody> + {% if apps %} + {% for app in apps %} + <tr> + <td> + {{ app.app_id.app_uuid.uuid }} + </td> + <td> + {{ ase.Name(app.app_status).replace('QKDAPPSTATUS_', '') }} + </td> + <td> + {{ ate.Name(app.app_type).replace('QKDAPPTYPES_', '').replace('CLIENT', 'EXTERNAL') }} + </td> + <td> + <li> + <a href="{{ url_for('device.detail', device_uuid=app.local_device_id.device_uuid.uuid) }}"> + {{ device_names.get(app.local_device_id.device_uuid.uuid, app.local_device_id.device_uuid.uuid) }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </li> + </td> + <td> + {% if app.remote_device_id.device_uuid.uuid %} + <li> + <a href="{{ url_for('device.detail', device_uuid=app.remote_device_id.device_uuid.uuid) }}"> + {{ device_names.get(app.remote_device_id.device_uuid.uuid, app.remote_device_id.device_uuid.uuid) }} + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 
5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + </li> + {% endif %} + </td> + <td> + <!-- + <a href="{{ url_for('qkd_app.detail', app_uuid=app.app_id.app_uuid.uuid) }}"> + <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16"> + <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/> + <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/> + </svg> + </a> + --> + </td> + </tr> + {% endfor %} + {% else %} + <tr> + <td colspan="7">No apps found</td> + </tr> + {% endif %} + </tbody> + </table> + +{% endblock %} diff --git a/src/webui/service/templates/service/configure_QKD.html b/src/webui/service/templates/service/configure_QKD.html new file mode 100644 index 0000000000000000000000000000000000000000..a01f4519d7f4b09732fba6d24db034f143be9943 --- /dev/null +++ b/src/webui/service/templates/service/configure_QKD.html @@ -0,0 +1,188 @@ +<!-- + Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +{% extends 'base.html' %} + +{% block content %} +<h1>Add New Service [QKD]</h1> +<form method="POST" action="{{ url_for('service.add_configure_QKD') }}"> + <fieldset> + <div class="row mb-3"> + {{ form_qkd.hidden_tag() }} + </div> + <h3>Generic Service Parameters</h3> + {% if form_qkd.qkd_params is not none %} + <div class="row mb-3"> + {{ form_qkd.service_name.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_name.errors %} + {{ form_qkd.service_name(class="form-control is-invalid", placeholder="Mandatory") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_name.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_name(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_type.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_type.errors %} + {{ form_qkd.service_type(class="form-control is-invalid", placeholder="Mandatory") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_type.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_type(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_device_1.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-4"> + {% if form_qkd.service_device_1.errors %} + {{ form_qkd.service_device_1(class="form-control is-invalid", placeholder="Mandatory") }} + 
<div class="invalid-feedback"> + {% for error in form_qkd.service_device_1.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_device_1(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + {{ form_qkd.service_device_2.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-4"> + {% if form_qkd.service_device_2.errors %} + {{ form_qkd.service_device_2(class="form-control is-invalid", placeholder="Mandatory") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_device_2.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_device_2(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_endpoint_1.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-4"> + {% if form_qkd.service_endpoint_1.errors %} + {{ form_qkd.service_endpoint_1(class="form-control is-invalid", placeholder="Mandatory") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_endpoint_1.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_endpoint_1(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + {{ form_qkd.service_endpoint_2.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-4"> + {% if form_qkd.service_endpoint_2.errors %} + {{ form_qkd.service_endpoint_2(class="form-control is-invalid", placeholder="Mandatory") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_endpoint_2.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_endpoint_2(class="form-control", placeholder="Mandatory") }} + {% endif %} + </div> + </div> + </br> + <h3>Generic Service Constraints</h3> + <div class="row mb-3"> + {{ form_qkd.service_capacity.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_capacity.errors %} + 
{{ form_qkd.service_capacity(class="form-control is-invalid") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_capacity.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_capacity(class="form-control") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_latency.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_latency.errors %} + {{ form_qkd.service_latency(class="form-control is-invalid") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_latency.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_latency(class="form-control") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_availability.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_availability.errors %} + {{ form_qkd.service_availability(class="form-control is-invalid") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_availability.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_availability(class="form-control") }} + {% endif %} + </div> + </div> + <div class="row mb-3"> + {{ form_qkd.service_isolation.label(class="col-sm-2 col-form-label") }} + <div class="col-sm-10"> + {% if form_qkd.service_isolation.errors %} + {{ form_qkd.service_isolation(class="form-control is-invalid") }} + <div class="invalid-feedback"> + {% for error in form_qkd.service_isolation.errors %} + <span>{{ error }}</span> + {% endfor %} + </div> + {% else %} + {{ form_qkd.service_isolation(class="form-control") }} + {% endif %} + </div> + </div> + {% endif %} + <button type="submit" class="btn btn-primary"> + <i class="bi bi-plus-circle-fill"></i> + {{ submit_text }} + </button> + <button type="button" class="btn btn-block btn-secondary" onclick="javascript: history.back()"> + <i class="bi 
bi-box-arrow-in-left"></i> + Cancel + </button> + </fieldset> + </form> + {% endblock %} +