diff --git a/.gitignore b/.gitignore index e0f8501490a85015a57c7280aeba872fcb2c0692..3e79c7c63c4b544408ddf347f07c720b26d43248 100644 --- a/.gitignore +++ b/.gitignore @@ -162,3 +162,6 @@ cython_debug/ # TeraFlowSDN-generated files tfs_runtime_env_vars.sh +delete_local_deployment.sh +local_docker_deployment.sh +local_k8s_deployment.sh diff --git a/deploy.sh b/deploy.sh index f80974573dfa83ef2c2139d6855a46a16e149746..c5dee68a06e000bf7df90ef437e77c14232f2cec 100755 --- a/deploy.sh +++ b/deploy.sh @@ -58,18 +58,6 @@ kubectl delete namespace $TFS_K8S_NAMESPACE kubectl create namespace $TFS_K8S_NAMESPACE printf "\n" -if [[ "$TFS_COMPONENTS" == *"monitoring"* ]]; then - echo "Creating secrets for InfluxDB..." - #TODO: make sure to change this when having a production deployment - kubectl create secret generic influxdb-secrets --namespace=$TFS_K8S_NAMESPACE \ - --from-literal=INFLUXDB_DB="monitoring" --from-literal=INFLUXDB_ADMIN_USER="teraflow" \ - --from-literal=INFLUXDB_ADMIN_PASSWORD="teraflow" --from-literal=INFLUXDB_HTTP_AUTH_ENABLED="True" - kubectl create secret generic monitoring-secrets --namespace=$TFS_K8S_NAMESPACE \ - --from-literal=INFLUXDB_DATABASE="monitoring" --from-literal=INFLUXDB_USER="teraflow" \ - --from-literal=INFLUXDB_PASSWORD="teraflow" --from-literal=INFLUXDB_HOSTNAME="localhost" - printf "\n" -fi - echo "Deploying components and collecting environment variables..." ENV_VARS_SCRIPT=tfs_runtime_env_vars.sh echo "# Environment variables for TeraFlowSDN deployment" > $ENV_VARS_SCRIPT @@ -158,6 +146,8 @@ for COMPONENT in $TFS_COMPONENTS; do fi fi + # TODO: harmonize names of the monitoring component + echo " Deploying '$COMPONENT' component to Kubernetes..." DEPLOY_LOG="$TMP_LOGS_FOLDER/deploy_${COMPONENT}.log" kubectl --namespace $TFS_K8S_NAMESPACE apply -f "$MANIFEST" > "$DEPLOY_LOG" @@ -212,12 +202,12 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring" echo "Configuring WebUI DataStores and Dashboards..." 
sleep 3 - INFLUXDB_HOST="monitoringservice" - INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}') - INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}" - INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode) - INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode) - INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode) + # INFLUXDB_HOST="monitoringservice" + # INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}') + # INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}" + # INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode) + # INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode) + # INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode) # Exposed through the ingress controller "tfs-ingress" GRAFANA_HOSTNAME="127.0.0.1" @@ -234,6 +224,8 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring" # Updated Grafana API URL GRAFANA_URL_UPDATED="http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}" + echo "export GRAFANA_URL_UPDATED=${GRAFANA_URL_UPDATED}" >> $ENV_VARS_SCRIPT + echo "Connecting to grafana at URL: ${GRAFANA_URL_DEFAULT}..." # Configure Grafana Admin Password @@ -245,26 +237,38 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring" }' ${GRAFANA_URL_DEFAULT}/api/user/password echo - # Create InfluxDB DataSource # Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/ - curl -X POST -H "Content-Type: application/json" -d '{ - "type" : "influxdb", - "name" : "InfluxDB", - "url" : "'"$INFLUXDB_URL"'", + # TODO: replace user, password and database by variables to be saved + echo "Creating a datasource..." 
+ curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ "access" : "proxy", - "basicAuth": false, - "user" : "'"$INFLUXDB_USER"'", - "password" : "'"$INFLUXDB_PASSWORD"'", + "type" : "postgres", + "name" : "monitoringdb", + "url" : "monitoringservice:8812", + "database" : "monitoring", + "user" : "admin", + "password" : "quest", + "basicAuth" : false, "isDefault": true, - "database" : "'"$INFLUXDB_DATABASE"'" + "jsonData" : { + "sslmode" : "disable", + "postgresVersion" : 1100, + "tlsAuth" : false, + "tlsAuthWithCACert": false, + "tlsConfigurationMethod": "file-path", + "tlsSkipVerify": true + }, + "secureJsonFields" : { + "password" : true + } }' ${GRAFANA_URL_UPDATED}/api/datasources echo # Create Monitoring Dashboard # Ref: https://grafana.com/docs/grafana/latest/http_api/dashboard/ curl -X POST -H "Content-Type: application/json" \ - -d '@src/webui/grafana_dashboard.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db + -d '@src/webui/grafana_dashboard.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db echo DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tf-l3-monit" diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml index 7f0bee9efc68e66c72487624241e763dccb2fc76..3924ba2d116a522b23fbfd272fd1bb23c2f0572c 100644 --- a/manifests/monitoringservice.yaml +++ b/manifests/monitoringservice.yaml @@ -13,13 +13,14 @@ # limitations under the License. apiVersion: apps/v1 -kind: Deployment +kind: StatefulSet metadata: - name: monitoringservice + name: monitoringdb spec: selector: matchLabels: app: monitoringservice + serviceName: "monitoringservice" replicas: 1 template: metadata: @@ -32,35 +33,50 @@ spec: - name: metricsdb image: questdb/questdb ports: - - containerPort: 9000 - - containerPort: 9009 - - containerPort: 9003 + - name: http + containerPort: 9000 + protocol: TCP + - name: influxdb + containerPort: 9009 + protocol: TCP + - name: postgre + containerPort: 8812 + protocol: TCP env: - name: QDB_CAIRO_COMMIT_LAG value: "1000" - name: QDB_CAIRO_MAX_UNCOMMITTED_ROWS value: "100000" - readinessProbe: - exec: - command: ["curl", "-XGET", "localhost:9000"] - livenessProbe: - exec: - command: ["curl", "-XGET", "localhost:9003/metrics"] - resources: - requests: - cpu: 250m - memory: 512Mi - limits: - cpu: 700m - memory: 1024Mi +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: monitoringserver +spec: + selector: + matchLabels: + app: monitoringservice + replicas: 1 + template: + metadata: + labels: + app: monitoringservice + spec: + terminationGracePeriodSeconds: 5 + restartPolicy: Always + containers: - name: server image: registry.gitlab.com/teraflow-h2020/controller/monitoring:latest imagePullPolicy: Always ports: - - containerPort: 7070 + - name: grpc + containerPort: 7070 + protocol: TCP env: + - name: LOG_LEVEL + value: "INFO" - name: METRICSDB_HOSTNAME - value: "localhost" + value: "monitoringservice" - name: METRICSDB_ILP_PORT value: "9009" - name: METRICSDB_REST_PORT @@ -73,14 +89,6 @@ spec: livenessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:7070"] - resources: - requests: - cpu: 250m - memory: 512Mi - limits: - cpu: 700m - memory: 1024Mi - --- apiVersion: v1 kind: Service @@ -95,7 +103,37 @@ spec: protocol: TCP port: 7070 targetPort: 7070 - - name: questdb + - name: http protocol: TCP port: 9000 - targetPort: 9000 \ No newline at end of file + targetPort: 9000 + - name: influxdb + protocol: TCP + port: 9009 + targetPort: 9009 + - name: postgre + protocol: TCP + port: 8812 + targetPort: 8812 + 
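The `postgre` port (8812) exposed by this Service is QuestDB's PostgreSQL wire protocol, which is what the Grafana datasource created in `deploy.sh` above connects to; `admin`/`quest` are QuestDB's default credentials for that protocol, and `monitoring` matches the table the monitoring component creates. Since the TODO in `deploy.sh` asks for these hard-coded values to be replaced by variables, a minimal sketch of what that could look like follows (the `METRICSDB_PG_*` names are illustrative assumptions, not defined anywhere in this patch):

```
# Sketch only: the METRICSDB_PG_* variable names are assumptions, not yet part of deploy.sh.
# QuestDB defaults: PostgreSQL wire protocol on port 8812, credentials admin/quest.
METRICSDB_PG_URL="monitoringservice:8812"
METRICSDB_PG_USER="admin"
METRICSDB_PG_PASSWORD="quest"
METRICSDB_TABLE="monitoring"

curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{
  "access"    : "proxy",
  "type"      : "postgres",
  "name"      : "monitoringdb",
  "url"       : "'"${METRICSDB_PG_URL}"'",
  "database"  : "'"${METRICSDB_TABLE}"'",
  "user"      : "'"${METRICSDB_PG_USER}"'",
  "password"  : "'"${METRICSDB_PG_PASSWORD}"'",
  "basicAuth" : false,
  "isDefault" : true,
  "jsonData"  : { "sslmode": "disable", "postgresVersion": 1100 }
}' ${GRAFANA_URL_UPDATED}/api/datasources

# Saving the values next to the other runtime variables (ENV_VARS_SCRIPT is
# defined earlier in deploy.sh) would let other scripts and tests reuse them:
echo "export METRICSDB_PG_URL=${METRICSDB_PG_URL}" >> $ENV_VARS_SCRIPT
```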
+--- +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: access-monitoring +spec: + podSelector: + matchLabels: + app: monitoringservice + ingress: + - from: [] + ports: + - port: 7070 + - port: 8812 + - from: + - podSelector: + matchLabels: + app: monitoringservice + ports: + - port: 9009 + - port: 9000 diff --git a/manifests/webuiservice.yaml b/manifests/webuiservice.yaml index 4c399861ad810f99161525a459f3d83db347aeaa..cac64a816075f1a0ad91a21c519463aa5cd8f973 100644 --- a/manifests/webuiservice.yaml +++ b/manifests/webuiservice.yaml @@ -40,7 +40,7 @@ spec: - name: LOG_LEVEL value: "DEBUG" - name: WEBUISERVICE_SERVICE_BASEURL_HTTP - value: "/webui" + value: "/webui/" readinessProbe: httpGet: path: /healthz/ready @@ -61,7 +61,7 @@ spec: cpu: 700m memory: 1024Mi - name: grafana - image: grafana/grafana:8.2.6 + image: grafana/grafana:8.5.11 imagePullPolicy: IfNotPresent ports: - containerPort: 3000 diff --git a/scripts/old/open_dashboard.sh b/scripts/old/open_dashboard.sh index a3864d2d04c5fb0b3220967f13c324cd554e7963..d0529a00921be896ae976c86d10d67139719de9c 100755 --- a/scripts/old/open_dashboard.sh +++ b/scripts/old/open_dashboard.sh @@ -18,8 +18,8 @@ K8S_NAMESPACE=${K8S_NAMESPACE:-'tfs'} -GRAFANA_IP=$(kubectl get service/webuiservice -n ${K8S_NAMESPACE} -o jsonpath='{.spec.clusterIP}') -GRAFANA_PORT=$(kubectl get service webuiservice-public --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}') +GRAFANA_IP=$(kubectl get service/webuiservice -n ${TFS_K8S_NAMESPACE} -o jsonpath='{.spec.clusterIP}') +GRAFANA_PORT=3000 #$(kubectl get service webuiservice --namespace $TFS_K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}') URL=http://${GRAFANA_IP}:${GRAFANA_PORT} echo Opening Dashboard on URL ${URL} diff --git a/scripts/old/open_webui.sh b/scripts/old/open_webui.sh index 2d4659bf6080a989774050b7ff36ddb88ba41eae..d539c1970adb7882c9621fc909acf21c2dde743a 100755 --- a/scripts/old/open_webui.sh +++ b/scripts/old/open_webui.sh @@ -23,6 +23,58 @@ WEBUI_PORT=8004 # GRAFANA_PORT=$(kubectl get service ${WEBUI_SERVICE_NAME} --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}') GRAFANA_PORT=3000 +echo "Configuring WebUI DataStores and Dashboards..." 
+sleep 3 +INFLUXDB_HOST="monitoringservice" +INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}') +INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}" +INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode) +INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode) +INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode) +# Exposed through the ingress controller "tfs-ingress" +# GRAFANA_HOSTNAME="127.0.0.1" +# GRAFANA_PORT="80" +# GRAFANA_BASEURL="/grafana" +# Default Grafana credentials +GRAFANA_USERNAME="admin" +GRAFANA_PASSWORD="admin" +# Default Grafana API URL +GRAFANA_URL_DEFAULT=http://${GRAFANA_USERNAME}:${GRAFANA_PASSWORD}@${WEBUI_IP}:${GRAFANA_PORT} #"http://${GRAFANA_USERNAME}:${GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}" +# Updated Grafana API URL +GRAFANA_URL_UPDATED=http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${WEBUI_IP}:${GRAFANA_PORT} #"http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}" +echo "Connecting to grafana at URL: ${GRAFANA_URL_DEFAULT}..." +# Configure Grafana Admin Password +# Ref: https://grafana.com/docs/grafana/latest/http_api/user/#change-password +curl -X PUT -H "Content-Type: application/json" -d '{ + "oldPassword": "'${GRAFANA_PASSWORD}'", + "newPassword": "'${TFS_GRAFANA_PASSWORD}'", + "confirmNew" : "'${TFS_GRAFANA_PASSWORD}'" +}' ${GRAFANA_URL_DEFAULT}/api/user/password +echo +# Create InfluxDB DataSource +# Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/ +curl -X POST -H "Content-Type: application/json" -d '{ + "type" : "influxdb", + "name" : "InfluxDB", + "url" : "'"$INFLUXDB_URL"'", + "access" : "proxy", + "basicAuth": false, + "user" : "'"$INFLUXDB_USER"'", + "password" : "'"$INFLUXDB_PASSWORD"'", + "isDefault": true, + "database" : "'"$INFLUXDB_DATABASE"'" +}' ${GRAFANA_URL_UPDATED}/api/datasources +echo +# Create Monitoring Dashboard +# Ref: https://grafana.com/docs/grafana/latest/http_api/dashboard/ +curl -X POST -H "Content-Type: application/json" \ +-d '@src/webui/grafana_dashboard.json' \ +${GRAFANA_URL_UPDATED}/api/dashboards/db +echo +DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tf-l3-monit" +DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') +curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + # Open WebUI UI_URL="http://${WEBUI_IP}:${WEBUI_PORT}" echo "Opening web UI on URL ${UI_URL}" diff --git a/scripts/show_logs_monitoring.sh b/scripts/show_logs_monitoring.sh index bd37d0d142f76d532219f1dcdcbd229914d3b0b5..520a9da1c652553eb90acd083caf5724275f4efe 100755 --- a/scripts/show_logs_monitoring.sh +++ b/scripts/show_logs_monitoring.sh @@ -24,4 +24,4 @@ export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"} # Automated steps start here ######################################################################################################################## -kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringservice -c server +kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringserver diff --git a/src/compute/tests/Constants.py b/src/compute/tests/Constants.py index 
640124b07fd8e5dc0dff0635175b1499544f1b2d..cb1331c7445888070c6c3d5ecef6136f9f149916 100644 --- a/src/compute/tests/Constants.py +++ b/src/compute/tests/Constants.py @@ -82,4 +82,4 @@ SERVICE_CONNECTION_POINTS_2 = [ {'service_endpoint_id': 'ep-3', 'service_endpoint_encapsulation_type': 'dot1q', 'service_endpoint_encapsulation_info': {'vlan': 1234}}, -] +] \ No newline at end of file diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml index 246b29bd42a889b0662a8ab0cb8b198e8f4b92ab..ef3a8c39a045dd059f8a7942223bdc20775ae92c 100644 --- a/src/monitoring/.gitlab-ci.yml +++ b/src/monitoring/.gitlab-ci.yml @@ -56,7 +56,7 @@ unit test monitoring: - docker pull questdb/questdb - docker run --name questdb -d -p 9000:9000 -p 9009:9009 -p 8812:8812 -p 9003:9003 -e QDB_CAIRO_COMMIT_LAG=1000 -e QDB_CAIRO_MAX_UNCOMMITTED_ROWS=100000 --network=teraflowbridge --rm questdb/questdb - sleep 10 - - docker run --name $IMAGE_NAME -d -p 7070:7070 --env METRICSDB_HOSTNAME=localhost --env METRICSDB_ILP_PORT=9009 --env METRICSDB_REST_PORT=9000 --env METRICSDB_TABLE=monitoring -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG + - docker run --name $IMAGE_NAME -d -p 7070:7070 --env METRICSDB_HOSTNAME=questdb --env METRICSDB_ILP_PORT=9009 --env METRICSDB_REST_PORT=9000 --env METRICSDB_TABLE=monitoring -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG - sleep 30 - docker ps -a - docker logs $IMAGE_NAME diff --git a/src/monitoring/requirements.in b/src/monitoring/requirements.in index 95953f100c448557471d112cd1e5e8a072320b30..50f283a1940ed99d16276857d2cab22220921879 100644 --- a/src/monitoring/requirements.in +++ b/src/monitoring/requirements.in @@ -16,6 +16,7 @@ pytz==2021.3 redis==4.1.2 requests==2.27.1 xmltodict==0.12.0 +questdb==1.0.1 # pip's dependency resolver does not take into account installed packages. 
# p4runtime does not specify the version of grpcio/protobuf it needs, so it tries to install latest one diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py index cbcf920f1c5dc98a18b0e48a123bc6490f55737c..4999d2a95991d79ed5417948e220d35aa668c653 100644 --- a/src/monitoring/service/EventTools.py +++ b/src/monitoring/service/EventTools.py @@ -19,16 +19,13 @@ import grpc from common.rpc_method_wrapper.ServiceExceptions import ServiceException from context.client.ContextClient import ContextClient -#from common.proto import kpi_sample_types_pb2 + from common.proto.context_pb2 import Empty, EventTypeEnum -from common.logger import getJSONLogger from monitoring.client.MonitoringClient import MonitoringClient +from monitoring.service.MonitoringServiceServicerImpl import LOGGER from common.proto import monitoring_pb2 -LOGGER = getJSONLogger('monitoringservice-server') -LOGGER.setLevel('DEBUG') - class EventsDeviceCollector: def __init__(self) -> None: # pylint: disable=redefined-outer-name self._events_queue = Queue() @@ -74,7 +71,7 @@ class EventsDeviceCollector: kpi_id_list = [] while not self._events_queue.empty(): - LOGGER.info('getting Kpi by KpiID') + # LOGGER.info('getting Kpi by KpiID') event = self.get_event(block=True) if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE: device = self._context_client.GetDevice(event.device_id) diff --git a/src/monitoring/service/MetricsDBTools.py b/src/monitoring/service/MetricsDBTools.py index ea6180aa072bd48a04f26d019ba1e4ab9e08af88..dc194c430c9700a2d89e0757c75c64025082ac29 100644 --- a/src/monitoring/service/MetricsDBTools.py +++ b/src/monitoring/service/MetricsDBTools.py @@ -12,41 +12,64 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from influx_line_protocol import Metric -import socket +from questdb.ingress import Sender, IngressError import requests import json -import sys +import logging +import datetime + +LOGGER = logging.getLogger(__name__) class MetricsDB(): def __init__(self, host, ilp_port, rest_port, table): - self.socket=socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.host=host - self.ilp_port=ilp_port - self.rest_port=rest_port - self.table=table + self.host=host + self.ilp_port=int(ilp_port) + self.rest_port=rest_port + self.table=table + self.create_table() def write_KPI(self,time,kpi_id,kpi_sample_type,device_id,endpoint_id,service_id,kpi_value): - self.socket.connect((self.host,self.ilp_port)) - metric = Metric(self.table) - metric.with_timestamp(time) - metric.add_tag('kpi_id', kpi_id) - metric.add_tag('kpi_sample_type', kpi_sample_type) - metric.add_tag('device_id', device_id) - metric.add_tag('endpoint_id', endpoint_id) - metric.add_tag('service_id', service_id) - metric.add_value('kpi_value', kpi_value) - str_metric = str(metric) - str_metric += "\n" - self.socket.sendall((str_metric).encode()) - self.socket.close() + counter=0 + number_of_retries=10 + while (counter<number_of_retries): + try: + with Sender(self.host, self.ilp_port) as sender: + sender.row( + self.table, + symbols={ + 'kpi_id': kpi_id, + 'kpi_sample_type': kpi_sample_type, + 'device_id': device_id, + 'endpoint_id': endpoint_id, + 'service_id': service_id}, + columns={ + 'kpi_value': kpi_value}, + at=datetime.datetime.fromtimestamp(time)) + sender.flush() + counter=number_of_retries + LOGGER.info(f"KPI written") + except IngressError as ierr: + # LOGGER.info(ierr) + # LOGGER.info(f"Ingress Retry number {counter}") + counter=counter+1 + def run_query(self, sql_query): - query_params = {'query': sql_query, 'fmt' : 'json'} - url = f"http://{self.host}:{self.rest_port}/exec" - try: - response = requests.get(url, params=query_params) - json_response = json.loads(response.text) - print(json_response) - except requests.exceptions.RequestException as e: - print(f'Error: {e}', file=sys.stderr) + query_params = {'query': sql_query, 'fmt' : 'json'} + url = f"http://{self.host}:{self.rest_port}/exec" + response = requests.get(url, params=query_params) + json_response = json.loads(response.text) + LOGGER.info(f"Query executed, result:{json_response}") + + def create_table(self): + query = f'CREATE TABLE IF NOT EXISTS {self.table}'\ + '(kpi_id SYMBOL,'\ + 'kpi_sample_type SYMBOL,'\ + 'device_id SYMBOL,'\ + 'endpoint_id SYMBOL,'\ + 'service_id SYMBOL,'\ + 'timestamp TIMESTAMP,'\ + 'kpi_value DOUBLE)'\ + 'TIMESTAMP(timestamp);' + self.run_query(query) + LOGGER.info(f"Table {self.table} created") diff --git a/src/monitoring/service/MonitoringServiceServicerImpl.py b/src/monitoring/service/MonitoringServiceServicerImpl.py index d9f8b1e100bada795f8d6c91a796f458da8d212f..df3b907415aabe0ed4c276ac6ac09582636ebe6b 100644 --- a/src/monitoring/service/MonitoringServiceServicerImpl.py +++ b/src/monitoring/service/MonitoringServiceServicerImpl.py @@ -18,6 +18,7 @@ from typing import Iterator from common.Constants import ServiceNameEnum from common.Settings import get_setting, get_service_port_grpc, get_service_host +from common.logger import getJSONLogger from common.proto.context_pb2 import Empty from common.proto.device_pb2 import MonitoringSettings from common.proto.kpi_sample_types_pb2 import KpiSampleType @@ -26,23 +27,23 @@ from common.proto.monitoring_pb2 import AlarmResponse, AlarmDescriptor, AlarmIDL KpiDescriptor, KpiList, KpiQuery, 
SubsDescriptor, SubscriptionID, AlarmID, KpiDescriptorList, \ MonitorKpiRequest, Kpi, AlarmSubscription from common.rpc_method_wrapper.ServiceExceptions import ServiceException -from common.tools.timestamp.Converters import timestamp_float_to_string from monitoring.service import SqliteTools, MetricsDBTools from device.client.DeviceClient import DeviceClient from prometheus_client import Counter, Summary -LOGGER = logging.getLogger(__name__) +LOGGER = getJSONLogger('monitoringservice-server') +LOGGER.setLevel('DEBUG') MONITORING_GETINSTANTKPI_REQUEST_TIME = Summary( 'monitoring_getinstantkpi_processing_seconds', 'Time spent processing monitoring instant kpi request') MONITORING_INCLUDEKPI_COUNTER = Counter('monitoring_includekpi_counter', 'Monitoring include kpi request counter') -METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME") -METRICSDB_ILP_PORT = os.environ.get("METRICSDB_ILP_PORT") +METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME") +METRICSDB_ILP_PORT = os.environ.get("METRICSDB_ILP_PORT") METRICSDB_REST_PORT = os.environ.get("METRICSDB_REST_PORT") -METRICSDB_TABLE = os.environ.get("METRICSDB_TABLE") +METRICSDB_TABLE = os.environ.get("METRICSDB_TABLE") DEVICESERVICE_SERVICE_HOST = get_setting('DEVICESERVICE_SERVICE_HOST', default=get_service_host(ServiceNameEnum.DEVICE) ) @@ -57,8 +58,8 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer): self.sql_db = SqliteTools.SQLite('monitoring.db') self.deviceClient = DeviceClient(host=DEVICESERVICE_SERVICE_HOST, port=DEVICESERVICE_SERVICE_PORT_GRPC) # instantiate the client - # Set metrics_db client self.metrics_db = MetricsDBTools.MetricsDB(METRICSDB_HOSTNAME,METRICSDB_ILP_PORT,METRICSDB_REST_PORT,METRICSDB_TABLE) + LOGGER.info('MetricsDB initialized') # SetKpi (SetKpiRequest) returns (KpiId) {} def SetKpi( @@ -80,7 +81,6 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer): kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id) kpi_id.kpi_id.uuid = str(data) - # CREATEKPI_COUNTER_COMPLETED.inc() return kpi_id except ServiceException as e: @@ -161,7 +161,7 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer): deviceId = kpiDescriptor.device_id.device_uuid.uuid endpointId = kpiDescriptor.endpoint_id.endpoint_uuid.uuid serviceId = kpiDescriptor.service_id.service_uuid.uuid - time_stamp = timestamp_float_to_string(request.timestamp.timestamp) + time_stamp = request.timestamp.timestamp kpi_value = getattr(request.kpi_value, request.kpi_value.WhichOneof('value')) # Build the structure to be included as point in the MetricsDB diff --git a/src/monitoring/service/__main__.py b/src/monitoring/service/__main__.py index e37412fa004704d089a8e00bada8033d8abe53bd..3334a860ccd94d51390ab5f5869d25e2475084ee 100644 --- a/src/monitoring/service/__main__.py +++ b/src/monitoring/service/__main__.py @@ -45,8 +45,8 @@ def start_monitoring(): # Create Monitor Kpi Requests monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() monitor_kpi_request.kpi_id.CopyFrom(kpi_id) - monitor_kpi_request.sampling_duration_s = 86400 - monitor_kpi_request.sampling_interval_s = 30 + monitor_kpi_request.monitoring_window_s = 86400 + monitor_kpi_request.sampling_rate_s = 30 events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) else: # Terminate is set, looping terminates diff --git a/src/monitoring/tests/Messages.py b/src/monitoring/tests/Messages.py index 7b7f4150e5c084bbf25c6a4d9c1c47b70e3f76a0..cf81ceed1e134240415ec1aabe8796cd4486f75f 100644 --- a/src/monitoring/tests/Messages.py +++ 
b/src/monitoring/tests/Messages.py @@ -11,17 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import datetime from common.proto import monitoring_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType -from common.tools.timestamp.Converters import timestamp_string_to_float +from common.tools.timestamp.Converters import timestamp_string_to_float, timestamp_utcnow_to_float -def kpi(): - _kpi = monitoring_pb2.Kpi() - _kpi.kpi_id.kpi_id.uuid = 'KPIID0000' # pylint: disable=maybe-no-member - return _kpi - def kpi_id(): _kpi_id = monitoring_pb2.KpiId() _kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member @@ -43,9 +39,9 @@ def monitor_kpi_request(kpi_uuid, monitoring_window_s, sampling_rate_s): _monitor_kpi_request.sampling_rate_s = sampling_rate_s return _monitor_kpi_request -def include_kpi_request(): +def include_kpi_request(kpi_id): _include_kpi_request = monitoring_pb2.Kpi() - _include_kpi_request.kpi_id.kpi_id.uuid = str(1) # pylint: disable=maybe-no-member - _include_kpi_request.timestamp.timestamp = timestamp_string_to_float("2021-10-12T13:14:42Z") - _include_kpi_request.kpi_value.int32Val = 500 # pylint: disable=maybe-no-member + _include_kpi_request.kpi_id.kpi_id.uuid = kpi_id.kpi_id.uuid + _include_kpi_request.timestamp.timestamp = timestamp_utcnow_to_float() + _include_kpi_request.kpi_value.int32Val = 500 # pylint: disable=maybe-no-member return _include_kpi_request diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py index 45f55cb05f8c0cb5c9cdb2b3f02ed70c80285009..b62b5f97f965beb75ddaafa122ac8f026faab686 100644 --- a/src/monitoring/tests/test_unitary.py +++ b/src/monitoring/tests/test_unitary.py @@ -12,16 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import copy, logging, os, pytest +import copy, os, pytest from time import sleep from typing import Tuple from common.Constants import ServiceNameEnum from common.Settings import ( ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc) +from common.logger import getJSONLogger from common.orm.Database import Database from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum from common.message_broker.MessageBroker import MessageBroker +from common.proto import monitoring_pb2 from common.proto.monitoring_pb2 import KpiId, KpiDescriptor from context.client.ContextClient import ContextClient @@ -38,17 +40,17 @@ from device.service.drivers import DRIVERS # pylint: disable=wrong-import-posit # pylint: disable=wrong-import-position from monitoring.client.MonitoringClient import MonitoringClient -from common.proto import context_pb2, monitoring_pb2 from common.proto.kpi_sample_types_pb2 import KpiSampleType from monitoring.service import SqliteTools, MetricsDBTools from monitoring.service.MonitoringService import MonitoringService from monitoring.service.EventTools import EventsDeviceCollector -from monitoring.tests.Messages import create_kpi_request, include_kpi_request, kpi, kpi_id, monitor_kpi_request +from monitoring.tests.Messages import create_kpi_request, include_kpi_request, monitor_kpi_request from monitoring.tests.Objects import DEVICE_DEV1, DEVICE_DEV1_CONNECT_RULES, DEVICE_DEV1_UUID +from monitoring.service.MonitoringServiceServicerImpl import LOGGER -LOGGER = logging.getLogger(__name__) -LOGGER.setLevel(logging.DEBUG) +# LOGGER = getJSONLogger('monitoringservice-server') +# LOGGER.setLevel('DEBUG') ########################### # Tests Setup @@ -180,7 +182,7 @@ def test_monitor_kpi( monitoring_client : MonitoringClient, # pylint: disable=redefined-outer-name context_db_mb : Tuple[Database, MessageBroker] # pylint: disable=redefined-outer-name ): - LOGGER.warning('test_monitor_kpi begin') + LOGGER.info('test_monitor_kpi begin') context_database = context_db_mb[0] @@ -213,14 +215,14 @@ def test_monitor_kpi( def test_include_kpi(monitoring_client): # pylint: disable=redefined-outer-name # make call to server LOGGER.warning('test_include_kpi requesting') - response = monitoring_client.IncludeKpi(include_kpi_request()) - LOGGER.debug(str(response)) + kpi_id = monitoring_client.SetKpi(create_kpi_request()) + response = monitoring_client.IncludeKpi(include_kpi_request(kpi_id)) assert isinstance(response, Empty) # Test case that makes use of client fixture to test server's GetStreamKpi method def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-name LOGGER.warning('test_getstream_kpi begin') - response = monitoring_client.GetStreamKpi(kpi()) + response = monitoring_client.GetStreamKpi(monitoring_pb2.Kpi()) LOGGER.debug(str(response)) #assert isinstance(response, Kpi) @@ -235,8 +237,9 @@ def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-na def test_get_kpidescritor_kpi(monitoring_client): # pylint: disable=redefined-outer-name LOGGER.warning('test_getkpidescritor_kpi begin') response = monitoring_client.SetKpi(create_kpi_request()) + # LOGGER.debug(str(response)) response = monitoring_client.GetKpiDescriptor(response) - LOGGER.debug(str(response)) + # LOGGER.debug(str(response)) assert isinstance(response, KpiDescriptor) def 
test_sqlitedb_tools_insert_kpi(sql_db): # pylint: disable=redefined-outer-name diff --git a/src/tests/ofc22/descriptors_emulated.json b/src/tests/ofc22/descriptors_emulated.json index 6beb1427ebb22f22bcb24b73c173d33d4352cdb4..83f9c39e2ac7154b088ccdd0a1519ea32c1aee1d 100644 --- a/src/tests/ofc22/descriptors_emulated.json +++ b/src/tests/ofc22/descriptors_emulated.json @@ -105,4 +105,4 @@ ] } ] -} +} \ No newline at end of file diff --git a/src/tests/ofc22/run_test_01_bootstrap.sh b/src/tests/ofc22/run_test_01_bootstrap.sh index be30b15189786de3fd2f593a1584c73890e9e4fe..ef23c28eb0af1158bd31dd9f35f330e7225bdd07 100755 --- a/src/tests/ofc22/run_test_01_bootstrap.sh +++ b/src/tests/ofc22/run_test_01_bootstrap.sh @@ -13,4 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# make sure to source the following scripts: +# - my_deploy.sh +# - tfs_runtime_env_vars.sh + pytest --verbose src/tests/ofc22/tests/test_functional_bootstrap.py diff --git a/src/tests/ofc22/tests/LoadDescriptors.py b/src/tests/ofc22/tests/LoadDescriptors.py index 4d3af78f5c9a3fd9b09d94f24bb8aaec48af6b7a..33bc699af933601e4c6d4b8dbc7b0c51206241ef 100644 --- a/src/tests/ofc22/tests/LoadDescriptors.py +++ b/src/tests/ofc22/tests/LoadDescriptors.py @@ -15,7 +15,7 @@ import json, logging, sys from common.Settings import get_setting from context.client.ContextClient import ContextClient -from context.proto.context_pb2 import Context, Device, Link, Topology +from common.proto.context_pb2 import Context, Device, Link, Topology from device.client.DeviceClient import DeviceClient LOGGER = logging.getLogger(__name__) diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py index bda08d7761ab3ad794246e6f94932c147a787993..d2fb32ebb20b7bcdda9ac12b7a7390c46e6fb1d1 100644 --- a/src/tests/ofc22/tests/Objects.py +++ b/src/tests/ofc22/tests/Objects.py @@ -21,7 +21,7 @@ from common.tools.object_factory.Device import ( from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id from common.tools.object_factory.Link import json_link, json_link_id from common.tools.object_factory.Topology import json_topology, json_topology_id -from context.proto.kpi_sample_types_pb2 import KpiSampleType +from common.proto.kpi_sample_types_pb2 import KpiSampleType # ----- Context -------------------------------------------------------------------------------------------------------- CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID) @@ -228,4 +228,4 @@ DEVICES = [ (DEVICE_O1, DEVICE_O1_CONNECT_RULES), ] -LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1] +LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1] \ No newline at end of file diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py index 334d7894babedfed2ffb30e4682a1d56e4261cb9..56231512d49542f4cca4a0850767409d340d6852 100644 --- a/src/tests/ofc22/tests/test_functional_bootstrap.py +++ b/src/tests/ofc22/tests/test_functional_bootstrap.py @@ -21,7 +21,7 @@ from common.tools.object_factory.Link import json_link_id from common.tools.object_factory.Topology import json_topology_id from context.client.ContextClient import ContextClient from context.client.EventsCollector import EventsCollector -from context.proto.context_pb2 import Context, ContextId, Device, Empty, Link, Topology +from common.proto.context_pb2 import Context, ContextId, Device, Empty, Link, Topology from device.client.DeviceClient import DeviceClient from .Objects import 
CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES diff --git a/src/tests/ofc22/tests/test_functional_cleanup.py b/src/tests/ofc22/tests/test_functional_cleanup.py index eb78a585079e3ee757a836433bf23423a3ad899d..6c3a79dfd2e1e46d5c3e4b2d5e33f0ae42decd00 100644 --- a/src/tests/ofc22/tests/test_functional_cleanup.py +++ b/src/tests/ofc22/tests/test_functional_cleanup.py @@ -21,7 +21,7 @@ from common.tools.object_factory.Link import json_link_id from common.tools.object_factory.Topology import json_topology_id from context.client.ContextClient import ContextClient from context.client.EventsCollector import EventsCollector -from context.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId +from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId from device.client.DeviceClient import DeviceClient from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES diff --git a/src/tests/ofc22/tests/test_functional_create_service.py b/src/tests/ofc22/tests/test_functional_create_service.py index 31572e7ba0854a394607fb705aa52b9caeb08085..a76cc444e0327768c62375980fb5baa82d4ce521 100644 --- a/src/tests/ofc22/tests/test_functional_create_service.py +++ b/src/tests/ofc22/tests/test_functional_create_service.py @@ -23,7 +23,7 @@ from common.tools.grpc.Tools import grpc_message_to_json_string from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from context.client.EventsCollector import EventsCollector -from context.proto.context_pb2 import ContextId, Empty +from common.proto.context_pb2 import ContextId, Empty from .Objects import ( CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES, WIM_MAPPING, WIM_PASSWORD, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE, WIM_USERNAME) diff --git a/src/tests/ofc22/tests/test_functional_delete_service.py b/src/tests/ofc22/tests/test_functional_delete_service.py index b4bc621d294245a6286e77483e3074f95533fd4e..fed2300c6508d9bcfe19acc1a36a09797ccafee8 100644 --- a/src/tests/ofc22/tests/test_functional_delete_service.py +++ b/src/tests/ofc22/tests/test_functional_delete_service.py @@ -23,7 +23,7 @@ from common.tools.grpc.Tools import grpc_message_to_json_string from compute.tests.mock_osm.MockOSM import MockOSM from context.client.ContextClient import ContextClient from context.client.EventsCollector import EventsCollector -from context.proto.context_pb2 import ContextId, Empty +from common.proto.context_pb2 import ContextId, Empty from .Objects import ( CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES, WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME) diff --git a/src/webui/grafana_backup_dashboard.json b/src/webui/grafana_backup_dashboard.json new file mode 100644 index 0000000000000000000000000000000000000000..58a856a6c50de422b1f6bde1e2799d53762db916 --- /dev/null +++ b/src/webui/grafana_backup_dashboard.json @@ -0,0 +1,320 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "gnetId": null, + "graphTooltip": 0, + "id": 1, + "iteration": 1664282779131, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": null, + "fieldConfig": { + "defaults": { + "color": 
{ + "mode": "palette-classic" + }, + "custom": { + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "smooth", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [ + { + "matcher": { + "id": "byRegexp", + "options": ".* PACKETS_.*" + }, + "properties": [ + { + "id": "custom.axisPlacement", + "value": "left" + }, + { + "id": "unit", + "value": "pps" + }, + { + "id": "custom.axisLabel", + "value": "Packets / sec" + }, + { + "id": "custom.axisSoftMin", + "value": 0 + } + ] + }, + { + "matcher": { + "id": "byRegexp", + "options": ".* BYTES_.*" + }, + "properties": [ + { + "id": "custom.axisPlacement", + "value": "right" + }, + { + "id": "unit", + "value": "Bps" + }, + { + "id": "custom.axisLabel", + "value": "Bytes / sec" + }, + { + "id": "custom.axisSoftMin", + "value": 0 + } + ] + } + ] + }, + "gridPos": { + "h": 19, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 2, + "options": { + "legend": { + "calcs": [ + "first", + "min", + "mean", + "max", + "lastNotNull" + ], + "displayMode": "table", + "placement": "right" + }, + "tooltip": { + "mode": "multi" + } + }, + "targets": [ + { + "format": "time_series", + "group": [], + "hide": false, + "metricColumn": "kpi_value", + "rawQuery": false, + "rawSql": "SELECT\n timestamp AS \"time\",\n kpi_value AS metric,\n kpi_value AS \"kpi_value\"\nFROM monitoring\nWHERE\n $__timeFilter(timestamp) AND\n device_id = $device_id AND\n endpoint_id = $endpoint_id\nORDER BY 1,2", + "refId": "A", + "select": [ + [ + { + "params": [ + "kpi_value" + ], + "type": "column" + }, + { + "params": [ + "avg" + ], + "type": "aggregate" + }, + { + "params": [ + "kpi_value" + ], + "type": "alias" + } + ] + ], + "table": "monitoring", + "timeColumn": "timestamp", + "where": [ + { + "name": "$__timeFilter", + "params": [], + "type": "macro" + }, + { + "name": "", + "params": [ + "device_id", + "=", + "$device_id" + ], + "type": "expression" + }, + { + "name": "", + "params": [ + "endpoint_id", + "=", + "$endpoint_id" + ], + "type": "expression" + } + ] + } + ], + "title": "L3 Monitoring Packets/Bytes Received/Sent", + "transformations": [], + "type": "timeseries" + } + ], + "refresh": "", + "schemaVersion": 32, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "selected": true, + "text": [ + "R1-EMU" + ], + "value": [ + "R1-EMU" + ] + }, + "datasource": null, + "definition": "SELECT DISTINCT device_id FROM monitoring;", + "description": null, + "error": null, + "hide": 0, + "includeAll": true, + "label": "Device", + "multi": true, + "name": "device_id", + "options": [], + "query": "SELECT DISTINCT device_id FROM monitoring;", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "allValue": null, + "current": { + "selected": true, + "text": [ + "13/1/2" + ], + "value": [ + "13/1/2" + ] + }, + "datasource": null, + "definition": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})", + "description": null, + "error": null, 
+ "hide": 0, + "includeAll": true, + "label": "EndPoint", + "multi": true, + "name": "endpoint_id", + "options": [], + "query": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "allValue": null, + "current": { + "selected": true, + "text": [ + "All" + ], + "value": [ + "$__all" + ] + }, + "datasource": null, + "definition": "SELECT DISTINCT kpi_sample_type FROM monitoring;", + "description": null, + "error": null, + "hide": 0, + "includeAll": true, + "label": "Kpi Sample Type", + "multi": true, + "name": "kpi_sample_type", + "options": [], + "query": "SELECT DISTINCT kpi_sample_type FROM monitoring;", + "refresh": 2, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "L3 Monitoring", + "uid": "tf-l3-monit", + "version": 3 + } \ No newline at end of file diff --git a/src/webui/grafana_dashboard.json b/src/webui/grafana_dashboard.json index a845ac20c7861b86fd1931452b7802b3f1e57aa8..49148825a973aecca5901ffac2249fed6057f4d0 100644 --- a/src/webui/grafana_dashboard.json +++ b/src/webui/grafana_dashboard.json @@ -193,19 +193,19 @@ "tags": [ { "key": "device_id", - "operator": "=~", + "operator": "=", "value": "/^$device_id$/" }, { "condition": "AND", "key": "endpoint_id", - "operator": "=~", + "operator": "=", "value": "/^$endpoint_id$/" }, { "condition": "AND", "key": "kpi_sample_type", - "operator": "=~", + "operator": "=", "value": "/^$kpi_sample_type$/" } ] @@ -236,7 +236,7 @@ ] }, "datasource": null, - "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"device_id\"", + "definition": "SELECT DISTINCT device_id FROM monitoring;", "description": null, "error": null, "hide": 0, @@ -245,7 +245,7 @@ "multi": true, "name": "device_id", "options": [], - "query": "SHOW TAG VALUES FROM samples WITH KEY=\"device_id\"", + "query": "SELECT DISTINCT device_id FROM monitoring;", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -264,7 +264,7 @@ ] }, "datasource": null, - "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"endpoint_id\" WHERE \"device_id\"=~/^$device_id$/", + "definition": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})", "description": null, "error": null, "hide": 0, @@ -273,7 +273,7 @@ "multi": true, "name": "endpoint_id", "options": [], - "query": "SHOW TAG VALUES FROM samples WITH KEY=\"endpoint_id\" WHERE \"device_id\"=~/^$device_id$/", + "query": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -292,7 +292,7 @@ ] }, "datasource": null, - "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"kpi_sample_type\"", + "definition": "SELECT DISTINCT kpi_sample_type FROM monitoring;", "description": null, "error": null, "hide": 0, @@ -301,7 +301,7 @@ "multi": true, "name": "kpi_sample_type", "options": [], - "query": "SHOW TAG VALUES FROM samples WITH KEY=\"kpi_sample_type\"", + "query": "SELECT DISTINCT kpi_sample_type FROM monitoring;", "refresh": 2, "regex": "", "skipUrlSync": false, diff --git a/tutorial/2-2-ofc22.md b/tutorial/2-2-ofc22.md index d70d72661b733c484d787c3abc10a1cd967ff558..bd308ae30e756e140cf36184c089099a00181365 100644 --- a/tutorial/2-2-ofc22.md +++ b/tutorial/2-2-ofc22.md @@ -49,6 +49,18 @@ Notes: ## 2.2.5. 
Test execution

+Before executing the tests, prepare the required environment variables. First, load your deployment settings:
+
+```
+source my_deploy.sh
+```
+
+Then, load the runtime environment variables generated by the deployment, which the tests rely on:
+
+```
+source tfs_runtime_env_vars.sh
+```
+
 To execute this functional test, four main steps needs to be carried out:
 1. Device bootstrapping
 2. L3VPN Service creation
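As a quick reference, the full sequence for the bootstrapping step combines the two `source` commands above with the pytest invocation from `run_test_01_bootstrap.sh` shown earlier in this patch (assuming everything is run from the repository root):

```
source my_deploy.sh
source tfs_runtime_env_vars.sh
pytest --verbose src/tests/ofc22/tests/test_functional_bootstrap.py
```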