diff --git a/.gitignore b/.gitignore
index e0f8501490a85015a57c7280aeba872fcb2c0692..3e79c7c63c4b544408ddf347f07c720b26d43248 100644
--- a/.gitignore
+++ b/.gitignore
@@ -162,3 +162,6 @@ cython_debug/
 
 # TeraFlowSDN-generated files
 tfs_runtime_env_vars.sh
+delete_local_deployment.sh
+local_docker_deployment.sh
+local_k8s_deployment.sh
diff --git a/deploy.sh b/deploy.sh
index f80974573dfa83ef2c2139d6855a46a16e149746..c5dee68a06e000bf7df90ef437e77c14232f2cec 100755
--- a/deploy.sh
+++ b/deploy.sh
@@ -58,18 +58,6 @@ kubectl delete namespace $TFS_K8S_NAMESPACE
 kubectl create namespace $TFS_K8S_NAMESPACE
 printf "\n"
 
-if [[ "$TFS_COMPONENTS" == *"monitoring"* ]]; then
-    echo "Creating secrets for InfluxDB..."
-    #TODO: make sure to change this when having a production deployment
-    kubectl create secret generic influxdb-secrets --namespace=$TFS_K8S_NAMESPACE \
-        --from-literal=INFLUXDB_DB="monitoring" --from-literal=INFLUXDB_ADMIN_USER="teraflow" \
-        --from-literal=INFLUXDB_ADMIN_PASSWORD="teraflow" --from-literal=INFLUXDB_HTTP_AUTH_ENABLED="True"
-    kubectl create secret generic monitoring-secrets --namespace=$TFS_K8S_NAMESPACE \
-        --from-literal=INFLUXDB_DATABASE="monitoring" --from-literal=INFLUXDB_USER="teraflow" \
-        --from-literal=INFLUXDB_PASSWORD="teraflow" --from-literal=INFLUXDB_HOSTNAME="localhost"
-    printf "\n"
-fi
-
 echo "Deploying components and collecting environment variables..."
 ENV_VARS_SCRIPT=tfs_runtime_env_vars.sh
 echo "# Environment variables for TeraFlowSDN deployment" > $ENV_VARS_SCRIPT
@@ -158,6 +146,8 @@ for COMPONENT in $TFS_COMPONENTS; do
         fi
     fi
 
+    # TODO: harmonize names of the monitoring component
+
     echo "  Deploying '$COMPONENT' component to Kubernetes..."
     DEPLOY_LOG="$TMP_LOGS_FOLDER/deploy_${COMPONENT}.log"
     kubectl --namespace $TFS_K8S_NAMESPACE apply -f "$MANIFEST" > "$DEPLOY_LOG"
@@ -212,12 +202,12 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring"
     echo "Configuring WebUI DataStores and Dashboards..."
     sleep 3
 
-    INFLUXDB_HOST="monitoringservice"
-    INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}')
-    INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}"
-    INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode)
-    INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode)
-    INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode)
+    # INFLUXDB_HOST="monitoringservice"
+    # INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}')
+    # INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}"
+    # INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode)
+    # INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode)
+    # INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode)
 
     # Exposed through the ingress controller "tfs-ingress"
     GRAFANA_HOSTNAME="127.0.0.1"
@@ -234,6 +224,8 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring"
     # Updated Grafana API URL
     GRAFANA_URL_UPDATED="http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}"
 
+    echo "export GRAFANA_URL_UPDATED=${GRAFANA_URL_UPDATED}" >> $ENV_VARS_SCRIPT
+
     echo "Connecting to grafana at URL: ${GRAFANA_URL_DEFAULT}..."
 
     # Configure Grafana Admin Password
@@ -245,26 +237,38 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]] && [[ "$TFS_COMPONENTS" == *"monitoring"
     }' ${GRAFANA_URL_DEFAULT}/api/user/password
     echo
 
-    # Create InfluxDB DataSource
     # Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/
-    curl -X POST -H "Content-Type: application/json" -d '{
-        "type"     : "influxdb",
-        "name"     : "InfluxDB",
-        "url"      : "'"$INFLUXDB_URL"'",
+    # TODO: replace user, password and database by variables to be saved
+    echo "Creating a datasource..."
+    curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{
         "access"   : "proxy",
-        "basicAuth": false,
-        "user"     : "'"$INFLUXDB_USER"'",
-        "password" : "'"$INFLUXDB_PASSWORD"'",
+        "type"     : "postgres",
+        "name"     : "monitoringdb",
+        "url"      : "monitoringservice:8812",
+        "database" : "monitoring",
+        "user"     : "admin",
+        "password" : "quest",
+        "basicAuth" : false,
         "isDefault": true,
-        "database" : "'"$INFLUXDB_DATABASE"'"
+        "jsonData" : {
+            "sslmode"         : "disable",
+            "postgresVersion" : 1100,
+            "tlsAuth"         : false,
+            "tlsAuthWithCACert": false,
+            "tlsConfigurationMethod": "file-path",
+            "tlsSkipVerify": true
+        },
+        "secureJsonData" : {
+            "password" : "quest"
+        }
     }' ${GRAFANA_URL_UPDATED}/api/datasources
     echo
 
     # Create Monitoring Dashboard
     # Ref: https://grafana.com/docs/grafana/latest/http_api/dashboard/
     curl -X POST -H "Content-Type: application/json" \
-    -d '@src/webui/grafana_dashboard.json' \
-    ${GRAFANA_URL_UPDATED}/api/dashboards/db
+        -d '@src/webui/grafana_dashboard.json' \
+        ${GRAFANA_URL_UPDATED}/api/dashboards/db
     echo
 
     DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tf-l3-monit"
diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml
index 7f0bee9efc68e66c72487624241e763dccb2fc76..3924ba2d116a522b23fbfd272fd1bb23c2f0572c 100644
--- a/manifests/monitoringservice.yaml
+++ b/manifests/monitoringservice.yaml
@@ -13,13 +13,14 @@
 # limitations under the License.
 
 apiVersion: apps/v1
-kind: Deployment
+kind: StatefulSet
 metadata:
-  name: monitoringservice
+  name: monitoringdb
 spec:
   selector:
     matchLabels:
       app: monitoringservice
+  serviceName: "monitoringservice"
   replicas: 1
   template:
     metadata:
@@ -32,35 +33,50 @@ spec:
       - name: metricsdb
         image: questdb/questdb
         ports:
-        - containerPort: 9000
-        - containerPort: 9009
-        - containerPort: 9003
+        - name: http
+          containerPort: 9000
+          protocol: TCP
+        - name: influxdb
+          containerPort: 9009
+          protocol: TCP
+        - name: postgre
+          containerPort: 8812
+          protocol: TCP
         env:
         - name: QDB_CAIRO_COMMIT_LAG
           value: "1000"
         - name: QDB_CAIRO_MAX_UNCOMMITTED_ROWS
           value: "100000"
-        readinessProbe:
-          exec:
-            command: ["curl", "-XGET", "localhost:9000"]
-        livenessProbe:
-          exec:
-            command: ["curl", "-XGET", "localhost:9003/metrics"]
-        resources:
-          requests:
-            cpu: 250m
-            memory: 512Mi
-          limits:
-            cpu: 700m
-            memory: 1024Mi
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: monitoringserver
+spec:
+  selector:
+    matchLabels:
+      app: monitoringservice
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: monitoringservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      restartPolicy: Always
+      containers:
       - name: server
         image: registry.gitlab.com/teraflow-h2020/controller/monitoring:latest
         imagePullPolicy: Always
         ports:
-        - containerPort: 7070
+        - name: grpc
+          containerPort: 7070
+          protocol: TCP
         env:
+        - name: LOG_LEVEL
+          value: "INFO"
         - name: METRICSDB_HOSTNAME
-          value: "localhost"
+          value: "monitoringservice"
         - name: METRICSDB_ILP_PORT
           value: "9009"
         - name: METRICSDB_REST_PORT
@@ -73,14 +89,6 @@ spec:
         livenessProbe:
           exec:
             command: ["/bin/grpc_health_probe", "-addr=:7070"]
-        resources:
-          requests:
-            cpu: 250m
-            memory: 512Mi
-          limits:
-            cpu: 700m
-            memory: 1024Mi
-
 ---
 apiVersion: v1
 kind: Service
@@ -95,7 +103,37 @@ spec:
     protocol: TCP
     port: 7070
     targetPort: 7070
-  - name: questdb
+  - name: http
     protocol: TCP
     port: 9000
-    targetPort: 9000
\ No newline at end of file
+    targetPort: 9000
+  - name: influxdb
+    protocol: TCP
+    port: 9009
+    targetPort: 9009
+  - name: postgre
+    protocol: TCP
+    port: 8812
+    targetPort: 8812
+
+---
+apiVersion: networking.k8s.io/v1
+kind: NetworkPolicy
+metadata:
+  name: access-monitoring
+spec:
+  podSelector:
+    matchLabels:
+      app: monitoringservice
+  ingress:
+  - from: []
+    ports:
+    - port: 7070
+    - port: 8812
+  - from:
+    - podSelector:
+        matchLabels:
+          app: monitoringservice
+    ports:
+    - port: 9009
+    - port: 9000
diff --git a/manifests/webuiservice.yaml b/manifests/webuiservice.yaml
index 52fc75a9868001d50f7380cfe238fa344de27f6e..0dba6104d8e78e8365eb9a9dc6ff5e681e0bc57e 100644
--- a/manifests/webuiservice.yaml
+++ b/manifests/webuiservice.yaml
@@ -40,7 +40,7 @@ spec:
         - name: LOG_LEVEL
           value: "DEBUG"
         - name: WEBUISERVICE_SERVICE_BASEURL_HTTP
-          value: "/webui"
+          value: "/webui/"
         readinessProbe:
           httpGet:
             path: /healthz/ready
@@ -61,7 +61,7 @@ spec:
             cpu: 700m
             memory: 1024Mi
       - name: grafana
-        image: grafana/grafana:8.2.6
+        image: grafana/grafana:8.5.11
         imagePullPolicy: IfNotPresent
         ports:
           - containerPort: 3000
diff --git a/scripts/old/deploy_in_kubernetes.sh b/scripts/old/deploy_in_kubernetes.sh
index 5e16120bb3b47e993e1d331ccdef3186380304ec..89f45a5484f95f065f6656249f3fb04bf507a782 100755
--- a/scripts/old/deploy_in_kubernetes.sh
+++ b/scripts/old/deploy_in_kubernetes.sh
@@ -81,7 +81,7 @@ for COMPONENT in $COMPONENTS; do
     if [ "$COMPONENT" == "automation" ] || [ "$COMPONENT" == "policy" ]; then
         docker build -t "$IMAGE_NAME" -f ./src/"$COMPONENT"/Dockerfile ./src/"$COMPONENT"/ > "$BUILD_LOG"
     else 
-        docker build -t "$IMAGE_NAME" -f ./src/"$COMPONENT"/Dockerfile ./src/ > "$BUILD_LOG"
+        docker build -t "$IMAGE_NAME" -f ./src/"$COMPONENT"/Dockerfile . > "$BUILD_LOG"
     fi
 
     if [ -n "$REGISTRY_IMAGE" ]; then
diff --git a/scripts/old/open_dashboard.sh b/scripts/old/open_dashboard.sh
index a3864d2d04c5fb0b3220967f13c324cd554e7963..d0529a00921be896ae976c86d10d67139719de9c 100755
--- a/scripts/old/open_dashboard.sh
+++ b/scripts/old/open_dashboard.sh
@@ -18,8 +18,8 @@
 
 K8S_NAMESPACE=${K8S_NAMESPACE:-'tfs'}
 
-GRAFANA_IP=$(kubectl get service/webuiservice -n ${K8S_NAMESPACE} -o jsonpath='{.spec.clusterIP}')
-GRAFANA_PORT=$(kubectl get service webuiservice-public --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}')
+GRAFANA_IP=$(kubectl get service/webuiservice -n ${K8S_NAMESPACE} -o jsonpath='{.spec.clusterIP}')
+GRAFANA_PORT=3000 #$(kubectl get service webuiservice --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}')
 URL=http://${GRAFANA_IP}:${GRAFANA_PORT}
 
 echo Opening Dashboard on URL ${URL}
diff --git a/scripts/old/open_webui.sh b/scripts/old/open_webui.sh
index 2d4659bf6080a989774050b7ff36ddb88ba41eae..d539c1970adb7882c9621fc909acf21c2dde743a 100755
--- a/scripts/old/open_webui.sh
+++ b/scripts/old/open_webui.sh
@@ -23,6 +23,58 @@ WEBUI_PORT=8004
 # GRAFANA_PORT=$(kubectl get service ${WEBUI_SERVICE_NAME} --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==3000)].nodePort}')
 GRAFANA_PORT=3000
 
+echo "Configuring WebUI DataStores and Dashboards..."
+sleep 3
+INFLUXDB_HOST="monitoringservice"
+INFLUXDB_PORT=$(kubectl --namespace $TFS_K8S_NAMESPACE get service/monitoringservice -o jsonpath='{.spec.ports[?(@.name=="influxdb")].port}')
+INFLUXDB_URL="http://${INFLUXDB_HOST}:${INFLUXDB_PORT}"
+INFLUXDB_USER=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_USER}' | base64 --decode)
+INFLUXDB_PASSWORD=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_ADMIN_PASSWORD}' | base64 --decode)
+INFLUXDB_DATABASE=$(kubectl --namespace $TFS_K8S_NAMESPACE get secrets influxdb-secrets -o jsonpath='{.data.INFLUXDB_DB}' | base64 --decode)
+# Exposed through the ingress controller "tfs-ingress"
+# GRAFANA_HOSTNAME="127.0.0.1"
+# GRAFANA_PORT="80"
+# GRAFANA_BASEURL="/grafana"
+# Default Grafana credentials
+GRAFANA_USERNAME="admin"
+GRAFANA_PASSWORD="admin"
+# Default Grafana API URL
+GRAFANA_URL_DEFAULT=http://${GRAFANA_USERNAME}:${GRAFANA_PASSWORD}@${WEBUI_IP}:${GRAFANA_PORT} #"http://${GRAFANA_USERNAME}:${GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}"
+# Updated Grafana API URL
+GRAFANA_URL_UPDATED=http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${WEBUI_IP}:${GRAFANA_PORT} #"http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_HOSTNAME}:${GRAFANA_PORT}${GRAFANA_BASEURL}"
+echo "Connecting to grafana at URL: ${GRAFANA_URL_DEFAULT}..."
+# Configure Grafana Admin Password
+# Ref: https://grafana.com/docs/grafana/latest/http_api/user/#change-password
+curl -X PUT -H "Content-Type: application/json" -d '{
+    "oldPassword": "'${GRAFANA_PASSWORD}'",
+    "newPassword": "'${TFS_GRAFANA_PASSWORD}'",
+    "confirmNew" : "'${TFS_GRAFANA_PASSWORD}'"
+}' ${GRAFANA_URL_DEFAULT}/api/user/password
+echo
+# Create InfluxDB DataSource
+# Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/
+curl -X POST -H "Content-Type: application/json" -d '{
+    "type"     : "influxdb",
+    "name"     : "InfluxDB",
+    "url"      : "'"$INFLUXDB_URL"'",
+    "access"   : "proxy",
+    "basicAuth": false,
+    "user"     : "'"$INFLUXDB_USER"'",
+    "password" : "'"$INFLUXDB_PASSWORD"'",
+    "isDefault": true,
+    "database" : "'"$INFLUXDB_DATABASE"'"
+}' ${GRAFANA_URL_UPDATED}/api/datasources
+echo
+# Create Monitoring Dashboard
+# Ref: https://grafana.com/docs/grafana/latest/http_api/dashboard/
+curl -X POST -H "Content-Type: application/json" \
+-d '@src/webui/grafana_dashboard.json' \
+${GRAFANA_URL_UPDATED}/api/dashboards/db
+echo
+DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tf-l3-monit"
+DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id')
+curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID}
+
 # Open WebUI
 UI_URL="http://${WEBUI_IP}:${WEBUI_PORT}"
 echo "Opening web UI on URL ${UI_URL}"
diff --git a/scripts/show_logs_monitoring.sh b/scripts/show_logs_monitoring.sh
index bd37d0d142f76d532219f1dcdcbd229914d3b0b5..520a9da1c652553eb90acd083caf5724275f4efe 100755
--- a/scripts/show_logs_monitoring.sh
+++ b/scripts/show_logs_monitoring.sh
@@ -24,4 +24,4 @@ export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
 # Automated steps start here
 ########################################################################################################################
 
-kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringservice -c server
+kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringserver
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py
index d432e5605cfebaa01c2a8faf0cbffcff110d7dbe..b6177cb9cdfca0c63404b77f28eb9bebf8a4a518 100644
--- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py
@@ -51,4 +51,4 @@ BEARER_MAPPINGS = {
     'R4@D2:3/1': ('R4@D2', '3/1', '10.0.2.4', '65002:104', 100, '2.4.3.1', 24),
     'R4@D2:3/2': ('R4@D2', '3/2', '10.0.2.4', '65002:104', 100, '2.4.3.2', 24),
     'R4@D2:3/3': ('R4@D2', '3/3', '10.0.2.4', '65002:104', 100, '2.4.3.3', 24),
-}
+}
diff --git a/src/compute/tests/Constants.py b/src/compute/tests/Constants.py
index 640124b07fd8e5dc0dff0635175b1499544f1b2d..cb1331c7445888070c6c3d5ecef6136f9f149916 100644
--- a/src/compute/tests/Constants.py
+++ b/src/compute/tests/Constants.py
@@ -82,4 +82,4 @@ SERVICE_CONNECTION_POINTS_2 = [
     {'service_endpoint_id': 'ep-3',
         'service_endpoint_encapsulation_type': 'dot1q',
         'service_endpoint_encapsulation_info': {'vlan': 1234}},
-]
+]
diff --git a/src/context/tests/context_report.xml b/src/context/tests/context_report.xml
new file mode 100644
index 0000000000000000000000000000000000000000..5ee1c17cd6f59c58d55a5eba38de7ea0366a757c
--- /dev/null
+++ b/src/context/tests/context_report.xml
@@ -0,0 +1,1539 @@
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="25" skipped="0" tests="51" time="8.764" timestamp="2022-07-29T09:29:23.786468" hostname="613b7e973910"><testcase classname="context.tests.test_unitary" name="test_grpc_context[all_inmemory]" time="0.028" /><testcase classname="context.tests.test_unitary" name="test_grpc_topology[all_inmemory]" time="0.026" /><testcase classname="context.tests.test_unitary" name="test_grpc_device[all_inmemory]" time="0.139" /><testcase classname="context.tests.test_unitary" name="test_grpc_link[all_inmemory]" time="0.139" /><testcase classname="context.tests.test_unitary" name="test_grpc_service[all_inmemory]" time="0.152" /><testcase classname="context.tests.test_unitary" name="test_grpc_connection[all_inmemory]" time="0.274" /><testcase classname="context.tests.test_unitary" name="test_rest_populate_database[all_inmemory]" time="0.093" /><testcase classname="context.tests.test_unitary" name="test_rest_get_context_ids[all_inmemory]" time="1.033" /><testcase classname="context.tests.test_unitary" name="test_rest_get_contexts[all_inmemory]" time="0.009" /><testcase classname="context.tests.test_unitary" name="test_rest_get_context[all_inmemory]" time="0.009" /><testcase classname="context.tests.test_unitary" name="test_rest_get_topology_ids[all_inmemory]" time="0.006" /><testcase classname="context.tests.test_unitary" name="test_rest_get_topologies[all_inmemory]" time="0.013" /><testcase classname="context.tests.test_unitary" name="test_rest_get_topology[all_inmemory]" time="0.012" /><testcase classname="context.tests.test_unitary" name="test_rest_get_service_ids[all_inmemory]" time="0.007" /><testcase classname="context.tests.test_unitary" name="test_rest_get_services[all_inmemory]" time="0.039" /><testcase classname="context.tests.test_unitary" name="test_rest_get_service[all_inmemory]" time="0.017" /><testcase classname="context.tests.test_unitary" 
name="test_rest_get_device_ids[all_inmemory]" time="0.005" /><testcase classname="context.tests.test_unitary" name="test_rest_get_devices[all_inmemory]" time="0.070" /><testcase classname="context.tests.test_unitary" name="test_rest_get_device[all_inmemory]" time="0.027" /><testcase classname="context.tests.test_unitary" name="test_rest_get_link_ids[all_inmemory]" time="0.005" /><testcase classname="context.tests.test_unitary" name="test_rest_get_links[all_inmemory]" time="0.023" /><testcase classname="context.tests.test_unitary" name="test_rest_get_link[all_inmemory]" time="0.011" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connection_ids[all_inmemory]" time="0.007" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connections[all_inmemory]" time="0.032" /><testcase classname="context.tests.test_unitary" name="test_rest_get_connection[all_inmemory]" time="0.032" /><testcase classname="context.tests.test_unitary" name="test_grpc_context[all_redis]" time="0.477"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb01a79edc0&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186538b0&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_context(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:128: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_topology[all_redis]" time="0.002"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb018563b80&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb018587550&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_topology(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:249: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_device[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb018683820&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb018653f70&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_device(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:381: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_link[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186c0550&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186c0670&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_link(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:556: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_service[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb018f2f700&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186188b0&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_service(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:739: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_grpc_connection[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186a43a0&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186a4310&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_client_grpc = &lt;context.client.ContextClient.ContextClient object at 0x7fb018f15a30&gt;
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+
+    def test_grpc_connection(
+        context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+        context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+        context_database = context_db_mb[0]
+    
+        # ----- Clean the database -----------------------------------------------------------------------------------------
+&gt;       context_database.clear_all()
+
+context/tests/test_unitary.py:926: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_populate_database[all_redis]" time="0.001"><failure message="redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.">self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+&gt;           sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:607: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = &lt;redis.retry.Retry object at 0x7fb0186487f0&gt;
+do = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186d49d0&gt;
+fail = &lt;function Connection.connect.&lt;locals&gt;.&lt;lambda&gt; at 0x7fb0186d4280&gt;
+
+    def call_with_retry(self, do, fail):
+        """
+        Execute an operation that might fail and returns its result, or
+        raise the exception that was thrown depending on the `Backoff` object.
+        `do`: the operation to call. Expects no argument.
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+&gt;               return do()
+
+/usr/local/lib/python3.9/site-packages/redis/retry.py:45: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+&gt;       lambda: self._connect(), lambda error: self.disconnect(error)
+    )
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:608: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+                sock.connect(socket_address)
+    
+                # set the socket_timeout now that we're connected
+                sock.settimeout(self.socket_timeout)
+                return sock
+    
+            except OSError as _:
+                err = _
+                if sock is not None:
+                    sock.close()
+    
+        if err is not None:
+&gt;           raise err
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:673: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def _connect(self):
+        "Create a TCP socket connection"
+        # we want to mimic what socket.create_connection does to support
+        # ipv4/ipv6, but we want to set options prior to calling
+        # socket.connect()
+        err = None
+        for res in socket.getaddrinfo(
+            self.host, self.port, self.socket_type, socket.SOCK_STREAM
+        ):
+            family, socktype, proto, canonname, socket_address = res
+            sock = None
+            try:
+                sock = socket.socket(family, socktype, proto)
+                # TCP_NODELAY
+                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+    
+                # TCP_KEEPALIVE
+                if self.socket_keepalive:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                    for k, v in self.socket_keepalive_options.items():
+                        sock.setsockopt(socket.IPPROTO_TCP, k, v)
+    
+                # set the socket_connect_timeout before we connect
+                sock.settimeout(self.socket_connect_timeout)
+    
+                # connect
+&gt;               sock.connect(socket_address)
+E               ConnectionRefusedError: [Errno 111] Connection refused
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:661: ConnectionRefusedError
+
+During handling of the above exception, another exception occurred:
+
+context_db_mb = (&lt;common.orm.Database.Database object at 0x7fb018f15910&gt;, &lt;common.message_broker.MessageBroker.MessageBroker object at 0x7fb018f15460&gt;)
+context_service_grpc = &lt;context.service.grpc_server.ContextService.ContextService object at 0x7fb018f158e0&gt;
+
+    def test_rest_populate_database(
+        context_db_mb : Tuple[Database, MessageBroker], # pylint: disable=redefined-outer-name
+        context_service_grpc : ContextService           # pylint: disable=redefined-outer-name
+        ):
+        database = context_db_mb[0]
+&gt;       database.clear_all()
+
+context/tests/test_unitary.py:1179: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+common/orm/Database.py:32: in clear_all
+    for key in self._backend.keys():
+common/orm/backend/redis/RedisBackend.py:48: in keys
+    return [k.decode('UTF-8') for k in self._client.keys()]
+/usr/local/lib/python3.9/site-packages/redis/commands/core.py:1370: in keys
+    return self.execute_command("KEYS", pattern, **kwargs)
+/usr/local/lib/python3.9/site-packages/redis/client.py:1173: in execute_command
+    conn = self.connection or pool.get_connection(command_name, **options)
+/usr/local/lib/python3.9/site-packages/redis/connection.py:1370: in get_connection
+    connection.connect()
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+self = Connection&lt;host=127.0.0.1,port=6379,db=0&gt;
+
+    def connect(self):
+        "Connects to the Redis server if not already connected"
+        if self._sock:
+            return
+        try:
+            sock = self.retry.call_with_retry(
+                lambda: self._connect(), lambda error: self.disconnect(error)
+            )
+        except socket.timeout:
+            raise TimeoutError("Timeout connecting to server")
+        except OSError as e:
+&gt;           raise ConnectionError(self._error_message(e))
+E           redis.exceptions.ConnectionError: Error 111 connecting to 127.0.0.1:6379. Connection refused.
+
+/usr/local/lib/python3.9/site-packages/redis/connection.py:613: ConnectionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_context_ids[all_redis]" time="1.033"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_context_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/context_ids')
+
+context/tests/test_unitary.py:1183: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_contexts[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_contexts(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/contexts')
+
+context/tests/test_unitary.py:1187: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/contexts'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_context[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_context(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+&gt;       reply = do_rest_request('/context/{:s}'.format(context_uuid))
+
+context/tests/test_unitary.py:1192: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topology_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_topology_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+&gt;       reply = do_rest_request('/context/{:s}/topology_ids'.format(context_uuid))
+
+context/tests/test_unitary.py:1197: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/topology_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topologies[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_topologies(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+&gt;       reply = do_rest_request('/context/{:s}/topologies'.format(context_uuid))
+
+context/tests/test_unitary.py:1202: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/topologies'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_topology[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_topology(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+        topology_uuid = urllib.parse.quote(DEFAULT_TOPOLOGY_UUID)
+&gt;       reply = do_rest_request('/context/{:s}/topology/{:s}'.format(context_uuid, topology_uuid))
+
+context/tests/test_unitary.py:1208: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/topology/admin'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_service_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_service_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+&gt;       reply = do_rest_request('/context/{:s}/service_ids'.format(context_uuid))
+
+context/tests/test_unitary.py:1213: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/service_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_services[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_services(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+&gt;       reply = do_rest_request('/context/{:s}/services'.format(context_uuid))
+
+context/tests/test_unitary.py:1218: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/services'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_service[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_service(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+        service_uuid = urllib.parse.quote(SERVICE_R1_R2_UUID, safe='')
+&gt;       reply = do_rest_request('/context/{:s}/service/{:s}'.format(context_uuid, service_uuid))
+
+context/tests/test_unitary.py:1224: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/service/SVC%3AR1%2FEP100-R2%2FEP100'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_device_ids[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_device_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/device_ids')
+
+context/tests/test_unitary.py:1228: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/device_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_devices[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_devices(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/devices')
+
+context/tests/test_unitary.py:1232: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/devices'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_device[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_device(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        device_uuid = urllib.parse.quote(DEVICE_R1_UUID, safe='')
+&gt;       reply = do_rest_request('/device/{:s}'.format(device_uuid))
+
+context/tests/test_unitary.py:1237: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/device/R1'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_link_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_link_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/link_ids')
+
+context/tests/test_unitary.py:1241: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/link_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_links[all_redis]" time="0.006"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_links(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+&gt;       reply = do_rest_request('/links')
+
+context/tests/test_unitary.py:1245: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/links'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_link[all_redis]" time="0.008"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_link(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        link_uuid = urllib.parse.quote(LINK_R1_R2_UUID, safe='')
+&gt;       reply = do_rest_request('/link/{:s}'.format(link_uuid))
+
+context/tests/test_unitary.py:1250: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/link/R1%2FEP2-R2%2FEP1'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connection_ids[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_connection_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+        service_uuid = urllib.parse.quote(SERVICE_R1_R3_UUID, safe='')
+&gt;       reply = do_rest_request('/context/{:s}/service/{:s}/connection_ids'.format(context_uuid, service_uuid))
+
+context/tests/test_unitary.py:1256: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/service/SVC%3AR1%2FEP100-R3%2FEP100/connection_ids'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connections[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_connections(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+        service_uuid = urllib.parse.quote(SERVICE_R1_R3_UUID, safe='')
+&gt;       reply = do_rest_request('/context/{:s}/service/{:s}/connections'.format(context_uuid, service_uuid))
+
+context/tests/test_unitary.py:1262: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/context/admin/service/SVC%3AR1%2FEP100-R3%2FEP100/connections'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_rest_get_connection[all_redis]" time="0.007"><failure message="AssertionError: Reply failed with code 500&#10;assert 500 == 200&#10;  +500&#10;  -200">context_service_rest = &lt;RestServer(Thread-71, started daemon 140392926267136)&gt;
+
+    def test_rest_get_connection(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+        connection_uuid = urllib.parse.quote(CONNECTION_R1_R3_UUID, safe='')
+&gt;       reply = do_rest_request('/connection/{:s}'.format(connection_uuid))
+
+context/tests/test_unitary.py:1267: 
+_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
+
+url = '/connection/CON%3AR1%2FEP100-R3%2FEP100'
+
+    def do_rest_request(url : str):
+        base_url = get_service_baseurl_http(ServiceNameEnum.CONTEXT)
+        request_url = 'http://{:s}:{:s}{:s}{:s}'.format(str(LOCAL_HOST), str(HTTP_PORT), str(base_url), url)
+        LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+        reply = requests.get(request_url)
+        LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+&gt;       assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+E       AssertionError: Reply failed with code 500
+E       assert 500 == 200
+E         +500
+E         -200
+
+context/tests/test_unitary.py:116: AssertionError</failure></testcase><testcase classname="context.tests.test_unitary" name="test_tools_fast_string_hasher" time="0.423" /></testsuite></testsuites>
\ No newline at end of file
diff --git a/src/device/tests/device_report.xml b/src/device/tests/device_report.xml
new file mode 100644
index 0000000000000000000000000000000000000000..c05ea0ba79d2b1b6fb5434a76c2e6af022eb2e2c
--- /dev/null
+++ b/src/device/tests/device_report.xml
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="0" time="0.017" timestamp="2022-07-29T09:28:47.168633" hostname="86d45e18bd70" /></testsuites>
\ No newline at end of file
diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml
index 246b29bd42a889b0662a8ab0cb8b198e8f4b92ab..ef3a8c39a045dd059f8a7942223bdc20775ae92c 100644
--- a/src/monitoring/.gitlab-ci.yml
+++ b/src/monitoring/.gitlab-ci.yml
@@ -56,7 +56,7 @@ unit test monitoring:
     - docker pull questdb/questdb
     - docker run --name questdb -d -p 9000:9000  -p 9009:9009  -p 8812:8812  -p 9003:9003  -e QDB_CAIRO_COMMIT_LAG=1000 -e QDB_CAIRO_MAX_UNCOMMITTED_ROWS=100000 --network=teraflowbridge --rm questdb/questdb
     - sleep 10
-    - docker run --name $IMAGE_NAME -d -p 7070:7070 --env METRICSDB_HOSTNAME=localhost --env METRICSDB_ILP_PORT=9009 --env METRICSDB_REST_PORT=9000 --env METRICSDB_TABLE=monitoring -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - docker run --name $IMAGE_NAME -d -p 7070:7070 --env METRICSDB_HOSTNAME=questdb --env METRICSDB_ILP_PORT=9009 --env METRICSDB_REST_PORT=9000 --env METRICSDB_TABLE=monitoring -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 30
     - docker ps -a
     - docker logs $IMAGE_NAME
diff --git a/src/monitoring/requirements.in b/src/monitoring/requirements.in
index 95953f100c448557471d112cd1e5e8a072320b30..e0176e0266ad6239dabb3aeedc273ddc0b638ded 100644
--- a/src/monitoring/requirements.in
+++ b/src/monitoring/requirements.in
@@ -5,17 +5,18 @@ fastcache==1.1.0
 #opencensus[stackdriver]
 #google-cloud-profiler
 #numpy
-#Jinja2==3.0.3
-#ncclient==0.6.13
-#p4runtime==1.3.0
-#paramiko==2.9.2
-influx-line-protocol==0.1.4
+Jinja2==3.0.3
+ncclient==0.6.13
+p4runtime==1.3.0
+paramiko==2.9.2
+# influx-line-protocol==0.1.4
 python-dateutil==2.8.2
 python-json-logger==2.0.2
 pytz==2021.3
 redis==4.1.2
 requests==2.27.1
 xmltodict==0.12.0
+questdb==1.0.1
 
 # pip's dependency resolver does not take into account installed packages.
 # p4runtime does not specify the version of grpcio/protobuf it needs, so it tries to install latest one
diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index cbcf920f1c5dc98a18b0e48a123bc6490f55737c..4999d2a95991d79ed5417948e220d35aa668c653 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -19,16 +19,13 @@ import grpc
 
 from common.rpc_method_wrapper.ServiceExceptions import ServiceException
 from context.client.ContextClient import ContextClient
-#from common.proto import kpi_sample_types_pb2
+
 from common.proto.context_pb2 import Empty, EventTypeEnum
 
-from common.logger import getJSONLogger
 from monitoring.client.MonitoringClient import MonitoringClient
+from monitoring.service.MonitoringServiceServicerImpl import LOGGER
 from common.proto import monitoring_pb2
 
-LOGGER = getJSONLogger('monitoringservice-server')
-LOGGER.setLevel('DEBUG')
-
 class EventsDeviceCollector:
     def __init__(self) -> None: # pylint: disable=redefined-outer-name
         self._events_queue = Queue()
@@ -74,7 +71,7 @@ class EventsDeviceCollector:
             kpi_id_list = []
 
             while not self._events_queue.empty():
-                LOGGER.info('getting Kpi by KpiID')
+                # LOGGER.info('getting Kpi by KpiID')
                 event = self.get_event(block=True)
                 if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
                     device = self._context_client.GetDevice(event.device_id)
diff --git a/src/monitoring/service/MetricsDBTools.py b/src/monitoring/service/MetricsDBTools.py
index ea6180aa072bd48a04f26d019ba1e4ab9e08af88..dc194c430c9700a2d89e0757c75c64025082ac29 100644
--- a/src/monitoring/service/MetricsDBTools.py
+++ b/src/monitoring/service/MetricsDBTools.py
@@ -12,41 +12,64 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from influx_line_protocol import Metric
-import socket
+from questdb.ingress import Sender, IngressError
 import requests
 import json
-import sys
+import logging
+import datetime
+
+LOGGER = logging.getLogger(__name__)
 
 class MetricsDB():
   def __init__(self, host, ilp_port, rest_port, table):
-      self.socket=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-      self.host=host
-      self.ilp_port=ilp_port
-      self.rest_port=rest_port
-      self.table=table
+    self.host=host
+    self.ilp_port=int(ilp_port)
+    self.rest_port=rest_port
+    self.table=table
+    self.create_table()
 
   def write_KPI(self,time,kpi_id,kpi_sample_type,device_id,endpoint_id,service_id,kpi_value):
-    self.socket.connect((self.host,self.ilp_port))
-    metric = Metric(self.table)
-    metric.with_timestamp(time)
-    metric.add_tag('kpi_id', kpi_id)
-    metric.add_tag('kpi_sample_type', kpi_sample_type)
-    metric.add_tag('device_id', device_id)
-    metric.add_tag('endpoint_id', endpoint_id)
-    metric.add_tag('service_id', service_id)
-    metric.add_value('kpi_value', kpi_value)
-    str_metric = str(metric)
-    str_metric += "\n"
-    self.socket.sendall((str_metric).encode())
-    self.socket.close()
+    counter=0
+    number_of_retries=10
+    while (counter<number_of_retries):
+      try:
+        with Sender(self.host, self.ilp_port) as sender:
+          sender.row(
+          self.table,
+          symbols={
+              'kpi_id': kpi_id,
+              'kpi_sample_type': kpi_sample_type,
+              'device_id': device_id,
+              'endpoint_id': endpoint_id,
+              'service_id': service_id},
+          columns={
+              'kpi_value': kpi_value},
+          at=datetime.datetime.fromtimestamp(time))
+          sender.flush()
+        counter=number_of_retries
+        LOGGER.info(f"KPI written")
+      except IngressError as ierr:
+        # LOGGER.info(ierr)
+        # LOGGER.info(f"Ingress Retry number {counter}")
+        counter=counter+1
+
 
   def run_query(self, sql_query):
-      query_params = {'query': sql_query, 'fmt' : 'json'}
-      url = f"http://{self.host}:{self.rest_port}/exec"
-      try:
-          response = requests.get(url, params=query_params)
-          json_response = json.loads(response.text)
-          print(json_response)
-      except requests.exceptions.RequestException as e:
-          print(f'Error: {e}', file=sys.stderr)
+    query_params = {'query': sql_query, 'fmt' : 'json'}
+    url = f"http://{self.host}:{self.rest_port}/exec"
+    response = requests.get(url, params=query_params)
+    json_response = json.loads(response.text)
+    LOGGER.info(f"Query executed, result:{json_response}")
+  
+  def create_table(self):
+    query = f'CREATE TABLE IF NOT EXISTS {self.table}'\
+    '(kpi_id SYMBOL,'\
+    'kpi_sample_type SYMBOL,'\
+    'device_id SYMBOL,'\
+    'endpoint_id SYMBOL,'\
+    'service_id SYMBOL,'\
+    'timestamp TIMESTAMP,'\
+    'kpi_value DOUBLE)'\
+    'TIMESTAMP(timestamp);'
+    self.run_query(query)
+    LOGGER.info(f"Table {self.table} created")
diff --git a/src/monitoring/service/MonitoringServiceServicerImpl.py b/src/monitoring/service/MonitoringServiceServicerImpl.py
index d9f8b1e100bada795f8d6c91a796f458da8d212f..df3b907415aabe0ed4c276ac6ac09582636ebe6b 100644
--- a/src/monitoring/service/MonitoringServiceServicerImpl.py
+++ b/src/monitoring/service/MonitoringServiceServicerImpl.py
@@ -18,6 +18,7 @@ from typing import Iterator
 
 from common.Constants import ServiceNameEnum
 from common.Settings import get_setting, get_service_port_grpc, get_service_host
+from common.logger import getJSONLogger
 from common.proto.context_pb2 import Empty
 from common.proto.device_pb2 import MonitoringSettings
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
@@ -26,23 +27,23 @@ from common.proto.monitoring_pb2 import AlarmResponse, AlarmDescriptor, AlarmIDL
     KpiDescriptor, KpiList, KpiQuery, SubsDescriptor, SubscriptionID, AlarmID, KpiDescriptorList, \
     MonitorKpiRequest, Kpi, AlarmSubscription
 from common.rpc_method_wrapper.ServiceExceptions import ServiceException
-from common.tools.timestamp.Converters import timestamp_float_to_string
 
 from monitoring.service import SqliteTools, MetricsDBTools
 from device.client.DeviceClient import DeviceClient
 
 from prometheus_client import Counter, Summary
 
-LOGGER = logging.getLogger(__name__)
+LOGGER = getJSONLogger('monitoringservice-server')
+LOGGER.setLevel('DEBUG')
 
 MONITORING_GETINSTANTKPI_REQUEST_TIME = Summary(
     'monitoring_getinstantkpi_processing_seconds', 'Time spent processing monitoring instant kpi request')
 MONITORING_INCLUDEKPI_COUNTER = Counter('monitoring_includekpi_counter', 'Monitoring include kpi request counter')
 
-METRICSDB_HOSTNAME = os.environ.get("METRICSDB_HOSTNAME")
-METRICSDB_ILP_PORT = os.environ.get("METRICSDB_ILP_PORT")
+METRICSDB_HOSTNAME  = os.environ.get("METRICSDB_HOSTNAME")
+METRICSDB_ILP_PORT  = os.environ.get("METRICSDB_ILP_PORT")
 METRICSDB_REST_PORT = os.environ.get("METRICSDB_REST_PORT")
-METRICSDB_TABLE = os.environ.get("METRICSDB_TABLE")
+METRICSDB_TABLE     = os.environ.get("METRICSDB_TABLE")
 
 
 DEVICESERVICE_SERVICE_HOST = get_setting('DEVICESERVICE_SERVICE_HOST',      default=get_service_host(ServiceNameEnum.DEVICE)     )
@@ -57,8 +58,8 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer):
         self.sql_db = SqliteTools.SQLite('monitoring.db')
         self.deviceClient = DeviceClient(host=DEVICESERVICE_SERVICE_HOST, port=DEVICESERVICE_SERVICE_PORT_GRPC)  # instantiate the client
 
-        # Set metrics_db client
         self.metrics_db = MetricsDBTools.MetricsDB(METRICSDB_HOSTNAME,METRICSDB_ILP_PORT,METRICSDB_REST_PORT,METRICSDB_TABLE)
+        LOGGER.info('MetricsDB initialized')
 
     # SetKpi (SetKpiRequest) returns (KpiId) {}
     def SetKpi(
@@ -80,7 +81,6 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer):
                 kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
 
             kpi_id.kpi_id.uuid = str(data)
-
             # CREATEKPI_COUNTER_COMPLETED.inc()
             return kpi_id
         except ServiceException as e:
@@ -161,7 +161,7 @@ class MonitoringServiceServicerImpl(MonitoringServiceServicer):
             deviceId        = kpiDescriptor.device_id.device_uuid.uuid
             endpointId      = kpiDescriptor.endpoint_id.endpoint_uuid.uuid
             serviceId       = kpiDescriptor.service_id.service_uuid.uuid
-            time_stamp      = timestamp_float_to_string(request.timestamp.timestamp)
+            time_stamp      = request.timestamp.timestamp
             kpi_value       = getattr(request.kpi_value, request.kpi_value.WhichOneof('value'))
 
             # Build the structure to be included as point in the MetricsDB
diff --git a/src/monitoring/service/__main__.py b/src/monitoring/service/__main__.py
index e37412fa004704d089a8e00bada8033d8abe53bd..3334a860ccd94d51390ab5f5869d25e2475084ee 100644
--- a/src/monitoring/service/__main__.py
+++ b/src/monitoring/service/__main__.py
@@ -45,8 +45,8 @@ def start_monitoring():
                 # Create Monitor Kpi Requests
                 monitor_kpi_request = monitoring_pb2.MonitorKpiRequest()
                 monitor_kpi_request.kpi_id.CopyFrom(kpi_id)
-                monitor_kpi_request.sampling_duration_s = 86400
-                monitor_kpi_request.sampling_interval_s = 30
+                monitor_kpi_request.monitoring_window_s = 86400
+                monitor_kpi_request.sampling_rate_s = 30
                 events_collector._monitoring_client.MonitorKpi(monitor_kpi_request)
     else:
         # Terminate is set, looping terminates
diff --git a/src/monitoring/tests/Messages.py b/src/monitoring/tests/Messages.py
index 7b7f4150e5c084bbf25c6a4d9c1c47b70e3f76a0..cf81ceed1e134240415ec1aabe8796cd4486f75f 100644
--- a/src/monitoring/tests/Messages.py
+++ b/src/monitoring/tests/Messages.py
@@ -11,17 +11,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import datetime
 
 from common.proto import monitoring_pb2
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
-from common.tools.timestamp.Converters import timestamp_string_to_float
+from common.tools.timestamp.Converters import timestamp_string_to_float, timestamp_utcnow_to_float
 
 
-def kpi():
-    _kpi                    = monitoring_pb2.Kpi()
-    _kpi.kpi_id.kpi_id.uuid = 'KPIID0000'   # pylint: disable=maybe-no-member
-    return _kpi
-
 def kpi_id():
     _kpi_id             = monitoring_pb2.KpiId()
     _kpi_id.kpi_id.uuid = str(1)            # pylint: disable=maybe-no-member
@@ -43,9 +39,9 @@ def monitor_kpi_request(kpi_uuid, monitoring_window_s, sampling_rate_s):
     _monitor_kpi_request.sampling_rate_s     = sampling_rate_s
     return _monitor_kpi_request
 
-def include_kpi_request():
+def include_kpi_request(kpi_id):
     _include_kpi_request                        = monitoring_pb2.Kpi()
-    _include_kpi_request.kpi_id.kpi_id.uuid     = str(1)    # pylint: disable=maybe-no-member
-    _include_kpi_request.timestamp.timestamp    = timestamp_string_to_float("2021-10-12T13:14:42Z")
-    _include_kpi_request.kpi_value.int32Val       = 500       # pylint: disable=maybe-no-member
+    _include_kpi_request.kpi_id.kpi_id.uuid     = kpi_id.kpi_id.uuid
+    _include_kpi_request.timestamp.timestamp    = timestamp_utcnow_to_float()
+    _include_kpi_request.kpi_value.int32Val     = 500       # pylint: disable=maybe-no-member
     return _include_kpi_request
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
index 45f55cb05f8c0cb5c9cdb2b3f02ed70c80285009..b62b5f97f965beb75ddaafa122ac8f026faab686 100644
--- a/src/monitoring/tests/test_unitary.py
+++ b/src/monitoring/tests/test_unitary.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import copy, logging, os, pytest
+import copy, os, pytest
 from time import sleep
 from typing import Tuple
 from common.Constants import ServiceNameEnum
 from common.Settings import (
     ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc)
+from common.logger import getJSONLogger
 from common.orm.Database import Database
 from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
 from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
 from common.message_broker.MessageBroker import MessageBroker
+from common.proto import monitoring_pb2
 from common.proto.monitoring_pb2 import KpiId, KpiDescriptor
 
 from context.client.ContextClient import ContextClient
@@ -38,17 +40,17 @@ from device.service.drivers import DRIVERS  # pylint: disable=wrong-import-posit
 
 # pylint: disable=wrong-import-position
 from monitoring.client.MonitoringClient import MonitoringClient
-from common.proto import context_pb2, monitoring_pb2
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
 from monitoring.service import SqliteTools, MetricsDBTools
 from monitoring.service.MonitoringService import MonitoringService
 from monitoring.service.EventTools import EventsDeviceCollector
-from monitoring.tests.Messages import create_kpi_request, include_kpi_request, kpi, kpi_id, monitor_kpi_request
+from monitoring.tests.Messages import create_kpi_request, include_kpi_request, monitor_kpi_request
 from monitoring.tests.Objects import DEVICE_DEV1, DEVICE_DEV1_CONNECT_RULES, DEVICE_DEV1_UUID
 
+from monitoring.service.MonitoringServiceServicerImpl import LOGGER
 
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
+# LOGGER = getJSONLogger('monitoringservice-server')
+# LOGGER.setLevel('DEBUG')
 
 ###########################
 # Tests Setup
@@ -180,7 +182,7 @@ def test_monitor_kpi(
         monitoring_client : MonitoringClient,           # pylint: disable=redefined-outer-name
         context_db_mb : Tuple[Database, MessageBroker]  # pylint: disable=redefined-outer-name
     ):
-    LOGGER.warning('test_monitor_kpi begin')
+    LOGGER.info('test_monitor_kpi begin')
 
     context_database = context_db_mb[0]
 
@@ -213,14 +215,14 @@ def test_monitor_kpi(
 def test_include_kpi(monitoring_client): # pylint: disable=redefined-outer-name
     # make call to server
     LOGGER.warning('test_include_kpi requesting')
-    response = monitoring_client.IncludeKpi(include_kpi_request())
-    LOGGER.debug(str(response))
+    kpi_id = monitoring_client.SetKpi(create_kpi_request())
+    response = monitoring_client.IncludeKpi(include_kpi_request(kpi_id))
     assert isinstance(response, Empty)
 
 # Test case that makes use of client fixture to test server's GetStreamKpi method
 def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-name
     LOGGER.warning('test_getstream_kpi begin')
-    response = monitoring_client.GetStreamKpi(kpi())
+    response = monitoring_client.GetStreamKpi(monitoring_pb2.Kpi())
     LOGGER.debug(str(response))
     #assert isinstance(response, Kpi)
 
@@ -235,8 +237,9 @@ def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-na
 def test_get_kpidescritor_kpi(monitoring_client): # pylint: disable=redefined-outer-name
     LOGGER.warning('test_getkpidescritor_kpi begin')
     response = monitoring_client.SetKpi(create_kpi_request())
+    # LOGGER.debug(str(response))
     response = monitoring_client.GetKpiDescriptor(response)
-    LOGGER.debug(str(response))
+    # LOGGER.debug(str(response))
     assert isinstance(response, KpiDescriptor)
 
 def test_sqlitedb_tools_insert_kpi(sql_db): # pylint: disable=redefined-outer-name
diff --git a/src/tests/ofc22/run_test_01_bootstrap.sh b/src/tests/ofc22/run_test_01_bootstrap.sh
index be30b15189786de3fd2f593a1584c73890e9e4fe..ef23c28eb0af1158bd31dd9f35f330e7225bdd07 100755
--- a/src/tests/ofc22/run_test_01_bootstrap.sh
+++ b/src/tests/ofc22/run_test_01_bootstrap.sh
@@ -13,4 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# make sure to source the following scripts:
+# - my_deploy.sh
+# - tfs_runtime_env_vars.sh
+
 pytest --verbose src/tests/ofc22/tests/test_functional_bootstrap.py
diff --git a/src/webui/grafana_backup_dashboard.json b/src/webui/grafana_backup_dashboard.json
new file mode 100644
index 0000000000000000000000000000000000000000..58a856a6c50de422b1f6bde1e2799d53762db916
--- /dev/null
+++ b/src/webui/grafana_backup_dashboard.json
@@ -0,0 +1,320 @@
+{
+    "annotations": {
+      "list": [
+        {
+          "builtIn": 1,
+          "datasource": "-- Grafana --",
+          "enable": true,
+          "hide": true,
+          "iconColor": "rgba(0, 211, 255, 1)",
+          "name": "Annotations & Alerts",
+          "target": {
+            "limit": 100,
+            "matchAny": false,
+            "tags": [],
+            "type": "dashboard"
+          },
+          "type": "dashboard"
+        }
+      ]
+    },
+    "editable": true,
+    "fiscalYearStartMonth": 0,
+    "gnetId": null,
+    "graphTooltip": 0,
+    "id": 1,
+    "iteration": 1664282779131,
+    "links": [],
+    "liveNow": false,
+    "panels": [
+      {
+        "datasource": null,
+        "fieldConfig": {
+          "defaults": {
+            "color": {
+              "mode": "palette-classic"
+            },
+            "custom": {
+              "axisLabel": "",
+              "axisPlacement": "auto",
+              "barAlignment": 0,
+              "drawStyle": "line",
+              "fillOpacity": 0,
+              "gradientMode": "none",
+              "hideFrom": {
+                "legend": false,
+                "tooltip": false,
+                "viz": false
+              },
+              "lineInterpolation": "smooth",
+              "lineWidth": 1,
+              "pointSize": 5,
+              "scaleDistribution": {
+                "type": "linear"
+              },
+              "showPoints": "never",
+              "spanNulls": false,
+              "stacking": {
+                "group": "A",
+                "mode": "none"
+              },
+              "thresholdsStyle": {
+                "mode": "off"
+              }
+            },
+            "mappings": [],
+            "thresholds": {
+              "mode": "absolute",
+              "steps": [
+                {
+                  "color": "green",
+                  "value": null
+                },
+                {
+                  "color": "red",
+                  "value": 80
+                }
+              ]
+            }
+          },
+          "overrides": [
+            {
+              "matcher": {
+                "id": "byRegexp",
+                "options": ".* PACKETS_.*"
+              },
+              "properties": [
+                {
+                  "id": "custom.axisPlacement",
+                  "value": "left"
+                },
+                {
+                  "id": "unit",
+                  "value": "pps"
+                },
+                {
+                  "id": "custom.axisLabel",
+                  "value": "Packets / sec"
+                },
+                {
+                  "id": "custom.axisSoftMin",
+                  "value": 0
+                }
+              ]
+            },
+            {
+              "matcher": {
+                "id": "byRegexp",
+                "options": ".* BYTES_.*"
+              },
+              "properties": [
+                {
+                  "id": "custom.axisPlacement",
+                  "value": "right"
+                },
+                {
+                  "id": "unit",
+                  "value": "Bps"
+                },
+                {
+                  "id": "custom.axisLabel",
+                  "value": "Bytes / sec"
+                },
+                {
+                  "id": "custom.axisSoftMin",
+                  "value": 0
+                }
+              ]
+            }
+          ]
+        },
+        "gridPos": {
+          "h": 19,
+          "w": 24,
+          "x": 0,
+          "y": 0
+        },
+        "id": 2,
+        "options": {
+          "legend": {
+            "calcs": [
+              "first",
+              "min",
+              "mean",
+              "max",
+              "lastNotNull"
+            ],
+            "displayMode": "table",
+            "placement": "right"
+          },
+          "tooltip": {
+            "mode": "multi"
+          }
+        },
+        "targets": [
+          {
+            "format": "time_series",
+            "group": [],
+            "hide": false,
+            "metricColumn": "kpi_value",
+            "rawQuery": false,
+            "rawSql": "SELECT\n  timestamp AS \"time\",\n  kpi_value AS metric,\n  kpi_value AS \"kpi_value\"\nFROM monitoring\nWHERE\n  $__timeFilter(timestamp) AND\n  device_id = $device_id AND\n  endpoint_id = $endpoint_id\nORDER BY 1,2",
+            "refId": "A",
+            "select": [
+              [
+                {
+                  "params": [
+                    "kpi_value"
+                  ],
+                  "type": "column"
+                },
+                {
+                  "params": [
+                    "avg"
+                  ],
+                  "type": "aggregate"
+                },
+                {
+                  "params": [
+                    "kpi_value"
+                  ],
+                  "type": "alias"
+                }
+              ]
+            ],
+            "table": "monitoring",
+            "timeColumn": "timestamp",
+            "where": [
+              {
+                "name": "$__timeFilter",
+                "params": [],
+                "type": "macro"
+              },
+              {
+                "name": "",
+                "params": [
+                  "device_id",
+                  "=",
+                  "$device_id"
+                ],
+                "type": "expression"
+              },
+              {
+                "name": "",
+                "params": [
+                  "endpoint_id",
+                  "=",
+                  "$endpoint_id"
+                ],
+                "type": "expression"
+              }
+            ]
+          }
+        ],
+        "title": "L3 Monitoring Packets/Bytes Received/Sent",
+        "transformations": [],
+        "type": "timeseries"
+      }
+    ],
+    "refresh": "",
+    "schemaVersion": 32,
+    "style": "dark",
+    "tags": [],
+    "templating": {
+      "list": [
+        {
+          "allValue": null,
+          "current": {
+            "selected": true,
+            "text": [
+              "R1-EMU"
+            ],
+            "value": [
+              "R1-EMU"
+            ]
+          },
+          "datasource": null,
+          "definition": "SELECT DISTINCT device_id FROM monitoring;",
+          "description": null,
+          "error": null,
+          "hide": 0,
+          "includeAll": true,
+          "label": "Device",
+          "multi": true,
+          "name": "device_id",
+          "options": [],
+          "query": "SELECT DISTINCT device_id FROM monitoring;",
+          "refresh": 2,
+          "regex": "",
+          "skipUrlSync": false,
+          "sort": 0,
+          "type": "query"
+        },
+        {
+          "allValue": null,
+          "current": {
+            "selected": true,
+            "text": [
+              "13/1/2"
+            ],
+            "value": [
+              "13/1/2"
+            ]
+          },
+          "datasource": null,
+          "definition": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})",
+          "description": null,
+          "error": null,
+          "hide": 0,
+          "includeAll": true,
+          "label": "EndPoint",
+          "multi": true,
+          "name": "endpoint_id",
+          "options": [],
+          "query": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})",
+          "refresh": 2,
+          "regex": "",
+          "skipUrlSync": false,
+          "sort": 0,
+          "type": "query"
+        },
+        {
+          "allValue": null,
+          "current": {
+            "selected": true,
+            "text": [
+              "All"
+            ],
+            "value": [
+              "$__all"
+            ]
+          },
+          "datasource": null,
+          "definition": "SELECT DISTINCT kpi_sample_type FROM monitoring;",
+          "description": null,
+          "error": null,
+          "hide": 0,
+          "includeAll": true,
+          "label": "Kpi Sample Type",
+          "multi": true,
+          "name": "kpi_sample_type",
+          "options": [],
+          "query": "SELECT DISTINCT kpi_sample_type FROM monitoring;",
+          "refresh": 2,
+          "regex": "",
+          "skipUrlSync": false,
+          "sort": 0,
+          "type": "query"
+        }
+      ]
+    },
+    "time": {
+      "from": "now-5m",
+      "to": "now"
+    },
+    "timepicker": {},
+    "timezone": "",
+    "title": "L3 Monitoring",
+    "uid": "tf-l3-monit",
+    "version": 3
+  }
\ No newline at end of file
diff --git a/src/webui/grafana_dashboard.json b/src/webui/grafana_dashboard.json
index a845ac20c7861b86fd1931452b7802b3f1e57aa8..49148825a973aecca5901ffac2249fed6057f4d0 100644
--- a/src/webui/grafana_dashboard.json
+++ b/src/webui/grafana_dashboard.json
@@ -193,19 +193,19 @@
             "tags": [
               {
                 "key": "device_id",
-                "operator": "=~",
+                "operator": "=",
                 "value": "/^$device_id$/"
               },
               {
                 "condition": "AND",
                 "key": "endpoint_id",
-                "operator": "=~",
+                "operator": "=",
                 "value": "/^$endpoint_id$/"
               },
               {
                 "condition": "AND",
                 "key": "kpi_sample_type",
-                "operator": "=~",
+                "operator": "=",
                 "value": "/^$kpi_sample_type$/"
               }
             ]
@@ -236,7 +236,7 @@
             ]
           },
           "datasource": null,
-          "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"device_id\"",
+          "definition": "SELECT DISTINCT device_id FROM monitoring;",
           "description": null,
           "error": null,
           "hide": 0,
@@ -245,7 +245,7 @@
           "multi": true,
           "name": "device_id",
           "options": [],
-          "query": "SHOW TAG VALUES FROM samples WITH KEY=\"device_id\"",
+          "query": "SELECT DISTINCT device_id FROM monitoring;",
           "refresh": 2,
           "regex": "",
           "skipUrlSync": false,
@@ -264,7 +264,7 @@
             ]
           },
           "datasource": null,
-          "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"endpoint_id\" WHERE \"device_id\"=~/^$device_id$/",
+          "definition": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})",
           "description": null,
           "error": null,
           "hide": 0,
@@ -273,7 +273,7 @@
           "multi": true,
           "name": "endpoint_id",
           "options": [],
-          "query": "SHOW TAG VALUES FROM samples WITH KEY=\"endpoint_id\" WHERE \"device_id\"=~/^$device_id$/",
+          "query": "SELECT DISTINCT endpoint_id FROM monitoring WHERE device_id IN (${device_id})",
           "refresh": 2,
           "regex": "",
           "skipUrlSync": false,
@@ -292,7 +292,7 @@
             ]
           },
           "datasource": null,
-          "definition": "SHOW TAG VALUES FROM samples WITH KEY=\"kpi_sample_type\"",
+          "definition": "SELECT DISTINCT kpi_sample_type FROM monitoring;",
           "description": null,
           "error": null,
           "hide": 0,
@@ -301,7 +301,7 @@
           "multi": true,
           "name": "kpi_sample_type",
           "options": [],
-          "query": "SHOW TAG VALUES FROM samples WITH KEY=\"kpi_sample_type\"",
+          "query": "SELECT DISTINCT kpi_sample_type FROM monitoring;",
           "refresh": 2,
           "regex": "",
           "skipUrlSync": false,
diff --git a/tutorial/2-2-ofc22.md b/tutorial/2-2-ofc22.md
index 5a0547d640267ebd45030d10ce8673d984c6b137..651b7777d56dda7c125ba79697f3df78e4fba7ce 100644
--- a/tutorial/2-2-ofc22.md
+++ b/tutorial/2-2-ofc22.md
@@ -49,6 +49,18 @@ Notes:
 
 ## 2.2.5. Test execution
 
+Before executing the tests, the environment variables need to be prepared. First, make sure to load your deployment variables by running:
+
+```
+source my_deploy.sh
+```
+
+Then, load the environment variables required to execute the tests by running:
+
+```
+source tfs_runtime_env_vars.sh
+```
+
 To execute this functional test, four main steps needs to be carried out:
 1. Device bootstrapping
 2. L3VPN Service creation