diff --git a/expose_ingress_grpc.sh b/expose_ingress_grpc.sh
index 37d72aa8d66e1d2ff2e4677f245db8eaf2438ac4..b456dfd65ea92ca6dc0a85ac88067cc4e28deb97 100755
--- a/expose_ingress_grpc.sh
+++ b/expose_ingress_grpc.sh
@@ -18,10 +18,10 @@
 ########################################################################################################################
 
 # If not already set, set the name of the Kubernetes namespace to deploy to.
-export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs-dev"}
+export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
 
 # If not already set, set the list of components you want to build images for, and deploy.
-export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device automation policy service compute monitoring dbscanserving opticalattackmitigator opticalcentralizedattackdetector webui"}
+export TFS_COMPONENTS=${TFS_COMPONENTS:-"l3_attackmitigator l3_centralizedattackdetector"}
 
 ########################################################################################################################
 # Automated steps start here
@@ -37,7 +37,8 @@ for COMPONENT in $TFS_COMPONENTS; do
         continue;
     fi
 
-    PATCH='{"data": {"'${SERVICE_GRPC_PORT}'": "'$TFS_K8S_NAMESPACE'/'${COMPONENT}service':'${SERVICE_GRPC_PORT}'"}}'
+    COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/_/-/g")
+    PATCH='{"data": {"'${SERVICE_GRPC_PORT}'": "'$TFS_K8S_NAMESPACE'/'${COMPONENT_OBJNAME}service':'${SERVICE_GRPC_PORT}'"}}'
     #echo "PATCH: ${PATCH}"
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
 
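For illustration, here is roughly what one iteration of the patched loop produces for a component whose name contains an underscore. The namespace "tfs" matches the default above; the gRPC port 10001 is a hypothetical value:

```bash
# Sketch of a single loop iteration, assuming SERVICE_GRPC_PORT="10001" (hypothetical).
COMPONENT="l3_centralizedattackdetector"
COMPONENT_OBJNAME=$(echo "${COMPONENT}" | sed "s/_/-/g")   # -> l3-centralizedattackdetector
# Kubernetes object names cannot contain underscores, hence the conversion.
PATCH='{"data": {"10001": "tfs/l3-centralizedattackdetectorservice:10001"}}'
kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
```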
diff --git a/my_deploy.sh b/my_deploy.sh
index 28148123dcf05618e7e811025ed0f7c6ea9ca3ca..d8cc597ab2bf1eef847c10234742d220ad3278c9 100644
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -1,5 +1,5 @@
 export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/"
-export TFS_COMPONENTS="context device automation service compute monitoring webui"
+export TFS_COMPONENTS="context device service compute l3_attackmitigator l3_centralizedattackdetector"
 export TFS_IMAGE_TAG="dev"
 export TFS_K8S_NAMESPACE="tfs"
 export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml"
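Once deploy.sh has run with this component list, a quick hand-run check (not part of the repo) that the underscored component names materialized as hyphenated Kubernetes services:

```bash
# Service objects use hyphens, since underscores are not valid in Kubernetes names.
kubectl get services --namespace tfs | grep -E 'l3-(attackmitigator|centralizedattackdetector)service'
```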
diff --git a/ofc22 b/ofc22
index 1102ec2a635bd3fcc1142dd2982d4e3224ccaca5..a0c40e71642cd7eb19ee30975513abd9e5e943e6 120000
--- a/ofc22
+++ b/ofc22
@@ -1 +1 @@
-src/tests/ofc22/
\ No newline at end of file
+src/tests/ofc22
\ No newline at end of file
diff --git a/proto/l3_attackmitigator.proto b/proto/l3_attackmitigator.proto
index 532f2b6fdf3f555e06c6dd4bc995f60dc65bca07..8f1a947dd712e83453675fbe2333e7963fc58324 100644
--- a/proto/l3_attackmitigator.proto
+++ b/proto/l3_attackmitigator.proto
@@ -36,7 +36,8 @@ message L3AttackmitigatorOutput {
 	string port_o = 9;
 	string port_d = 10;
 	string ml_id = 11;
-	string service_id = 12; 
-	float time_start = 13;
-	float time_end = 14;
+	context.ServiceId service_id = 12;
+	context.EndPointId endpoint_id = 13;
+	float time_start = 14;
+	float time_end = 15;
 }
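Two follow-ups for this change: the new context.ServiceId and context.EndPointId fields compile only if l3_attackmitigator.proto also gains an `import "context.proto";` statement (the companion file below adds one, but no such hunk is shown for this file), and the generated stubs must be rebuilt. A minimal sketch of the rebuild, assuming the stock grpcio-tools workflow rather than whatever helper script the repo actually provides:

```bash
# Regenerate Python stubs after editing the .proto files (output path is an assumption).
mkdir -p out
python3 -m grpc_tools.protoc --proto_path=proto \
    --python_out=out --grpc_python_out=out \
    proto/context.proto proto/l3_attackmitigator.proto proto/l3_centralizedattackdetector.proto
```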
diff --git a/proto/l3_centralizedattackdetector.proto b/proto/l3_centralizedattackdetector.proto
index e3358dcd705978f62e9ce500d3e0ddce7b7428f7..e251e6feb9981ae3821e6b580e3b966aebc3a889 100644
--- a/proto/l3_centralizedattackdetector.proto
+++ b/proto/l3_centralizedattackdetector.proto
@@ -14,6 +14,8 @@
 
 syntax = "proto3";
 
+import "context.proto";
+
 service L3Centralizedattackdetector {
-  // Sends a greeting
+  // Sends connection statistics to be analyzed for ongoing attacks
   rpc SendInput (L3CentralizedattackdetectorMetrics) returns (Empty) {}
@@ -27,26 +29,29 @@ message L3CentralizedattackdetectorMetrics {
-	There are currently 9 values and 
+	There are currently 10 feature values, followed by the connection-identifier fields.
 	*/
 
-    // Machine learning
-	float n_packets_server_seconds = 1;
-	float n_packets_client_seconds = 2;
-	float n_bits_server_seconds = 3;
-	float n_bits_client_seconds = 4;
-	float n_bits_server_n_packets_server = 5;
-	float n_bits_client_n_packets_client = 6;
-	float n_packets_server_n_packets_client = 7;
-	float n_bits_server_n_bits_client = 8;
+	// Machine learning model features
+	float c_pkts_all = 1;
+	float c_ack_cnt = 2;
+	float c_bytes_uniq = 3;
+	float c_pkts_data = 4;
+	float c_bytes_all = 5;
+	float s_pkts_all = 6;
+	float s_ack_cnt = 7;
+	float s_bytes_uniq = 8;
+	float s_pkts_data = 9;
+	float s_bytes_all = 10;
 
-	// Conection identifier
+	// Connection identifier
-	string ip_o = 9;
-	string port_o = 10;
-	string ip_d = 11;
-	string port_d = 12;
-	string flow_id = 13;
-	string service_id = 14;
-	string protocol = 15;
-	float time_start = 16;
-	float time_end = 17;
+	string ip_o = 11;
+	string port_o = 12;
+	string ip_d = 13;
+	string port_d = 14;
+	string flow_id = 15;
+	context.ServiceId service_id = 16;
+	context.EndPointId endpoint_id = 17;
+	string protocol = 18;
+	float time_start = 19;
+	float time_end = 20;
 }
 
 message Empty {
diff --git a/scripts/scenario3/l3/README.md b/scripts/scenario3/l3/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f66d8e351033d2762a77269243b6d3bb2a1d7022
--- /dev/null
+++ b/scripts/scenario3/l3/README.md
@@ -0,0 +1,10 @@
+# Scripts to automatically run the "Attack Detection & Mitigation at the L3 Layer" workflow (Scenario 3).
+"launch_l3_attack_detection_and_mitigation.sh" launches the TeraFlow OS components, which includes the CentralizedAttackDetector and AttackMitigator componentes necessary to perform this workflow.
+"launch_l3_attack_detection_and_mitigation_complete.sh" also launches the DistributedAttackDetector, which monitors the network data plane and passively collects traffic packets and aggregates them in network flows, which are then provided to the CentralizedAttackDetector to detect attacks that may be occurring in the network.
diff --git a/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation.sh b/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8243f042a98eac41e11ff465646ed7cb7e1dc05d
--- /dev/null
+++ b/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+# Deploy the TeraFlow OS components selected in my_deploy.sh and run the
+# OFC'22 tests that bootstrap the topology and create the end-to-end service.
+cd /home/ubuntu/tfs-ctrl || exit 1
+source my_deploy.sh
+./deploy.sh
+./show_deploy.sh
+
+# Load the environment variables generated by the deployment.
+source tfs_runtime_env_vars.sh
+
+ofc22/run_test_01_bootstrap.sh
+ofc22/run_test_02_create_service.sh
diff --git a/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation_complete.sh b/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation_complete.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8f9d24590fbf0ec31eaecd59f7cc2e24e0bff11e
--- /dev/null
+++ b/scripts/scenario3/l3/launch_l3_attack_detection_and_mitigation_complete.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Same workflow as launch_l3_attack_detection_and_mitigation.sh, but it also
+# starts the DistributedAttackDetector on the remote data-plane node.
+cd /home/ubuntu/tfs-ctrl || exit 1
+source my_deploy.sh
+./deploy.sh
+./show_deploy.sh
+
+source tfs_runtime_env_vars.sh
+
+ofc22/run_test_01_bootstrap.sh
+ofc22/run_test_02_create_service.sh
+
+# Restart the DistributedAttackDetector in the background on the remote host.
+sshpass -p "ubuntu" ssh -o StrictHostKeyChecking=no -n -f ubuntu@192.168.165.73 "sh -c 'nohup /home/ubuntu/TeraflowDockerDistributed/restart.sh > /dev/null 2>&1 &'"
diff --git a/src/l3_attackmitigator/Dockerfile b/src/l3_attackmitigator/Dockerfile
index 2b814f0eed8fbfba96a759212ae5ff0e2172c14f..9ecc9bc1216bb5c589cf782dfdb881f178555f1e 100644
--- a/src/l3_attackmitigator/Dockerfile
+++ b/src/l3_attackmitigator/Dockerfile
@@ -63,6 +63,7 @@ RUN python3 -m pip install -r requirements.txt
 # Add component files into working directory
 WORKDIR /var/teraflow
 COPY src/l3_attackmitigator/. l3_attackmitigator
+COPY src/monitoring/. monitoring
 
 # Start the service
 ENTRYPOINT ["python", "-m", "l3_attackmitigator.service"]
diff --git a/src/l3_attackmitigator/dump.txt b/src/l3_attackmitigator/dump.txt
new file mode 100644
index 0000000000000000000000000000000000000000..63109855f446b2932713c82b46549a0b1d7bab8c
--- /dev/null
+++ b/src/l3_attackmitigator/dump.txt
@@ -0,0 +1,184 @@
+----- Database Dump [182 entries] -------------------------
+  [ set] ConfigModel/instances                    :: {'ConfigModel[O1-OLS:running]', 'ConfigModel[R1-EMU:running]', 'ConfigModel[R2-EMU:running]', 'ConfigModel[R3-EMU:running]', 'ConfigModel[R4-EMU:running]', 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running]', 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]'}
+  [dict] ConfigModel[O1-OLS:running]              :: {'pk': 'O1-OLS:running'}
+  [ set] ConfigModel[O1-OLS:running]/references   :: {'ConfigRuleModel[O1-OLS:running:3a759acf34501f21]:config_fk', 'ConfigRuleModel[O1-OLS:running:8c9564a0c5b96ab9]:config_fk', 'ConfigRuleModel[O1-OLS:running:ca87dd7146b3c377]:config_fk', 'ConfigRuleModel[O1-OLS:running:e17c13e1d2595abf]:config_fk', 'ConfigRuleModel[O1-OLS:running:e34b75892d5e4b98]:config_fk', 'DeviceModel[O1-OLS]:device_config_fk'}
+  [dict] ConfigModel[R1-EMU:running]              :: {'pk': 'R1-EMU:running'}
+  [ set] ConfigModel[R1-EMU:running]/references   :: {'ConfigRuleModel[R1-EMU:running:01c09c16fa7a1c42]:config_fk', 'ConfigRuleModel[R1-EMU:running:0a74a06e46cadf4d]:config_fk', 'ConfigRuleModel[R1-EMU:running:1522507a2c46f302]:config_fk', 'ConfigRuleModel[R1-EMU:running:25538c1a5a7413d6]:config_fk', 'ConfigRuleModel[R1-EMU:running:27e66bd8b5269276]:config_fk', 'ConfigRuleModel[R1-EMU:running:2984fb5629b5cfe0]:config_fk', 'ConfigRuleModel[R1-EMU:running:2c9ac49a866a7cdd]:config_fk', 'ConfigRuleModel[R1-EMU:running:50183717ae57e751]:config_fk', 'ConfigRuleModel[R1-EMU:running:5079ff9ab906b93d]:config_fk', 'ConfigRuleModel[R1-EMU:running:53b5477acb05a84b]:config_fk', 'ConfigRuleModel[R1-EMU:running:69a4986d322f94e3]:config_fk', 'ConfigRuleModel[R1-EMU:running:89925119fe702a08]:config_fk', 'ConfigRuleModel[R1-EMU:running:8c1c2310b4321b60]:config_fk', 'ConfigRuleModel[R1-EMU:running:93ceccde538188a9]:config_fk', 'ConfigRuleModel[R1-EMU:running:afbfa638d35357a6]:config_fk', 'ConfigRuleModel[R1-EMU:running:c5755579f473f6c6]:config_fk', 'ConfigRuleModel[R1-EMU:running:c8bd82d94b5dfbcf]:config_fk', 'ConfigRuleModel[R1-EMU:running:caeaf4702e128cde]:config_fk', 'ConfigRuleModel[R1-EMU:running:efff2baa1ee0efd4]:config_fk', 'DeviceModel[R1-EMU]:device_config_fk'}
+  [dict] ConfigModel[R2-EMU:running]              :: {'pk': 'R2-EMU:running'}
+  [ set] ConfigModel[R2-EMU:running]/references   :: {'ConfigRuleModel[R2-EMU:running:1522507a2c46f302]:config_fk', 'ConfigRuleModel[R2-EMU:running:afbfa638d35357a6]:config_fk', 'DeviceModel[R2-EMU]:device_config_fk'}
+  [dict] ConfigModel[R3-EMU:running]              :: {'pk': 'R3-EMU:running'}
+  [ set] ConfigModel[R3-EMU:running]/references   :: {'ConfigRuleModel[R3-EMU:running:01c09c16fa7a1c42]:config_fk', 'ConfigRuleModel[R3-EMU:running:1522507a2c46f302]:config_fk', 'ConfigRuleModel[R3-EMU:running:25538c1a5a7413d6]:config_fk', 'ConfigRuleModel[R3-EMU:running:27e66bd8b5269276]:config_fk', 'ConfigRuleModel[R3-EMU:running:2984fb5629b5cfe0]:config_fk', 'ConfigRuleModel[R3-EMU:running:2c9ac49a866a7cdd]:config_fk', 'ConfigRuleModel[R3-EMU:running:50183717ae57e751]:config_fk', 'ConfigRuleModel[R3-EMU:running:5079ff9ab906b93d]:config_fk', 'ConfigRuleModel[R3-EMU:running:53b5477acb05a84b]:config_fk', 'ConfigRuleModel[R3-EMU:running:69a4986d322f94e3]:config_fk', 'ConfigRuleModel[R3-EMU:running:89925119fe702a08]:config_fk', 'ConfigRuleModel[R3-EMU:running:8c1c2310b4321b60]:config_fk', 'ConfigRuleModel[R3-EMU:running:93ceccde538188a9]:config_fk', 'ConfigRuleModel[R3-EMU:running:afbfa638d35357a6]:config_fk', 'ConfigRuleModel[R3-EMU:running:b2c708c23571c108]:config_fk', 'ConfigRuleModel[R3-EMU:running:c8bd82d94b5dfbcf]:config_fk', 'ConfigRuleModel[R3-EMU:running:cabc97474c2cca7a]:config_fk', 'ConfigRuleModel[R3-EMU:running:caeaf4702e128cde]:config_fk', 'ConfigRuleModel[R3-EMU:running:efff2baa1ee0efd4]:config_fk', 'DeviceModel[R3-EMU]:device_config_fk'}
+  [dict] ConfigModel[R4-EMU:running]              :: {'pk': 'R4-EMU:running'}
+  [ set] ConfigModel[R4-EMU:running]/references   :: {'ConfigRuleModel[R4-EMU:running:1522507a2c46f302]:config_fk', 'ConfigRuleModel[R4-EMU:running:afbfa638d35357a6]:config_fk', 'DeviceModel[R4-EMU]:device_config_fk'}
+  [dict] ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running] :: {'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running'}
+  [ set] ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running]/references :: {'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running:d0f8a2a67d26b6de]:config_fk', 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]:service_config_fk'}
+  [dict] ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running] :: {'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running'}
+  [ set] ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]/references :: {'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:20b78ae0ce0a3460]:config_fk', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:299d6ac2d2edbf77]:config_fk', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:6a3ef52e178adab0]:config_fk', 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]:service_config_fk'}
+  [ set] ConfigRuleModel/instances                :: {'ConfigRuleModel[O1-OLS:running:3a759acf34501f21]', 'ConfigRuleModel[O1-OLS:running:8c9564a0c5b96ab9]', 'ConfigRuleModel[O1-OLS:running:ca87dd7146b3c377]', 'ConfigRuleModel[O1-OLS:running:e17c13e1d2595abf]', 'ConfigRuleModel[O1-OLS:running:e34b75892d5e4b98]', 'ConfigRuleModel[R1-EMU:running:01c09c16fa7a1c42]', 'ConfigRuleModel[R1-EMU:running:0a74a06e46cadf4d]', 'ConfigRuleModel[R1-EMU:running:1522507a2c46f302]', 'ConfigRuleModel[R1-EMU:running:25538c1a5a7413d6]', 'ConfigRuleModel[R1-EMU:running:27e66bd8b5269276]', 'ConfigRuleModel[R1-EMU:running:2984fb5629b5cfe0]', 'ConfigRuleModel[R1-EMU:running:2c9ac49a866a7cdd]', 'ConfigRuleModel[R1-EMU:running:50183717ae57e751]', 'ConfigRuleModel[R1-EMU:running:5079ff9ab906b93d]', 'ConfigRuleModel[R1-EMU:running:53b5477acb05a84b]', 'ConfigRuleModel[R1-EMU:running:69a4986d322f94e3]', 'ConfigRuleModel[R1-EMU:running:89925119fe702a08]', 'ConfigRuleModel[R1-EMU:running:8c1c2310b4321b60]', 'ConfigRuleModel[R1-EMU:running:93ceccde538188a9]', 'ConfigRuleModel[R1-EMU:running:afbfa638d35357a6]', 'ConfigRuleModel[R1-EMU:running:c5755579f473f6c6]', 'ConfigRuleModel[R1-EMU:running:c8bd82d94b5dfbcf]', 'ConfigRuleModel[R1-EMU:running:caeaf4702e128cde]', 'ConfigRuleModel[R1-EMU:running:efff2baa1ee0efd4]', 'ConfigRuleModel[R2-EMU:running:1522507a2c46f302]', 'ConfigRuleModel[R2-EMU:running:afbfa638d35357a6]', 'ConfigRuleModel[R3-EMU:running:01c09c16fa7a1c42]', 'ConfigRuleModel[R3-EMU:running:1522507a2c46f302]', 'ConfigRuleModel[R3-EMU:running:25538c1a5a7413d6]', 'ConfigRuleModel[R3-EMU:running:27e66bd8b5269276]', 'ConfigRuleModel[R3-EMU:running:2984fb5629b5cfe0]', 'ConfigRuleModel[R3-EMU:running:2c9ac49a866a7cdd]', 'ConfigRuleModel[R3-EMU:running:50183717ae57e751]', 'ConfigRuleModel[R3-EMU:running:5079ff9ab906b93d]', 'ConfigRuleModel[R3-EMU:running:53b5477acb05a84b]', 'ConfigRuleModel[R3-EMU:running:69a4986d322f94e3]', 'ConfigRuleModel[R3-EMU:running:89925119fe702a08]', 'ConfigRuleModel[R3-EMU:running:8c1c2310b4321b60]', 'ConfigRuleModel[R3-EMU:running:93ceccde538188a9]', 'ConfigRuleModel[R3-EMU:running:afbfa638d35357a6]', 'ConfigRuleModel[R3-EMU:running:b2c708c23571c108]', 'ConfigRuleModel[R3-EMU:running:c8bd82d94b5dfbcf]', 'ConfigRuleModel[R3-EMU:running:cabc97474c2cca7a]', 'ConfigRuleModel[R3-EMU:running:caeaf4702e128cde]', 'ConfigRuleModel[R3-EMU:running:efff2baa1ee0efd4]', 'ConfigRuleModel[R4-EMU:running:1522507a2c46f302]', 'ConfigRuleModel[R4-EMU:running:afbfa638d35357a6]', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running:d0f8a2a67d26b6de]', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:20b78ae0ce0a3460]', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:299d6ac2d2edbf77]', 'ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:6a3ef52e178adab0]'}
+  [dict] ConfigRuleModel[O1-OLS:running:3a759acf34501f21] :: {'action': 'SET', 'config_fk': 'ConfigModel[O1-OLS:running]', 'key': '/endpoints/endpoint[50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]', 'pk': 'O1-OLS:running:3a759acf34501f21', 'position': '3', 'value': '{"sample_types": {}, "type": "optical", "uuid": "50296d99-58cc-5ce7-82f5-fc8ee4eec2ec"}'}
+  [dict] ConfigRuleModel[O1-OLS:running:8c9564a0c5b96ab9] :: {'action': 'SET', 'config_fk': 'ConfigModel[O1-OLS:running]', 'key': '/service[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]', 'pk': 'O1-OLS:running:8c9564a0c5b96ab9', 'position': '4', 'value': '{"capacity_unit": "GHz", "capacity_value": 1, "direction": "UNIDIRECTIONAL", "input_sip": "aade6001-f00b-5e2f-a357-6a0a9d3de870", "layer_protocol_name": "PHOTONIC_MEDIA", "layer_protocol_qualifier": "tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC", "output_sip": "0ef74f99-1acc-57bd-ab9d-4b958b06c513", "uuid": "a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical"}'}
+  [dict] ConfigRuleModel[O1-OLS:running:ca87dd7146b3c377] :: {'action': 'SET', 'config_fk': 'ConfigModel[O1-OLS:running]', 'key': '/endpoints/endpoint[0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'pk': 'O1-OLS:running:ca87dd7146b3c377', 'position': '2', 'value': '{"sample_types": {}, "type": "optical", "uuid": "0ef74f99-1acc-57bd-ab9d-4b958b06c513"}'}
+  [dict] ConfigRuleModel[O1-OLS:running:e17c13e1d2595abf] :: {'action': 'SET', 'config_fk': 'ConfigModel[O1-OLS:running]', 'key': '/endpoints/endpoint[aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'pk': 'O1-OLS:running:e17c13e1d2595abf', 'position': '0', 'value': '{"sample_types": {}, "type": "optical", "uuid": "aade6001-f00b-5e2f-a357-6a0a9d3de870"}'}
+  [dict] ConfigRuleModel[O1-OLS:running:e34b75892d5e4b98] :: {'action': 'SET', 'config_fk': 'ConfigModel[O1-OLS:running]', 'key': '/endpoints/endpoint[eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'pk': 'O1-OLS:running:e34b75892d5e4b98', 'position': '1', 'value': '{"sample_types": {}, "type": "optical", "uuid": "eb287d83-f05e-53ec-ab5a-adf6bd2b5418"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:01c09c16fa7a1c42] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_import][route-target:65000:333]', 'pk': 'R1-EMU:running:01c09c16fa7a1c42', 'position': '12', 'value': '{"ext_community_member": "route-target:65000:333", "ext_community_set_name": "cc20bd3149a0-NetInst_rt_import"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:0a74a06e46cadf4d] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/interface[13/1/2]/subinterface[400]', 'pk': 'R1-EMU:running:0a74a06e46cadf4d', 'position': '10', 'value': '{"address_ip": "3.3.2.1", "address_prefix": 24, "description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetSubIf", "index": 400, "name": "13/1/2", "vlan_id": 400}'}
+  [dict] ConfigRuleModel[R1-EMU:running:1522507a2c46f302] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/endpoints/endpoint[13/0/0]', 'pk': 'R1-EMU:running:1522507a2c46f302', 'position': '0', 'value': '{"sample_types": {}, "type": "optical", "uuid": "13/0/0"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:25538c1a5a7413d6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]', 'pk': 'R1-EMU:running:25538c1a5a7413d6', 'position': '2', 'value': '{"description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetIf", "name": "cc20bd3149a0-NetInst", "route_distinguisher": "65000:100", "type": "L3VRF"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:27e66bd8b5269276] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_import]', 'pk': 'R1-EMU:running:27e66bd8b5269276', 'position': '13', 'value': '{"policy_name": "cc20bd3149a0-NetInst_import"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:2984fb5629b5cfe0] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/table_connections[DIRECTLY_CONNECTED][BGP][IPV4]', 'pk': 'R1-EMU:running:2984fb5629b5cfe0', 'position': '6', 'value': '{"address_family": "IPV4", "dst_protocol": "BGP", "name": "cc20bd3149a0-NetInst", "src_protocol": "DIRECTLY_CONNECTED"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:2c9ac49a866a7cdd] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_import]/statement[3]', 'pk': 'R1-EMU:running:2c9ac49a866a7cdd', 'position': '14', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_import", "match_set_options": "ANY", "policy_name": "cc20bd3149a0-NetInst_import", "policy_result": "ACCEPT_ROUTE", "statement_name": "3"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:50183717ae57e751] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_export]', 'pk': 'R1-EMU:running:50183717ae57e751', 'position': '17', 'value': '{"policy_name": "cc20bd3149a0-NetInst_export"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:5079ff9ab906b93d] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_export]', 'pk': 'R1-EMU:running:5079ff9ab906b93d', 'position': '15', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_export"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:53b5477acb05a84b] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/table_connections[STATIC][BGP][IPV4]', 'pk': 'R1-EMU:running:53b5477acb05a84b', 'position': '5', 'value': '{"address_family": "IPV4", "dst_protocol": "BGP", "name": "cc20bd3149a0-NetInst", "src_protocol": "STATIC"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:69a4986d322f94e3] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_import]', 'pk': 'R1-EMU:running:69a4986d322f94e3', 'position': '11', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_import"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:89925119fe702a08] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/inter_instance_policies[cc20bd3149a0-NetInst_export]', 'pk': 'R1-EMU:running:89925119fe702a08', 'position': '8', 'value': '{"export_policy": "cc20bd3149a0-NetInst_export", "name": "cc20bd3149a0-NetInst"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:8c1c2310b4321b60] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_export]/statement[3]', 'pk': 'R1-EMU:running:8c1c2310b4321b60', 'position': '18', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_export", "match_set_options": "ANY", "policy_name": "cc20bd3149a0-NetInst_export", "policy_result": "ACCEPT_ROUTE", "statement_name": "3"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:93ceccde538188a9] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_export][route-target:65000:333]', 'pk': 'R1-EMU:running:93ceccde538188a9', 'position': '16', 'value': '{"ext_community_member": "route-target:65000:333", "ext_community_set_name": "cc20bd3149a0-NetInst_rt_export"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:afbfa638d35357a6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/endpoints/endpoint[13/1/2]', 'pk': 'R1-EMU:running:afbfa638d35357a6', 'position': '1', 'value': '{"sample_types": {"101": "/endpoints/endpoint[13/1/2]/state/packets_transmitted", "102": "/endpoints/endpoint[13/1/2]/state/packets_received", "201": "/endpoints/endpoint[13/1/2]/state/bytes_transmitted", "202": "/endpoints/endpoint[13/1/2]/state/bytes_received"}, "type": "copper", "uuid": "13/1/2"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:c5755579f473f6c6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/interface[13/1/2.400]', 'pk': 'R1-EMU:running:c5755579f473f6c6', 'position': '3', 'value': '{"id": "13/1/2.400", "interface": "13/1/2", "name": "cc20bd3149a0-NetInst", "subinterface": 400}'}
+  [dict] ConfigRuleModel[R1-EMU:running:c8bd82d94b5dfbcf] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/interface[13/1/2]', 'pk': 'R1-EMU:running:c8bd82d94b5dfbcf', 'position': '9', 'value': '{"description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetIf", "mtu": 1512, "name": "13/1/2"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:caeaf4702e128cde] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/protocols[BGP]', 'pk': 'R1-EMU:running:caeaf4702e128cde', 'position': '4', 'value': '{"as": 65000, "identifier": "BGP", "name": "cc20bd3149a0-NetInst", "protocol_name": "BGP"}'}
+  [dict] ConfigRuleModel[R1-EMU:running:efff2baa1ee0efd4] :: {'action': 'SET', 'config_fk': 'ConfigModel[R1-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/inter_instance_policies[cc20bd3149a0-NetInst_import]', 'pk': 'R1-EMU:running:efff2baa1ee0efd4', 'position': '7', 'value': '{"import_policy": "cc20bd3149a0-NetInst_import", "name": "cc20bd3149a0-NetInst"}'}
+  [dict] ConfigRuleModel[R2-EMU:running:1522507a2c46f302] :: {'action': 'SET', 'config_fk': 'ConfigModel[R2-EMU:running]', 'key': '/endpoints/endpoint[13/0/0]', 'pk': 'R2-EMU:running:1522507a2c46f302', 'position': '0', 'value': '{"sample_types": {}, "type": "optical", "uuid": "13/0/0"}'}
+  [dict] ConfigRuleModel[R2-EMU:running:afbfa638d35357a6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R2-EMU:running]', 'key': '/endpoints/endpoint[13/1/2]', 'pk': 'R2-EMU:running:afbfa638d35357a6', 'position': '1', 'value': '{"sample_types": {"101": "/endpoints/endpoint[13/1/2]/state/packets_transmitted", "102": "/endpoints/endpoint[13/1/2]/state/packets_received", "201": "/endpoints/endpoint[13/1/2]/state/bytes_transmitted", "202": "/endpoints/endpoint[13/1/2]/state/bytes_received"}, "type": "copper", "uuid": "13/1/2"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:01c09c16fa7a1c42] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_import][route-target:65000:333]', 'pk': 'R3-EMU:running:01c09c16fa7a1c42', 'position': '12', 'value': '{"ext_community_member": "route-target:65000:333", "ext_community_set_name": "cc20bd3149a0-NetInst_rt_import"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:1522507a2c46f302] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/endpoints/endpoint[13/0/0]', 'pk': 'R3-EMU:running:1522507a2c46f302', 'position': '0', 'value': '{"sample_types": {}, "type": "optical", "uuid": "13/0/0"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:25538c1a5a7413d6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]', 'pk': 'R3-EMU:running:25538c1a5a7413d6', 'position': '2', 'value': '{"description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetIf", "name": "cc20bd3149a0-NetInst", "route_distinguisher": "65000:200", "type": "L3VRF"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:27e66bd8b5269276] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_import]', 'pk': 'R3-EMU:running:27e66bd8b5269276', 'position': '13', 'value': '{"policy_name": "cc20bd3149a0-NetInst_import"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:2984fb5629b5cfe0] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/table_connections[DIRECTLY_CONNECTED][BGP][IPV4]', 'pk': 'R3-EMU:running:2984fb5629b5cfe0', 'position': '6', 'value': '{"address_family": "IPV4", "dst_protocol": "BGP", "name": "cc20bd3149a0-NetInst", "src_protocol": "DIRECTLY_CONNECTED"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:2c9ac49a866a7cdd] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_import]/statement[3]', 'pk': 'R3-EMU:running:2c9ac49a866a7cdd', 'position': '14', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_import", "match_set_options": "ANY", "policy_name": "cc20bd3149a0-NetInst_import", "policy_result": "ACCEPT_ROUTE", "statement_name": "3"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:50183717ae57e751] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_export]', 'pk': 'R3-EMU:running:50183717ae57e751', 'position': '17', 'value': '{"policy_name": "cc20bd3149a0-NetInst_export"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:5079ff9ab906b93d] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_export]', 'pk': 'R3-EMU:running:5079ff9ab906b93d', 'position': '15', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_export"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:53b5477acb05a84b] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/table_connections[STATIC][BGP][IPV4]', 'pk': 'R3-EMU:running:53b5477acb05a84b', 'position': '5', 'value': '{"address_family": "IPV4", "dst_protocol": "BGP", "name": "cc20bd3149a0-NetInst", "src_protocol": "STATIC"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:69a4986d322f94e3] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_import]', 'pk': 'R3-EMU:running:69a4986d322f94e3', 'position': '11', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_import"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:89925119fe702a08] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/inter_instance_policies[cc20bd3149a0-NetInst_export]', 'pk': 'R3-EMU:running:89925119fe702a08', 'position': '8', 'value': '{"export_policy": "cc20bd3149a0-NetInst_export", "name": "cc20bd3149a0-NetInst"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:8c1c2310b4321b60] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/policy_definition[cc20bd3149a0-NetInst_export]/statement[3]', 'pk': 'R3-EMU:running:8c1c2310b4321b60', 'position': '18', 'value': '{"ext_community_set_name": "cc20bd3149a0-NetInst_rt_export", "match_set_options": "ANY", "policy_name": "cc20bd3149a0-NetInst_export", "policy_result": "ACCEPT_ROUTE", "statement_name": "3"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:93ceccde538188a9] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/routing_policy/bgp_defined_set[cc20bd3149a0-NetInst_rt_export][route-target:65000:333]', 'pk': 'R3-EMU:running:93ceccde538188a9', 'position': '16', 'value': '{"ext_community_member": "route-target:65000:333", "ext_community_set_name": "cc20bd3149a0-NetInst_rt_export"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:afbfa638d35357a6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/endpoints/endpoint[13/1/2]', 'pk': 'R3-EMU:running:afbfa638d35357a6', 'position': '1', 'value': '{"sample_types": {"101": "/endpoints/endpoint[13/1/2]/state/packets_transmitted", "102": "/endpoints/endpoint[13/1/2]/state/packets_received", "201": "/endpoints/endpoint[13/1/2]/state/bytes_transmitted", "202": "/endpoints/endpoint[13/1/2]/state/bytes_received"}, "type": "copper", "uuid": "13/1/2"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:b2c708c23571c108] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/interface[13/1/2]/subinterface[500]', 'pk': 'R3-EMU:running:b2c708c23571c108', 'position': '10', 'value': '{"address_ip": "3.3.1.1", "address_prefix": 24, "description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetSubIf", "index": 500, "name": "13/1/2", "vlan_id": 500}'}
+  [dict] ConfigRuleModel[R3-EMU:running:c8bd82d94b5dfbcf] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/interface[13/1/2]', 'pk': 'R3-EMU:running:c8bd82d94b5dfbcf', 'position': '9', 'value': '{"description": "a6086be7-62a1-4bd1-a084-cc20bd3149a0-NetIf", "mtu": 1512, "name": "13/1/2"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:cabc97474c2cca7a] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/interface[13/1/2.500]', 'pk': 'R3-EMU:running:cabc97474c2cca7a', 'position': '3', 'value': '{"id": "13/1/2.500", "interface": "13/1/2", "name": "cc20bd3149a0-NetInst", "subinterface": 500}'}
+  [dict] ConfigRuleModel[R3-EMU:running:caeaf4702e128cde] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/protocols[BGP]', 'pk': 'R3-EMU:running:caeaf4702e128cde', 'position': '4', 'value': '{"as": 65000, "identifier": "BGP", "name": "cc20bd3149a0-NetInst", "protocol_name": "BGP"}'}
+  [dict] ConfigRuleModel[R3-EMU:running:efff2baa1ee0efd4] :: {'action': 'SET', 'config_fk': 'ConfigModel[R3-EMU:running]', 'key': '/network_instance[cc20bd3149a0-NetInst]/inter_instance_policies[cc20bd3149a0-NetInst_import]', 'pk': 'R3-EMU:running:efff2baa1ee0efd4', 'position': '7', 'value': '{"import_policy": "cc20bd3149a0-NetInst_import", "name": "cc20bd3149a0-NetInst"}'}
+  [dict] ConfigRuleModel[R4-EMU:running:1522507a2c46f302] :: {'action': 'SET', 'config_fk': 'ConfigModel[R4-EMU:running]', 'key': '/endpoints/endpoint[13/0/0]', 'pk': 'R4-EMU:running:1522507a2c46f302', 'position': '0', 'value': '{"sample_types": {}, "type": "optical", "uuid": "13/0/0"}'}
+  [dict] ConfigRuleModel[R4-EMU:running:afbfa638d35357a6] :: {'action': 'SET', 'config_fk': 'ConfigModel[R4-EMU:running]', 'key': '/endpoints/endpoint[13/1/2]', 'pk': 'R4-EMU:running:afbfa638d35357a6', 'position': '1', 'value': '{"sample_types": {"101": "/endpoints/endpoint[13/1/2]/state/packets_transmitted", "102": "/endpoints/endpoint[13/1/2]/state/packets_received", "201": "/endpoints/endpoint[13/1/2]/state/bytes_transmitted", "202": "/endpoints/endpoint[13/1/2]/state/bytes_received"}, "type": "copper", "uuid": "13/1/2"}'}
+  [dict] ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running:d0f8a2a67d26b6de] :: {'action': 'SET', 'config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running]', 'key': 'settings', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running:d0f8a2a67d26b6de', 'position': '0', 'value': '{"capacity_unit": "GHz", "capacity_value": 1, "direction": "UNIDIRECTIONAL", "layer_proto_name": "PHOTONIC_MEDIA", "layer_proto_qual": "tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC"}'}
+  [dict] ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:20b78ae0ce0a3460] :: {'action': 'SET', 'config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]', 'key': '/settings', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:20b78ae0ce0a3460', 'position': '0', 'value': '{"address_families": ["IPV4"], "bgp_as": 65000, "bgp_route_target": "65000:333", "mtu": 1512}'}
+  [dict] ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:299d6ac2d2edbf77] :: {'action': 'SET', 'config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]', 'key': '/device[R3-EMU]/endpoint[13/1/2]/settings', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:299d6ac2d2edbf77', 'position': '2', 'value': '{"address_ip": "3.3.1.1", "address_prefix": 24, "route_distinguisher": "65000:200", "router_id": "20.20.20.1", "sub_interface_index": 500, "vlan_id": 500}'}
+  [dict] ConfigRuleModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:6a3ef52e178adab0] :: {'action': 'SET', 'config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]', 'key': '/device[R1-EMU]/endpoint[13/1/2]/settings', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running:6a3ef52e178adab0', 'position': '1', 'value': '{"address_ip": "3.3.2.1", "address_prefix": 24, "route_distinguisher": "65000:100", "router_id": "10.10.10.1", "sub_interface_index": 400, "vlan_id": 400}'}
+  [ set] ConnectionModel/instances                :: {'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]'}
+  [dict] ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router] :: {'connection_uuid': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]'}
+  [ set] ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]/references :: {'ConnectionSubServiceModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router--admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]:connection_fk'}
+  [dict] ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system] :: {'connection_uuid': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]'}
+  [ set] ConnectionSubServiceModel/instances      :: {'ConnectionSubServiceModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router--admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]'}
+  [dict] ConnectionSubServiceModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router--admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical] :: {'connection_fk': 'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router--admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical', 'sub_service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]'}
+  [ set] ConstraintsModel/instances               :: {'ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:constraints]', 'ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:constraints]'}
+  [dict] ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:constraints] :: {'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:constraints'}
+  [ set] ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:constraints]/references :: {'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]:service_constraints_fk'}
+  [dict] ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:constraints] :: {'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:constraints'}
+  [ set] ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:constraints]/references :: {'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]:service_constraints_fk'}
+  [ set] ContextModel/instances                   :: {'ContextModel[admin]'}
+  [dict] ContextModel[admin]                      :: {'context_uuid': 'admin', 'pk': 'admin'}
+  [ set] ContextModel[admin]/references           :: {'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]:context_fk', 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]:context_fk', 'TopologyModel[admin/admin]:context_fk'}
+  [ set] DeviceModel/instances                    :: {'DeviceModel[O1-OLS]', 'DeviceModel[R1-EMU]', 'DeviceModel[R2-EMU]', 'DeviceModel[R3-EMU]', 'DeviceModel[R4-EMU]'}
+  [dict] DeviceModel[O1-OLS]                      :: {'device_config_fk': 'ConfigModel[O1-OLS:running]', 'device_operational_status': 'DISABLED', 'device_type': 'emu-optical-line-system', 'device_uuid': 'O1-OLS', 'pk': 'O1-OLS'}
+  [ set] DeviceModel[O1-OLS]/references           :: {'DriverModel[O1-OLS/UNDEFINED]:device_fk', 'EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]:device_fk', 'EndPointModel[O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]:device_fk', 'EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]:device_fk', 'EndPointModel[O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]:device_fk'}
+  [dict] DeviceModel[R1-EMU]                      :: {'device_config_fk': 'ConfigModel[R1-EMU:running]', 'device_operational_status': 'DISABLED', 'device_type': 'emu-packet-router', 'device_uuid': 'R1-EMU', 'pk': 'R1-EMU'}
+  [ set] DeviceModel[R1-EMU]/references           :: {'DriverModel[R1-EMU/UNDEFINED]:device_fk', 'EndPointModel[R1-EMU/13/0/0]:device_fk', 'EndPointModel[R1-EMU/13/1/2]:device_fk'}
+  [dict] DeviceModel[R2-EMU]                      :: {'device_config_fk': 'ConfigModel[R2-EMU:running]', 'device_operational_status': 'DISABLED', 'device_type': 'emu-packet-router', 'device_uuid': 'R2-EMU', 'pk': 'R2-EMU'}
+  [ set] DeviceModel[R2-EMU]/references           :: {'DriverModel[R2-EMU/UNDEFINED]:device_fk', 'EndPointModel[R2-EMU/13/0/0]:device_fk', 'EndPointModel[R2-EMU/13/1/2]:device_fk'}
+  [dict] DeviceModel[R3-EMU]                      :: {'device_config_fk': 'ConfigModel[R3-EMU:running]', 'device_operational_status': 'DISABLED', 'device_type': 'emu-packet-router', 'device_uuid': 'R3-EMU', 'pk': 'R3-EMU'}
+  [ set] DeviceModel[R3-EMU]/references           :: {'DriverModel[R3-EMU/UNDEFINED]:device_fk', 'EndPointModel[R3-EMU/13/0/0]:device_fk', 'EndPointModel[R3-EMU/13/1/2]:device_fk'}
+  [dict] DeviceModel[R4-EMU]                      :: {'device_config_fk': 'ConfigModel[R4-EMU:running]', 'device_operational_status': 'DISABLED', 'device_type': 'emu-packet-router', 'device_uuid': 'R4-EMU', 'pk': 'R4-EMU'}
+  [ set] DeviceModel[R4-EMU]/references           :: {'DriverModel[R4-EMU/UNDEFINED]:device_fk', 'EndPointModel[R4-EMU/13/0/0]:device_fk', 'EndPointModel[R4-EMU/13/1/2]:device_fk'}
+  [ set] DriverModel/instances                    :: {'DriverModel[O1-OLS/UNDEFINED]', 'DriverModel[R1-EMU/UNDEFINED]', 'DriverModel[R2-EMU/UNDEFINED]', 'DriverModel[R3-EMU/UNDEFINED]', 'DriverModel[R4-EMU/UNDEFINED]'}
+  [dict] DriverModel[O1-OLS/UNDEFINED]            :: {'device_fk': 'DeviceModel[O1-OLS]', 'driver': 'UNDEFINED', 'pk': 'O1-OLS/UNDEFINED'}
+  [dict] DriverModel[R1-EMU/UNDEFINED]            :: {'device_fk': 'DeviceModel[R1-EMU]', 'driver': 'UNDEFINED', 'pk': 'R1-EMU/UNDEFINED'}
+  [dict] DriverModel[R2-EMU/UNDEFINED]            :: {'device_fk': 'DeviceModel[R2-EMU]', 'driver': 'UNDEFINED', 'pk': 'R2-EMU/UNDEFINED'}
+  [dict] DriverModel[R3-EMU/UNDEFINED]            :: {'device_fk': 'DeviceModel[R3-EMU]', 'driver': 'UNDEFINED', 'pk': 'R3-EMU/UNDEFINED'}
+  [dict] DriverModel[R4-EMU/UNDEFINED]            :: {'device_fk': 'DeviceModel[R4-EMU]', 'driver': 'UNDEFINED', 'pk': 'R4-EMU/UNDEFINED'}
+  [ set] EndPointModel/instances                  :: {'EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'EndPointModel[O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]', 'EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'EndPointModel[O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'EndPointModel[R1-EMU/13/0/0]', 'EndPointModel[R1-EMU/13/1/2]', 'EndPointModel[R2-EMU/13/0/0]', 'EndPointModel[R2-EMU/13/1/2]', 'EndPointModel[R3-EMU/13/0/0]', 'EndPointModel[R3-EMU/13/1/2]', 'EndPointModel[R4-EMU/13/0/0]', 'EndPointModel[R4-EMU/13/1/2]'}
+  [dict] EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513] :: {'device_fk': 'DeviceModel[O1-OLS]', 'endpoint_type': 'optical', 'endpoint_uuid': '0ef74f99-1acc-57bd-ab9d-4b958b06c513', 'pk': 'O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513'}
+  [ set] EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]/references :: {'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--O1-OLS]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]:endpoint_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]:endpoint_fk'}
+  [dict] EndPointModel[O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec] :: {'device_fk': 'DeviceModel[O1-OLS]', 'endpoint_type': 'optical', 'endpoint_uuid': '50296d99-58cc-5ce7-82f5-fc8ee4eec2ec', 'pk': 'O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec'}
+  [ set] EndPointModel[O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]/references :: {'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--O1-OLS]:endpoint_fk'}
+  [dict] EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870] :: {'device_fk': 'DeviceModel[O1-OLS]', 'endpoint_type': 'optical', 'endpoint_uuid': 'aade6001-f00b-5e2f-a357-6a0a9d3de870', 'pk': 'O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870'}
+  [ set] EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]/references :: {'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--O1-OLS]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]:endpoint_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]:endpoint_fk'}
+  [dict] EndPointModel[O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418] :: {'device_fk': 'DeviceModel[O1-OLS]', 'endpoint_type': 'optical', 'endpoint_uuid': 'eb287d83-f05e-53ec-ab5a-adf6bd2b5418', 'pk': 'O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418'}
+  [ set] EndPointModel[O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]/references :: {'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--O1-OLS]:endpoint_fk'}
+  [dict] EndPointModel[R1-EMU/13/0/0]             :: {'device_fk': 'DeviceModel[R1-EMU]', 'endpoint_type': 'optical', 'endpoint_uuid': '13/0/0', 'pk': 'R1-EMU/13/0/0'}
+  [ set] EndPointModel[R1-EMU/13/0/0]/references  :: {'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--R1-EMU]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/0/0]:endpoint_fk'}
+  [dict] EndPointModel[R1-EMU/13/1/2]             :: {'device_fk': 'DeviceModel[R1-EMU]', 'endpoint_type': 'copper', 'endpoint_uuid': '13/1/2', 'pk': 'R1-EMU/13/1/2'}
+  [ set] EndPointModel[R1-EMU/13/1/2]/references  :: {'KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_TRANSMITTED]:endpoint_fk', 'KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_TRANSMITTED]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/1/2]:endpoint_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R1-EMU/13/1/2]:endpoint_fk'}
+  [dict] EndPointModel[R2-EMU/13/0/0]             :: {'device_fk': 'DeviceModel[R2-EMU]', 'endpoint_type': 'optical', 'endpoint_uuid': '13/0/0', 'pk': 'R2-EMU/13/0/0'}
+  [ set] EndPointModel[R2-EMU/13/0/0]/references  :: {'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--R2-EMU]:endpoint_fk'}
+  [dict] EndPointModel[R2-EMU/13/1/2]             :: {'device_fk': 'DeviceModel[R2-EMU]', 'endpoint_type': 'copper', 'endpoint_uuid': '13/1/2', 'pk': 'R2-EMU/13/1/2'}
+  [ set] EndPointModel[R2-EMU/13/1/2]/references  :: {'KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_TRANSMITTED]:endpoint_fk', 'KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_TRANSMITTED]:endpoint_fk'}
+  [dict] EndPointModel[R3-EMU/13/0/0]             :: {'device_fk': 'DeviceModel[R3-EMU]', 'endpoint_type': 'optical', 'endpoint_uuid': '13/0/0', 'pk': 'R3-EMU/13/0/0'}
+  [ set] EndPointModel[R3-EMU/13/0/0]/references  :: {'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--R3-EMU]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/0/0]:endpoint_fk'}
+  [dict] EndPointModel[R3-EMU/13/1/2]             :: {'device_fk': 'DeviceModel[R3-EMU]', 'endpoint_type': 'copper', 'endpoint_uuid': '13/1/2', 'pk': 'R3-EMU/13/1/2'}
+  [ set] EndPointModel[R3-EMU/13/1/2]/references  :: {'KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_TRANSMITTED]:endpoint_fk', 'KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_TRANSMITTED]:endpoint_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/1/2]:endpoint_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R3-EMU/13/1/2]:endpoint_fk'}
+  [dict] EndPointModel[R4-EMU/13/0/0]             :: {'device_fk': 'DeviceModel[R4-EMU]', 'endpoint_type': 'optical', 'endpoint_uuid': '13/0/0', 'pk': 'R4-EMU/13/0/0'}
+  [ set] EndPointModel[R4-EMU/13/0/0]/references  :: {'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--R4-EMU]:endpoint_fk'}
+  [dict] EndPointModel[R4-EMU/13/1/2]             :: {'device_fk': 'DeviceModel[R4-EMU]', 'endpoint_type': 'copper', 'endpoint_uuid': '13/1/2', 'pk': 'R4-EMU/13/1/2'}
+  [ set] EndPointModel[R4-EMU/13/1/2]/references  :: {'KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_TRANSMITTED]:endpoint_fk', 'KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_RECEIVED]:endpoint_fk', 'KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_TRANSMITTED]:endpoint_fk'}
+  [ set] KpiSampleTypeModel/instances             :: {'KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_RECEIVED]', 'KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_TRANSMITTED]', 'KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_RECEIVED]', 'KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_TRANSMITTED]', 'KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_RECEIVED]', 'KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_TRANSMITTED]', 'KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_RECEIVED]', 'KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_TRANSMITTED]', 'KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_RECEIVED]', 'KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_TRANSMITTED]', 'KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_RECEIVED]', 'KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_TRANSMITTED]', 'KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_RECEIVED]', 'KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_TRANSMITTED]', 'KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_RECEIVED]', 'KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_TRANSMITTED]'}
+  [dict] KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_RECEIVED', 'pk': 'R1-EMU/13/1/2/BYTES_RECEIVED'}
+  [dict] KpiSampleTypeModel[R1-EMU/13/1/2/BYTES_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_TRANSMITTED', 'pk': 'R1-EMU/13/1/2/BYTES_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_RECEIVED', 'pk': 'R1-EMU/13/1/2/PACKETS_RECEIVED'}
+  [dict] KpiSampleTypeModel[R1-EMU/13/1/2/PACKETS_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_TRANSMITTED', 'pk': 'R1-EMU/13/1/2/PACKETS_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R2-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_RECEIVED', 'pk': 'R2-EMU/13/1/2/BYTES_RECEIVED'}
+  [dict] KpiSampleTypeModel[R2-EMU/13/1/2/BYTES_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R2-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_TRANSMITTED', 'pk': 'R2-EMU/13/1/2/BYTES_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R2-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_RECEIVED', 'pk': 'R2-EMU/13/1/2/PACKETS_RECEIVED'}
+  [dict] KpiSampleTypeModel[R2-EMU/13/1/2/PACKETS_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R2-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_TRANSMITTED', 'pk': 'R2-EMU/13/1/2/PACKETS_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_RECEIVED', 'pk': 'R3-EMU/13/1/2/BYTES_RECEIVED'}
+  [dict] KpiSampleTypeModel[R3-EMU/13/1/2/BYTES_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_TRANSMITTED', 'pk': 'R3-EMU/13/1/2/BYTES_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_RECEIVED', 'pk': 'R3-EMU/13/1/2/PACKETS_RECEIVED'}
+  [dict] KpiSampleTypeModel[R3-EMU/13/1/2/PACKETS_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_TRANSMITTED', 'pk': 'R3-EMU/13/1/2/PACKETS_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R4-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_RECEIVED', 'pk': 'R4-EMU/13/1/2/BYTES_RECEIVED'}
+  [dict] KpiSampleTypeModel[R4-EMU/13/1/2/BYTES_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R4-EMU/13/1/2]', 'kpi_sample_type': 'BYTES_TRANSMITTED', 'pk': 'R4-EMU/13/1/2/BYTES_TRANSMITTED'}
+  [dict] KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_RECEIVED] :: {'endpoint_fk': 'EndPointModel[R4-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_RECEIVED', 'pk': 'R4-EMU/13/1/2/PACKETS_RECEIVED'}
+  [dict] KpiSampleTypeModel[R4-EMU/13/1/2/PACKETS_TRANSMITTED] :: {'endpoint_fk': 'EndPointModel[R4-EMU/13/1/2]', 'kpi_sample_type': 'PACKETS_TRANSMITTED', 'pk': 'R4-EMU/13/1/2/PACKETS_TRANSMITTED'}
+  [ set] LinkEndPointModel/instances              :: {'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--O1-OLS]', 'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--R1-EMU]', 'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--O1-OLS]', 'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--R2-EMU]', 'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--O1-OLS]', 'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--R3-EMU]', 'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--O1-OLS]', 'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--R4-EMU]'}
+  [dict] LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--O1-OLS] :: {'endpoint_fk': 'EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'link_fk': 'LinkModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'pk': 'R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--O1-OLS'}
+  [dict] LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--R1-EMU] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/0/0]', 'link_fk': 'LinkModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'pk': 'R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--R1-EMU'}
+  [dict] LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--O1-OLS] :: {'endpoint_fk': 'EndPointModel[O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'link_fk': 'LinkModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'pk': 'R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--O1-OLS'}
+  [dict] LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--R2-EMU] :: {'endpoint_fk': 'EndPointModel[R2-EMU/13/0/0]', 'link_fk': 'LinkModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'pk': 'R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--R2-EMU'}
+  [dict] LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--O1-OLS] :: {'endpoint_fk': 'EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'link_fk': 'LinkModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'pk': 'R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--O1-OLS'}
+  [dict] LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--R3-EMU] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/0/0]', 'link_fk': 'LinkModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'pk': 'R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--R3-EMU'}
+  [dict] LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--O1-OLS] :: {'endpoint_fk': 'EndPointModel[O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]', 'link_fk': 'LinkModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]', 'pk': 'R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--O1-OLS'}
+  [dict] LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--R4-EMU] :: {'endpoint_fk': 'EndPointModel[R4-EMU/13/0/0]', 'link_fk': 'LinkModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]', 'pk': 'R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--R4-EMU'}
+  [ set] LinkModel/instances                      :: {'LinkModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'LinkModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]', 'LinkModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'LinkModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]'}
+  [dict] LinkModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870] :: {'link_uuid': 'R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870', 'pk': 'R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870'}
+  [ set] LinkModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]/references :: {'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--O1-OLS]:link_fk', 'LinkEndPointModel[R1-EMU/13/0/0==O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870--R1-EMU]:link_fk'}
+  [dict] LinkModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418] :: {'link_uuid': 'R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418', 'pk': 'R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418'}
+  [ set] LinkModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418]/references :: {'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--O1-OLS]:link_fk', 'LinkEndPointModel[R2-EMU/13/0/0==O1-OLS/eb287d83-f05e-53ec-ab5a-adf6bd2b5418--R2-EMU]:link_fk'}
+  [dict] LinkModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513] :: {'link_uuid': 'R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513', 'pk': 'R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513'}
+  [ set] LinkModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]/references :: {'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--O1-OLS]:link_fk', 'LinkEndPointModel[R3-EMU/13/0/0==O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513--R3-EMU]:link_fk'}
+  [dict] LinkModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec] :: {'link_uuid': 'R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec', 'pk': 'R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec'}
+  [ set] LinkModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec]/references :: {'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--O1-OLS]:link_fk', 'LinkEndPointModel[R4-EMU/13/0/0==O1-OLS/50296d99-58cc-5ce7-82f5-fc8ee4eec2ec--R4-EMU]:link_fk'}
+  [ set] PathHopModel/instances                   :: {'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/0/0]', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/1/2]', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/0/0]', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/1/2]', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/0/0] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/0/0]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/0/0', 'position': '1'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/1/2] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/1/2', 'position': '0'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/0/0] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/0/0]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/0/0', 'position': '2'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/1/2] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/1/2', 'position': '3'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513] :: {'endpoint_fk': 'EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513', 'position': '1'}
+  [dict] PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870] :: {'endpoint_fk': 'EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'path_fk': 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870', 'position': '0'}
+  [ set] PathModel/instances                      :: {'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]', 'PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]'}
+  [dict] PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router] :: {'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router'}
+  [ set] PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]/references :: {'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/0/0]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R1-EMU/13/1/2]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/0/0]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router:R3-EMU/13/1/2]:path_fk'}
+  [dict] PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system] :: {'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system'}
+  [ set] PathModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]/references :: {'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]:path_fk', 'PathHopModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system:O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]:path_fk'}
+  [ set] ServiceEndPointModel/instances           :: {'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R1-EMU/13/1/2]', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R3-EMU/13/1/2]', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]'}
+  [dict] ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R1-EMU/13/1/2] :: {'endpoint_fk': 'EndPointModel[R1-EMU/13/1/2]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0--R1-EMU/13/1/2', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]'}
+  [dict] ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R3-EMU/13/1/2] :: {'endpoint_fk': 'EndPointModel[R3-EMU/13/1/2]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0--R3-EMU/13/1/2', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]'}
+  [dict] ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513] :: {'endpoint_fk': 'EndPointModel[O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]'}
+  [dict] ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870] :: {'endpoint_fk': 'EndPointModel[O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]', 'pk': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870', 'service_fk': 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]'}
+  [ set] ServiceModel/instances                   :: {'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]', 'ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]'}
+  [dict] ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical] :: {'context_fk': 'ContextModel[admin]', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical', 'service_config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:running]', 'service_constraints_fk': 'ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:constraints]', 'service_status': 'ACTIVE', 'service_type': 'TAPI_CONNECTIVITY_SERVICE', 'service_uuid': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical'}
+  [ set] ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]/references :: {'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical:emu-optical-line-system]:service_fk', 'ConnectionSubServiceModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router--admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical]:sub_service_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/0ef74f99-1acc-57bd-ab9d-4b958b06c513]:service_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:optical--O1-OLS/aade6001-f00b-5e2f-a357-6a0a9d3de870]:service_fk'}
+  [dict] ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0] :: {'context_fk': 'ContextModel[admin]', 'pk': 'admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0', 'service_config_fk': 'ConfigModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:running]', 'service_constraints_fk': 'ConstraintsModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0:constraints]', 'service_status': 'ACTIVE', 'service_type': 'L3NM', 'service_uuid': 'a6086be7-62a1-4bd1-a084-cc20bd3149a0'}
+  [ set] ServiceModel[admin/a6086be7-62a1-4bd1-a084-cc20bd3149a0]/references :: {'ConnectionModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0:emu-packet-router]:service_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R1-EMU/13/1/2]:service_fk', 'ServiceEndPointModel[a6086be7-62a1-4bd1-a084-cc20bd3149a0--R3-EMU/13/1/2]:service_fk'}
+  [ set] TopologyModel/instances                  :: {'TopologyModel[admin/admin]'}
+  [dict] TopologyModel[admin/admin]               :: {'context_fk': 'ContextModel[admin]', 'pk': 'admin/admin', 'topology_uuid': 'admin'}
+-----------------------------------------------------------
\ No newline at end of file
diff --git a/src/l3_attackmitigator/service/l3_attackmitigatorServiceServicerImpl.py b/src/l3_attackmitigator/service/l3_attackmitigatorServiceServicerImpl.py
index e53bcd214f57cb351cb2f3d24787898dce8bf155..3398d15a5fa368e61b1b63faf0519c3fec1964f7 100644
--- a/src/l3_attackmitigator/service/l3_attackmitigatorServiceServicerImpl.py
+++ b/src/l3_attackmitigator/service/l3_attackmitigatorServiceServicerImpl.py
@@ -14,182 +14,106 @@
 
 from __future__ import print_function
 import logging
-from common.proto.l3_centralizedattackdetector_pb2 import (
-    Empty
-)
-from common.proto.l3_attackmitigator_pb2_grpc import (
-    L3AttackmitigatorServicer,
-)
+from common.proto.l3_centralizedattackdetector_pb2 import Empty
+from common.proto.l3_attackmitigator_pb2_grpc import L3AttackmitigatorServicer
 from common.proto.context_pb2 import (
-    Service, ServiceId, ServiceConfig, ServiceTypeEnum, ServiceStatusEnum, ServiceStatus, Context, ContextId, Uuid, Timestamp, ConfigRule, ConfigRule_Custom, ConfigActionEnum, Device, DeviceId, DeviceConfig, DeviceOperationalStatusEnum, DeviceDriverEnum, EndPoint, Link, LinkId, EndPoint, EndPointId, Topology, TopologyId
-)
-from common.proto.context_pb2_grpc import (
-    ContextServiceStub
-)
-from common.proto.service_pb2_grpc import (
-    ServiceServiceStub
+    Service,
+    ServiceId,
+    ServiceConfig,
+    ServiceTypeEnum,
+    ServiceStatusEnum,
+    ServiceStatus,
+    Context,
+    ContextId,
+    Uuid,
+    Timestamp,
+    ConfigRule,
+    ConfigRule_Custom,
+    ConfigActionEnum,
+    Device,
+    DeviceId,
+    DeviceConfig,
+    DeviceOperationalStatusEnum,
+    DeviceDriverEnum,
+    EndPoint,
+    Link,
+    LinkId,
+    EndPoint,
+    EndPointId,
+    Topology,
+    TopologyId,
 )
+from common.proto.context_pb2_grpc import ContextServiceStub
+from common.proto.service_pb2_grpc import ServiceServiceStub
 from datetime import datetime
 import grpc
+import time
+import json
+
+# KPIs and Monitoring
+from common.proto.monitoring_pb2 import KpiDescriptor
+from common.proto.kpi_sample_types_pb2 import KpiSampleType
+
+# from monitoring.client.MonitoringClient import MonitoringClient
+from monitoring.client.MonitoringClient import MonitoringClient
+from common.proto.monitoring_pb2 import Kpi
+from common.proto.context_pb2 import Timestamp
 
 LOGGER = logging.getLogger(__name__)
 CONTEXT_CHANNEL = "192.168.165.78:1010"
 SERVICE_CHANNEL = "192.168.165.78:3030"
 
-class l3_attackmitigatorServiceServicerImpl(L3AttackmitigatorServicer):
-
-    def GetNewService(self, service_id):
-        service = Service()
-        service_id_obj = self.GenerateServiceId(service_id)
-        """
-        ServiceId()
-        context_id = ContextId()
-        uuid = Uuid()
-        uuid.uuid = service_id
-        context_id.context_uuid.CopyFrom(uuid)
-        service_id_obj.context_id.CopyFrom(context_id)
-        service_id_obj.service_uuid.CopyFrom(uuid)
-        """
-        service.service_id.CopyFrom(service_id_obj)
-        service.service_type = ServiceTypeEnum.SERVICETYPE_L3NM
-        service_status = ServiceStatus()
-        service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
-        service.service_status.CopyFrom(service_status)
-        timestamp = Timestamp()
-        timestamp.timestamp = datetime.timestamp(datetime.now())
-        service.timestamp.CopyFrom(timestamp)
-        return service
-    
-    def GetNewContext(self, service_id):
-        context = Context()
-        context_id = ContextId()
-        uuid = Uuid()
-        uuid.uuid = service_id
-        context_id.context_uuid.CopyFrom(uuid)
-        context.context_id.CopyFrom(context_id)
-        return context
-
-    def GetNewDevice(self, service_id):
-        device = Device()
-        device_id = DeviceId()
-        uuid = Uuid()
-        uuid.uuid = service_id
-        device_id.device_uuid.CopyFrom(uuid)
-        device.device_type="test"
-        device.device_id.CopyFrom(device_id)
-        device.device_operational_status = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
-        return device
-
-    def GetNewLink(self, service_id):
-        link = Link()
-        link_id = LinkId()
-        uuid = Uuid()
-        uuid.uuid = service_id
-        link_id.link_uuid.CopyFrom(uuid)
-        link.link_id.CopyFrom(link_id)
-        return link
-
-    def GetNewTopology(self,context_id, device_id, link_id):
-        topology = Topology()
-        topology_id = TopologyId()
-        topology_id.context_id.CopyFrom(context_id)
-        uuid = Uuid()
-        uuid.uuid = "test_crypto"
-        topology_id.topology_uuid.CopyFrom(uuid)
-        topology.topology_id.CopyFrom(topology_id)
-        topology.device_ids.extend([device_id])
-        topology.link_ids.extend([link_id])
-        return topology
-
-    def GetNewEndpoint(self, topology_id, device_id, uuid_name):
-        endpoint = EndPoint()
-        endpoint_id = EndPointId()
-        endpoint_id.topology_id.CopyFrom(topology_id)
-        endpoint_id.device_id.CopyFrom(device_id)
-        uuid = Uuid()
-        uuid.uuid = uuid_name
-        endpoint_id.endpoint_uuid.CopyFrom(uuid)
-        endpoint.endpoint_id.CopyFrom(endpoint_id)
-        endpoint.endpoint_type = "test"
-        return endpoint
-        
 
+class l3_attackmitigatorServiceServicerImpl(L3AttackmitigatorServicer):
     def __init__(self):
         LOGGER.debug("Creating Servicer...")
         self.last_value = -1
         self.last_tag = 0
-        
-        context = self.GetNewContext("test_crypto")
-        print(context, flush=True)
-        print(self.SetContext(context))
+        self.monitoring_client = MonitoringClient()
+        self.predicted_class_kpi_id = None
+        self.class_probability_kpi_id = None
 
-        service = self.GetNewService("test_crypto")
-        print("This is the new service", self.CreateService(service), flush = True)
+    def create_predicted_class_kpi(self, client: MonitoringClient, service_id):
+        # create kpi
+        kpi_description: KpiDescriptor = KpiDescriptor()
+        kpi_description.kpi_description = "L3 security status of service {}".format(service_id)
+        # kpi_description.service_id.service_uuid.uuid = service_id
+        kpi_description.service_id.service_uuid.uuid = str(service_id)
+        kpi_description.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_UNKNOWN
+        new_kpi = client.SetKpi(kpi_description)
 
-        ip_o = "127.0.0.1"
-        ip_d = "127.0.0.2"
-        port_o = "123"
-        port_d = "124"
+        LOGGER.info("Created Predicted Class KPI {}...".format(new_kpi.kpi_id))
 
-        service_id = self.GenerateServiceId("test_crypto")
+        return new_kpi
 
-        config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
-
-        service = self.GetService(service_id)
-        print("Service obtained from id", service, flush=True)
-        
-        config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
+    def create_class_prob_kpi(self, client: MonitoringClient, service_id):
+        # create kpi
+        kpi_description: KpiDescriptor = KpiDescriptor()
+        kpi_description.kpi_description = "L3 security status of service {}".format(service_id)
+        kpi_description.service_id.service_uuid.uuid = service_id
+        kpi_description.kpi_sample_type = KpiSampleType.KPISAMPLETYPE_UNKNOWN
+        new_kpi = client.SetKpi(kpi_description)
 
-        #service_config = service.service_config
-        #service_config.append(config_rule)
-        
-        service_config = ServiceConfig()
-        service_config.config_rules.extend([config_rule])
-        service.service_config.CopyFrom(service_config)
-        
-        device = self.GetNewDevice("test_crypto")
-        print("New device", device, flush=True)
-        device_id = self.SetDevice(device)
-
-        link = self.GetNewLink("test_crypto")
-        print("New link", link, flush=True)
-        link_id = self.SetLink(link)
-        
-        topology = self.GetNewTopology(context.context_id, device.device_id, link.link_id)
-        print("New topology", topology, flush=True)
-        topology_id = self.SetTopology(topology)
-
-        endpoint = self.GetNewEndpoint(topology.topology_id, device.device_id, "test_crypto")
-        print("New endpoint", endpoint, flush=True)
-        link.link_endpoint_ids.extend([endpoint.endpoint_id])
-
-        self.SetLink(link)
-
-        print("Service with new rule", service, flush=True)
-        self.UpdateService(service)
+        LOGGER.info("Created Class Probability KPI {}...".format(new_kpi.kpi_id))
 
-        service2 = self.GetService(service_id)
-        print("Service obtained from id after updating", service2, flush=True)
-        
+        return new_kpi
 
     def GenerateRuleValue(self, ip_o, ip_d, port_o, port_d):
         value = {
-            'ipv4:source-address': ip_o,
-            'ipv4:destination-address': ip_d,
-            'transport:source-port': port_o,
-            'transport:destination-port': port_d,
-            'forwarding-action': 'DROP',
+            "ipv4:source-address": ip_o,
+            "ipv4:destination-address": ip_d,
+            "transport:source-port": port_o,
+            "transport:destination-port": port_d,
+            "forwarding-action": "DROP",
         }
         return value
 
-    def GetConfigRule(self, ip_o, ip_d, port_o, port_d):
-        config_rule = ConfigRule()
-        config_rule_custom = ConfigRule_Custom()
-        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
-        config_rule_custom.resource_key = 'test'
-        config_rule_custom.resource_value = str(self.GenerateRuleValue(ip_o, ip_d, port_o, port_d))
-        config_rule.custom.CopyFrom(config_rule_custom)
-        return config_rule
+    def GenerateContextId(self, context_id):
+        context_id_obj = ContextId()
+        uuid = Uuid()
+        uuid.uuid = context_id
+        context_id_obj.context_uuid.CopyFrom(uuid)
+        return context_id_obj
 
     def GenerateServiceId(self, service_id):
         service_id_obj = ServiceId()
@@ -200,11 +124,21 @@ class l3_attackmitigatorServiceServicerImpl(L3AttackmitigatorServicer):
         service_id_obj.context_id.CopyFrom(context_id)
         service_id_obj.service_uuid.CopyFrom(uuid)
         return service_id_obj
-   
+
+    def GetConfigRule(self, ip_o, ip_d, port_o, port_d):
+        config_rule = ConfigRule()
+        config_rule_custom = ConfigRule_Custom()
+        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
+        config_rule_custom.resource_key = "test"
+        # config_rule_custom.resource_value = str(self.GenerateRuleValue(ip_o, ip_d, port_o, port_d))
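+        # json.dumps() emits valid JSON (double-quoted keys); str() on a dict
+        # yields a Python repr that JSON parsers reject.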
+        config_rule_custom.resource_value = json.dumps(self.GenerateRuleValue(ip_o, ip_d, port_o, port_d))
+        config_rule.custom.CopyFrom(config_rule_custom)
+        return config_rule
+
     def SendOutput(self, request, context):
         # SEND CONFIDENCE TO MITIGATION SERVER
         print("Server received mitigation values...", request.confidence, flush=True)
-        
+
         last_value = request.confidence
         last_tag = request.tag
 
@@ -213,62 +147,93 @@ class l3_attackmitigatorServiceServicerImpl(L3AttackmitigatorServicer):
         port_o = request.port_o
         port_d = request.port_d
 
-        service_id = self.GenerateServiceId(request.service_id)
+        # service_id = self.GenerateServiceId(request.service_id)
+        # service = GetService(service_id)
+
+        # context_id = self.GenerateContextId("admin")
+
+        sentinel = True
+        counter = 0
+
+        # service_id_list = self.ListServiceIds(context_id)
+
+        # print(hello, flush = True)
+        # print(hello.service_ids[0].service_uuid.uuid, flush=True)
+
+        # service_id = service_id_list.service_ids[0]
+        service_id = request.service_id
+
+        print("Service id: ", service_id, flush=True)
+
+        while sentinel:
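+            # Retry until the Context service returns the service record;
+            # GetService raises while the service is still unknown.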
+            try:
+                service = self.GetService(service_id)
+                sentinel = False
+            except Exception as e:
+                counter = counter + 1
+                print("Waiting 2 seconds", counter, e, flush=True)
+                time.sleep(2)
+
+        print("Service obtained from id: ", service, flush=True)
+
+        if self.predicted_class_kpi_id is None:
+            self.predicted_class_kpi_id = self.create_predicted_class_kpi(self.monitoring_client, service_id)
+
+        if self.class_probability_kpi_id is None:
+            self.class_probability_kpi_id = self.create_class_prob_kpi(self.monitoring_client, service_id)
+
+        # Monitoring
+        # Packet -> DAD -> CAD -> ML -> (2 Instantaneous Value: higher class probability, predicted class) -> Monitoring
+        # In addition, two counters:
+        # Counter 1: Total number of crypto attack connections
+        # Counter 2: Rate of crypto attack connections with respect to the total number of connections
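+        # A hypothetical sketch of those counters (attribute names are
+        # illustrative; they are not implemented in this patch):
+        #   self.total_connections += 1
+        #   if request.tag_name == "Crypto": self.attack_connections += 1
+        #   attack_rate = self.attack_connections / max(1, self.total_connections)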
+
+        kpi_class = Kpi()
+        kpi_class.kpi_id.kpi_id.uuid = self.predicted_class_kpi_id.kpi_id.uuid
+        kpi_class.kpi_value.int32Val = 1 if request.tag_name == "Crypto" else 0
+
+        kpi_prob = Kpi()
+        kpi_prob.kpi_id.kpi_id.uuid = self.class_probability_kpi_id.kpi_id.uuid
+        kpi_prob.kpi_value.floatVal = request.confidence
+
+        # Composite protobuf fields cannot be assigned directly; build one
+        # timestamp and CopyFrom() it into both KPIs.
+        timestamp = Timestamp()
+        timestamp.timestamp = datetime.timestamp(datetime.now())
+        kpi_class.timestamp.CopyFrom(timestamp)
+        kpi_prob.timestamp.CopyFrom(timestamp)
+
+        self.monitoring_client.IncludeKpi(kpi_class)
+        self.monitoring_client.IncludeKpi(kpi_prob)
 
         config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
-        
-        service = GetService(service_id)
-        print(service)
-        #service.config_rules.append(config_rule)
-        #UpdateService(service)
 
-        # RETURN OK TO THE CALLER
-        return Empty(
-            message=f"OK, received values: {last_tag} with confidence {last_value}."
-        )
-     
-    def SetDevice(self, device):
-        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
-            stub = ContextServiceStub(channel)
-            return stub.SetDevice(device)
+        service_config = ServiceConfig()
+        service_config.config_rules.extend([config_rule])
+        service.service_config.CopyFrom(service_config)
 
-    def SetLink(self, link):
-        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
-            stub = ContextServiceStub(channel)
-            return stub.SetLink(link)
+        print("Service with new rule: ", service, flush=True)
+        self.UpdateService(service)
 
-    def SetTopology(self, link):
-        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
-            stub = ContextServiceStub(channel)
-            return stub.SetTopology(link)
+        service2 = self.GetService(service_id)
+        print("Service obtained from id after updating: ", service2, flush=True)
 
+        # RETURN OK TO THE CALLER
+        return Empty(message=f"OK, received values: {last_tag} with confidence {last_value}.")
 
     def GetService(self, service_id):
         with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
             stub = ContextServiceStub(channel)
             return stub.GetService(service_id)
 
-    def SetContext(self, context):
+    def ListServiceIds(self, context_id):
         with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
             stub = ContextServiceStub(channel)
-            return stub.SetContext(context)
+            return stub.ListServiceIds(context_id)
 
     def UpdateService(self, service):
         with grpc.insecure_channel(SERVICE_CHANNEL) as channel:
             stub = ServiceServiceStub(channel)
             stub.UpdateService(service)
 
-    def CreateService(self, service):
-        with grpc.insecure_channel(SERVICE_CHANNEL) as channel:
-            stub = ServiceServiceStub(channel)
-            stub.CreateService(service)
-
     def GetMitigation(self, request, context):
         # GET OR PERFORM MITIGATION STRATEGY
         logging.debug("")
         print("Returing mitigation strategy...")
         k = self.last_value * 2
-        return Empty(
-            message=f"Mitigation with double confidence = {k}"
-        )
-    
+        return Empty(message=f"Mitigation with double confidence = {k}")
diff --git a/src/l3_attackmitigator/service/test_create_service.py b/src/l3_attackmitigator/service/test_create_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ddbc97d730114aa5497ff52e1aa696f82f1ee90
--- /dev/null
+++ b/src/l3_attackmitigator/service/test_create_service.py
@@ -0,0 +1,265 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import logging
+from common.proto.l3_centralizedattackdetector_pb2 import (
+    Empty
+)
+from common.proto.l3_attackmitigator_pb2_grpc import (
+    L3AttackmitigatorServicer,
+)
+from common.proto.context_pb2 import (
+    Service, ServiceId, ServiceConfig, ServiceTypeEnum, ServiceStatusEnum, ServiceStatus, Context, ContextId, Uuid, Timestamp, ConfigRule, ConfigRule_Custom, ConfigActionEnum, Device, DeviceId, DeviceConfig, DeviceOperationalStatusEnum, DeviceDriverEnum, EndPoint, Link, LinkId, EndPoint, EndPointId, Topology, TopologyId
+)
+from common.proto.context_pb2_grpc import (
+    ContextServiceStub
+)
+from common.proto.service_pb2_grpc import (
+    ServiceServiceStub
+)
+from datetime import datetime
+import grpc
+
+LOGGER = logging.getLogger(__name__)
+CONTEXT_CHANNEL = "192.168.165.78:1010"
+SERVICE_CHANNEL = "192.168.165.78:3030"
+
+class l3_attackmitigatorServiceServicerImpl(L3AttackmitigatorServicer):
+
+    def GetNewService(self, service_id):
+        service = Service()
+        service_id_obj = self.GenerateServiceId(service_id)
+        service.service_id.CopyFrom(service_id_obj)
+        service.service_type = ServiceTypeEnum.SERVICETYPE_L3NM
+        service_status = ServiceStatus()
+        service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+        service.service_status.CopyFrom(service_status)
+        timestamp = Timestamp()
+        timestamp.timestamp = datetime.timestamp(datetime.now())
+        service.timestamp.CopyFrom(timestamp)
+        return service
+    
+    def GetNewContext(self, service_id):
+        context = Context()
+        context_id = ContextId()
+        uuid = Uuid()
+        uuid.uuid = service_id
+        context_id.context_uuid.CopyFrom(uuid)
+        context.context_id.CopyFrom(context_id)
+        return context
+
+    def GetNewDevice(self, service_id):
+        device = Device()
+        device_id = DeviceId()
+        uuid = Uuid()
+        uuid.uuid = service_id
+        device_id.device_uuid.CopyFrom(uuid)
+        device.device_type="test"
+        device.device_id.CopyFrom(device_id)
+        device.device_operational_status = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+        return device
+
+    def GetNewLink(self, service_id):
+        link = Link()
+        link_id = LinkId()
+        uuid = Uuid()
+        uuid.uuid = service_id
+        link_id.link_uuid.CopyFrom(uuid)
+        link.link_id.CopyFrom(link_id)
+        return link
+
+    def GetNewTopology(self,context_id, device_id, link_id):
+        topology = Topology()
+        topology_id = TopologyId()
+        topology_id.context_id.CopyFrom(context_id)
+        uuid = Uuid()
+        uuid.uuid = "test_crypto"
+        topology_id.topology_uuid.CopyFrom(uuid)
+        topology.topology_id.CopyFrom(topology_id)
+        topology.device_ids.extend([device_id])
+        topology.link_ids.extend([link_id])
+        return topology
+
+    def GetNewEndpoint(self, topology_id, device_id, uuid_name):
+        endpoint = EndPoint()
+        endpoint_id = EndPointId()
+        endpoint_id.topology_id.CopyFrom(topology_id)
+        endpoint_id.device_id.CopyFrom(device_id)
+        uuid = Uuid()
+        uuid.uuid = uuid_name
+        endpoint_id.endpoint_uuid.CopyFrom(uuid)
+        endpoint.endpoint_id.CopyFrom(endpoint_id)
+        endpoint.endpoint_type = "test"
+        return endpoint
+        
+
+    def __init__(self):
+        LOGGER.debug("Creating Servicer...")
+        self.last_value = -1
+        self.last_tag = 0
+        """
+        context = self.GetNewContext("test_crypto")
+        print(context, flush=True)
+        print(self.SetContext(context))
+
+        service = self.GetNewService("test_crypto")
+        print("This is the new service", self.CreateService(service), flush = True)
+
+        ip_o = "127.0.0.1"
+        ip_d = "127.0.0.2"
+        port_o = "123"
+        port_d = "124"
+
+        service_id = self.GenerateServiceId("test_crypto")
+
+        config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
+
+        service = self.GetService(service_id)
+        print("Service obtained from id", service, flush=True)
+        
+        config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
+
+        #service_config = service.service_config
+        #service_config.append(config_rule)
+        
+        service_config = ServiceConfig()
+        service_config.config_rules.extend([config_rule])
+        service.service_config.CopyFrom(service_config)
+        
+        device = self.GetNewDevice("test_crypto")
+        print("New device", device, flush=True)
+        device_id = self.SetDevice(device)
+
+        link = self.GetNewLink("test_crypto")
+        print("New link", link, flush=True)
+        link_id = self.SetLink(link)
+        
+        topology = self.GetNewTopology(context.context_id, device.device_id, link.link_id)
+        print("New topology", topology, flush=True)
+        topology_id = self.SetTopology(topology)
+
+        endpoint = self.GetNewEndpoint(topology.topology_id, device.device_id, "test_crypto")
+        print("New endpoint", endpoint, flush=True)
+        link.link_endpoint_ids.extend([endpoint.endpoint_id])
+
+        self.SetLink(link)
+
+        print("Service with new rule", service, flush=True)
+        self.UpdateService(service)
+
+        service2 = self.GetService(service_id)
+        print("Service obtained from id after updating", service2, flush=True)
+        """
+
+    def GenerateRuleValue(self, ip_o, ip_d, port_o, port_d):
+        value = {
+            'ipv4:source-address': ip_o,
+            'ipv4:destination-address': ip_d,
+            'transport:source-port': port_o,
+            'transport:destination-port': port_d,
+            'forwarding-action': 'DROP',
+        }
+        return value
+
+    def GetConfigRule(self, ip_o, ip_d, port_o, port_d):
+        config_rule = ConfigRule()
+        config_rule_custom = ConfigRule_Custom()
+        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
+        config_rule_custom.resource_key = 'test'
+        config_rule_custom.resource_value = str(self.GenerateRuleValue(ip_o, ip_d, port_o, port_d))
+        config_rule.custom.CopyFrom(config_rule_custom)
+        return config_rule
+
+    def GenerateServiceId(self, service_id):
+        service_id_obj = ServiceId()
+        context_id = ContextId()
+        uuid = Uuid()
+        uuid.uuid = service_id
+        context_id.context_uuid.CopyFrom(uuid)
+        service_id_obj.context_id.CopyFrom(context_id)
+        service_id_obj.service_uuid.CopyFrom(uuid)
+        return service_id_obj
+   
+    def SendOutput(self, request, context):
+        # SEND CONFIDENCE TO MITIGATION SERVER
+        print("Server received mitigation values...", request.confidence, flush=True)
+        
+        last_value = request.confidence
+        last_tag = request.tag
+
+        ip_o = request.ip_o
+        ip_d = request.ip_d
+        port_o = request.port_o
+        port_d = request.port_d
+
+        service_id = self.GenerateServiceId(request.service_id)
+
+        config_rule = self.GetConfigRule(ip_o, ip_d, port_o, port_d)
+        
+        service = self.GetService(service_id)
+        print(service)
+        #service.config_rules.append(config_rule)
+        #UpdateService(service)
+
+        # RETURN OK TO THE CALLER
+        return Empty(
+            message=f"OK, received values: {last_tag} with confidence {last_value}."
+        )
+     
+    def SetDevice(self, device):
+        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
+            stub = ContextServiceStub(channel)
+            return stub.SetDevice(device)
+
+    def SetLink(self, link):
+        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
+            stub = ContextServiceStub(channel)
+            return stub.SetLink(link)
+
+    def SetTopology(self, link):
+        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
+            stub = ContextServiceStub(channel)
+            return stub.SetTopology(link)
+
+
+    def GetService(self, service_id):
+        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
+            stub = ContextServiceStub(channel)
+            return stub.GetService(service_id)
+
+    def SetContext(self, context):
+        with grpc.insecure_channel(CONTEXT_CHANNEL) as channel:
+            stub = ContextServiceStub(channel)
+            return stub.SetContext(context)
+
+    def UpdateService(self, service):
+        with grpc.insecure_channel(SERVICE_CHANNEL) as channel:
+            stub = ServiceServiceStub(channel)
+            stub.UpdateService(service)
+
+    def CreateService(self, service):
+        with grpc.insecure_channel(SERVICE_CHANNEL) as channel:
+            stub = ServiceServiceStub(channel)
+            stub.CreateService(service)
+
+    def GetMitigation(self, request, context):
+        # GET OR PERFORM MITIGATION STRATEGY
+        logging.debug("")
+        print("Returing mitigation strategy...")
+        k = self.last_value * 2
+        return Empty(
+            message=f"Mitigation with double confidence = {k}"
+        )
+    
diff --git a/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
index a84ac181ee70c66908e5fbd1a1254eb56d9a3c9b..f9f39cf7025d603a891445438b87578fccb27ac4 100644
--- a/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
+++ b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
@@ -19,26 +19,21 @@ import grpc
 import numpy as np
 import onnxruntime as rt
 import logging
-from common.proto.l3_centralizedattackdetector_pb2 import (
-    Empty,
-)
-from common.proto.l3_centralizedattackdetector_pb2_grpc import (
-    L3CentralizedattackdetectorServicer,
-)
-
-from common.proto.l3_attackmitigator_pb2 import (
-    L3AttackmitigatorOutput,
-)
-from common.proto.l3_attackmitigator_pb2_grpc import (
-    L3AttackmitigatorStub,
-)
+from common.proto.l3_centralizedattackdetector_pb2 import Empty
+from common.proto.l3_centralizedattackdetector_pb2_grpc import L3CentralizedattackdetectorServicer
+
+from common.proto.l3_attackmitigator_pb2 import L3AttackmitigatorOutput
+from common.proto.l3_attackmitigator_pb2_grpc import L3AttackmitigatorStub
+
 
 LOGGER = logging.getLogger(__name__)
 here = os.path.dirname(os.path.abspath(__file__))
-MODEL_FILE = os.path.join(here, "ml_model/teraflow_rf.onnx")
+MODEL_FILE = os.path.join(here, "ml_model/crypto_5g_rf_spider_features.onnx")
+
+classification_threshold = os.getenv("CAD_CLASSIFICATION_THRESHOLD", 0.5)
 
-class l3_centralizedattackdetectorServiceServicerImpl(L3CentralizedattackdetectorServicer):
 
+class l3_centralizedattackdetectorServiceServicerImpl(L3CentralizedattackdetectorServicer):
     def __init__(self):
         LOGGER.debug("Creating Servicer...")
         self.inference_values = []
@@ -46,25 +41,27 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
         self.input_name = self.model.get_inputs()[0].name
         self.label_name = self.model.get_outputs()[0].name
         self.prob_name = self.model.get_outputs()[1].name
-        
 
     def make_inference(self, request):
         # ML MODEL
-        x_data = np.array([
+        x_data = np.array(
+            [
                 [
-                    request.n_packets_server_seconds,
-                    request.n_packets_client_seconds,
-                    request.n_bits_server_seconds,
-                    request.n_bits_client_seconds,
-                    request.n_bits_server_n_packets_server,
-                    request.n_bits_client_n_packets_client,
-                    request.n_packets_server_n_packets_client,
-                    request.n_bits_server_n_bits_client,
+                    request.c_pkts_all,
+                    request.c_ack_cnt,
+                    request.c_bytes_uniq,
+                    request.c_pkts_data,
+                    request.c_bytes_all,
+                    request.s_pkts_all,
+                    request.s_ack_cnt,
+                    request.s_bytes_uniq,
+                    request.s_pkts_data,
+                    request.s_bytes_all,
                 ]
-            ])
+            ]
+        )
 
-        predictions = self.model.run(
-            [self.prob_name], {self.input_name: x_data.astype(np.float32)})[0]
+        predictions = self.model.run([self.prob_name], {self.input_name: x_data.astype(np.float32)})[0]
         # Output format
         output_message = {
             "confidence": None,
@@ -82,7 +79,7 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
             "time_start": request.time_start,
             "time_end": request.time_end,
         }
-        if predictions[0][1] >= 0.5:
+        if predictions[0][1] >= classification_threshold:
             output_message["confidence"] = predictions[0][1]
             output_message["tag_name"] = "Crypto"
             output_message["tag"] = 1
@@ -104,28 +101,25 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
         # MAKE INFERENCE
         output = self.make_inference(request)
 
-        # SEND INFO TO MITIGATION SERVER
-
-                
-        try:
-            with grpc.insecure_channel("192.168.165.78:10002") as channel:
-                stub = L3AttackmitigatorStub(channel)
-                print("Sending to mitigator...")
-                response = stub.SendOutput(output)
-                #print("Response received", response, "Hola", flush=True)
-                #print("Sent output to mitigator and received: ", response.message) #FIX No message received
-
-                # RETURN "OK" TO THE CALLER
-            return Empty(
-                 message="OK, information received and mitigator notified"
-            )
-        except Exception as e:
-            print("This is an exception", repr(e), flush=True)
-            print('Couldnt find l3_attackmitigator')
-            return Empty(
-                message="Mitigator Not found"
-            )
-        
+        if output.tag_name == "Crypto":
+            # SEND INFO TO MITIGATION SERVER
+            try:
+                with grpc.insecure_channel("192.168.165.78:10002") as channel:
+                    stub = L3AttackmitigatorStub(channel)
+                    print("Sending to mitigator...", flush=True)
+                    response = stub.SendOutput(output)
+                    # print("Response received", response, "Hola", flush=True)
+                    # print("Sent output to mitigator and received: ", response.message) #FIX No message received
+
+                    # RETURN "OK" TO THE CALLER
+                return Empty(message="OK, information received and mitigator notified abou the attack")
+            except Exception as e:
+                print("This is an exception", repr(e), flush=True)
+                print("Couldnt find l3_attackmitigator", flush=True)
+                return Empty(message="Mitigator Not found")
+        else:
+            print("No attack detected", flush=True)
+            return Empty(message="OK, information received (no attack detected)")
 
     def GetOutput(self, request, context):
         logging.debug("")
@@ -134,6 +128,3 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
         k = np.sum(k)
         return self.make_inference(k)
 
-
-
-    
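
The detector's decision reduces to comparing the "Crypto" class probability
against an environment-provided threshold. A minimal sketch of that rule in
isolation, assuming the same ONNX model layout as make_inference() above (the
zero feature vector is illustrative):

import os
import numpy as np
import onnxruntime as rt

# os.getenv returns a string when the variable is set, so convert explicitly
# before comparing against float probabilities.
threshold = float(os.getenv("CAD_CLASSIFICATION_THRESHOLD", 0.5))

model = rt.InferenceSession("ml_model/crypto_5g_rf_spider_features.onnx")
input_name = model.get_inputs()[0].name
prob_name = model.get_outputs()[1].name  # second output holds class probabilities

features = np.zeros((1, 10), dtype=np.float32)  # illustrative 10-feature sample
probs = model.run([prob_name], {input_name: features})[0]

is_attack = probs[0][1] >= threshold  # index 1 = "Crypto", as in make_inference()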
diff --git a/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl_old.py b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl_old.py
new file mode 100644
index 0000000000000000000000000000000000000000..7201d817dac581218130fcf74a81ea02804e416c
--- /dev/null
+++ b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl_old.py
@@ -0,0 +1,141 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+from datetime import datetime
+import os
+import grpc
+import numpy as np
+import onnxruntime as rt
+import logging
+from common.proto.l3_centralizedattackdetector_pb2 import (
+    Empty,
+)
+from common.proto.l3_centralizedattackdetector_pb2_grpc import (
+    L3CentralizedattackdetectorServicer,
+)
+
+from common.proto.l3_attackmitigator_pb2 import (
+    L3AttackmitigatorOutput,
+)
+from common.proto.l3_attackmitigator_pb2_grpc import (
+    L3AttackmitigatorStub,
+)
+
+LOGGER = logging.getLogger(__name__)
+here = os.path.dirname(os.path.abspath(__file__))
+MODEL_FILE = os.path.join(here, "ml_model/crypto_5g_rf_teraflow_features.onnx")
+
+classification_threshold = float(os.getenv("CAD_CLASSIFICATION_THRESHOLD", 0.5))
+
+class l3_centralizedattackdetectorServiceServicerImpl(L3CentralizedattackdetectorServicer):
+
+    def __init__(self):
+        LOGGER.debug("Creating Servicer...")
+        self.inference_values = []
+        self.model = rt.InferenceSession(MODEL_FILE)
+        self.input_name = self.model.get_inputs()[0].name
+        self.label_name = self.model.get_outputs()[0].name
+        self.prob_name = self.model.get_outputs()[1].name
+        
+
+    def make_inference(self, request):
+        # ML MODEL
+        x_data = np.array([
+                [
+                    request.n_packets_server_seconds,
+                    request.n_packets_client_seconds,
+                    request.n_bits_server_seconds,
+                    request.n_bits_client_seconds,
+                    request.n_bits_server_n_packets_server,
+                    request.n_bits_client_n_packets_client,
+                    request.n_packets_server_n_packets_client,
+                    request.n_bits_server_n_bits_client,
+                ]
+            ])
+
+        predictions = self.model.run(
+            [self.prob_name], {self.input_name: x_data.astype(np.float32)})[0]
+        # Output format
+        output_message = {
+            "confidence": None,
+            "timestamp": datetime.now().strftime("%d/%m/%Y %H:%M:%S"),
+            "ip_o": request.ip_o,
+            "ip_d": request.ip_d,
+            "tag_name": None,
+            "tag": None,
+            "flow_id": request.flow_id,
+            "protocol": request.protocol,
+            "port_o": request.port_o,
+            "port_d": request.port_d,
+            "ml_id": "RandomForest",
+     #       "service_id": request.service_id,
+            "time_start": request.time_start,
+            "time_end": request.time_end,
+        }
+        if predictions[0][1] >= classification_threshold:
+            output_message["confidence"] = predictions[0][1]
+            output_message["tag_name"] = "Crypto"
+            output_message["tag"] = 1
+        else:
+            output_message["confidence"] = predictions[0][0]
+            output_message["tag_name"] = "Normal"
+            output_message["tag"] = 0
+
+        return L3AttackmitigatorOutput(**output_message)
+
+    def SendInput(self, request, context):
+        # PERFORM INFERENCE WITH SENT INPUTS
+        logging.debug("")
+        print("Inferencing ...", flush=True)
+
+        # STORE VALUES
+        self.inference_values.append(request)
+
+        # MAKE INFERENCE
+        output = self.make_inference(request)
+
+        # SEND INFO TO MITIGATION SERVER
+
+                
+        try:
+            with grpc.insecure_channel("192.168.165.78:10002") as channel:
+                stub = L3AttackmitigatorStub(channel)
+                print("Sending to mitigator...")
+                response = stub.SendOutput(output)
+                #print("Response received", response, "Hola", flush=True)
+                #print("Sent output to mitigator and received: ", response.message) #FIX No message received
+
+                # RETURN "OK" TO THE CALLER
+            return Empty(
+                 message="OK, information received and mitigator notified"
+            )
+        except Exception as e:
+            print("This is an exception", repr(e), flush=True)
+            print("Couldn't find l3_attackmitigator")
+            return Empty(
+                message="Mitigator Not found"
+            )
+        
+
+    def GetOutput(self, request, context):
+        logging.debug("")
+        print("Returing inference output...")
+        k = np.multiply(self.inference_values, [2])
+        k = np.sum(k)
+        return self.make_inference(k)
+
+
+
+    
diff --git a/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_spider_features.onnx b/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_spider_features.onnx
new file mode 100644
index 0000000000000000000000000000000000000000..5b22b5f3d673960bf6d9a72bb6873d8f4886b89a
Binary files /dev/null and b/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_spider_features.onnx differ
diff --git a/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_teraflow_features.onnx b/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_teraflow_features.onnx
new file mode 100644
index 0000000000000000000000000000000000000000..37fa52d45db8e027ed4a645923d0d5b6f316bfa3
Binary files /dev/null and b/src/l3_centralizedattackdetector/service/ml_model/crypto_5g_rf_teraflow_features.onnx differ
diff --git a/src/l3_centralizedattackdetector/service/ml_model/teraflow_rf.onnx b/src/l3_centralizedattackdetector/service/ml_model/teraflow_rf_OLD.onnx
similarity index 100%
rename from src/l3_centralizedattackdetector/service/ml_model/teraflow_rf.onnx
rename to src/l3_centralizedattackdetector/service/ml_model/teraflow_rf_OLD.onnx
diff --git a/src/tests/ofc22/descriptors_emulated.json b/src/tests/ofc22/descriptors_emulated.json
index eb22506238e03a161f0e2b8aaeadf5fd31cf547b..126a879121f564a72278ffa8fa36fc3f699a9898 100644
--- a/src/tests/ofc22/descriptors_emulated.json
+++ b/src/tests/ofc22/descriptors_emulated.json
@@ -18,9 +18,9 @@
             "device_id": {"device_uuid": {"uuid": "R1-EMU"}},
             "device_type": "emu-packet-router",
             "device_config": {"config_rules": [
-                {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
-                {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
-                {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}}
+                {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"},
+                {"action": 1, "resource_key": "_connect/port", "resource_value": "0"},
+                {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}
             ]},
             "device_operational_status": 1,
             "device_drivers": [0],
@@ -30,9 +30,9 @@
             "device_id": {"device_uuid": {"uuid": "R2-EMU"}},
             "device_type": "emu-packet-router",
             "device_config": {"config_rules": [
-                {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
-                {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
-                {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}}
+                {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"},
+                {"action": 1, "resource_key": "_connect/port", "resource_value": "0"},
+                {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}
             ]},
             "device_operational_status": 1,
             "device_drivers": [0],
@@ -42,9 +42,9 @@
             "device_id": {"device_uuid": {"uuid": "R3-EMU"}},
             "device_type": "emu-packet-router",
             "device_config": {"config_rules": [
-                {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
-                {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
-                {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}}
+                {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"},
+                {"action": 1, "resource_key": "_connect/port", "resource_value": "0"},
+                {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}
             ]},
             "device_operational_status": 1,
             "device_drivers": [0],
@@ -54,9 +54,9 @@
             "device_id": {"device_uuid": {"uuid": "R4-EMU"}},
             "device_type": "emu-packet-router",
             "device_config": {"config_rules": [
-                {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
-                {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
-                {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}}
+                {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"},
+                {"action": 1, "resource_key": "_connect/port", "resource_value": "0"},
+                {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}
             ]},
             "device_operational_status": 1,
             "device_drivers": [0],
@@ -66,9 +66,9 @@
             "device_id": {"device_uuid": {"uuid": "O1-OLS"}},
             "device_type": "emu-optical-line-system",
             "device_config": {"config_rules": [
-                {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
-                {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
-                {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}}
+                {"action": 1, "resource_key": "_connect/address", "resource_value": "127.0.0.1"},
+                {"action": 1, "resource_key": "_connect/port", "resource_value": "0"},
+                {"action": 1, "resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}
             ]},
             "device_operational_status": 1,
             "device_drivers": [0],
@@ -105,4 +105,4 @@
             ]
         }
     ]
-}
+}
\ No newline at end of file
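
The descriptor change above removes the "custom" wrapper from every config
rule. A hypothetical one-off migration helper for old-format files; the
function name and the top-level "devices" key are assumptions, not part of
this patch:

import json

def flatten_config_rules(descriptor: dict) -> dict:
    # Rewrites {"action": 1, "custom": {"resource_key": K, "resource_value": V}}
    # into     {"action": 1, "resource_key": K, "resource_value": V}.
    # Assumes the TeraFlow descriptor layout with a top-level "devices" list.
    for device in descriptor.get("devices", []):
        rules = device.get("device_config", {}).get("config_rules", [])
        for index, rule in enumerate(rules):
            if "custom" in rule:
                rules[index] = {"action": rule["action"], **rule["custom"]}
    return descriptor

with open("descriptors_emulated.json") as handle:
    migrated = flatten_config_rules(json.load(handle))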
diff --git a/src/tests/ofc22/tests/LoadDescriptors.py b/src/tests/ofc22/tests/LoadDescriptors.py
index 4d3af78f5c9a3fd9b09d94f24bb8aaec48af6b7a..33bc699af933601e4c6d4b8dbc7b0c51206241ef 100644
--- a/src/tests/ofc22/tests/LoadDescriptors.py
+++ b/src/tests/ofc22/tests/LoadDescriptors.py
@@ -15,7 +15,7 @@
 import json, logging, sys
 from common.Settings import get_setting
 from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import Context, Device, Link, Topology
+from common.proto.context_pb2 import Context, Device, Link, Topology
 from device.client.DeviceClient import DeviceClient
 
 LOGGER = logging.getLogger(__name__)
diff --git a/src/tests/ofc22/tests/Objects.py b/src/tests/ofc22/tests/Objects.py
index bda08d7761ab3ad794246e6f94932c147a787993..d2fb32ebb20b7bcdda9ac12b7a7390c46e6fb1d1 100644
--- a/src/tests/ofc22/tests/Objects.py
+++ b/src/tests/ofc22/tests/Objects.py
@@ -21,7 +21,7 @@ from common.tools.object_factory.Device import (
 from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id
 from common.tools.object_factory.Link import json_link, json_link_id
 from common.tools.object_factory.Topology import json_topology, json_topology_id
-from context.proto.kpi_sample_types_pb2 import KpiSampleType
+from common.proto.kpi_sample_types_pb2 import KpiSampleType
 
 # ----- Context --------------------------------------------------------------------------------------------------------
 CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID)
@@ -228,4 +228,4 @@ DEVICES = [
     (DEVICE_O1, DEVICE_O1_CONNECT_RULES),
 ]
 
-LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1]
+LINKS = [LINK_R1_O1, LINK_R2_O1, LINK_R3_O1, LINK_R4_O1]
\ No newline at end of file
diff --git a/src/tests/ofc22/tests/common/Constants.py b/src/tests/ofc22/tests/common/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..f18d4384035f2310355d7a16c5a709720b5b07e9
--- /dev/null
+++ b/src/tests/ofc22/tests/common/Constants.py
@@ -0,0 +1,80 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from enum import Enum
+
+# Default logging level
+DEFAULT_LOG_LEVEL = logging.WARNING
+
+# Default gRPC server settings
+DEFAULT_GRPC_BIND_ADDRESS = '0.0.0.0'
+DEFAULT_GRPC_MAX_WORKERS  = 10
+DEFAULT_GRPC_GRACE_PERIOD = 60
+
+# Default HTTP server settings
+DEFAULT_HTTP_BIND_ADDRESS = '0.0.0.0'
+
+# Default Prometheus settings
+DEFAULT_METRICS_PORT = 9192
+
+# Default context and topology UUIDs
+DEFAULT_CONTEXT_UUID = 'admin'
+DEFAULT_TOPOLOGY_UUID = 'admin'
+
+# Default service names
+class ServiceNameEnum(Enum):
+    CONTEXT       = 'context'
+    DEVICE        = 'device'
+    SERVICE       = 'service'
+    SLICE         = 'slice'
+    AUTOMATION    = 'automation'
+    POLICY        = 'policy'
+    MONITORING    = 'monitoring'
+    DLT           = 'dlt'
+    COMPUTE       = 'compute'
+    CYBERSECURITY = 'cybersecurity'
+    INTERDOMAIN   = 'interdomain'
+    PATHCOMP      = 'pathcomp'
+    WEBUI         = 'webui'
+
+# Default gRPC service ports
+DEFAULT_SERVICE_GRPC_PORTS = {
+    ServiceNameEnum.CONTEXT      .value :  1010,
+    ServiceNameEnum.DEVICE       .value :  2020,
+    ServiceNameEnum.SERVICE      .value :  3030,
+    ServiceNameEnum.SLICE        .value :  4040,
+    ServiceNameEnum.AUTOMATION   .value :  5050,
+    ServiceNameEnum.POLICY       .value :  6060,
+    ServiceNameEnum.MONITORING   .value :  7070,
+    ServiceNameEnum.DLT          .value :  8080,
+    ServiceNameEnum.COMPUTE      .value :  9090,
+    ServiceNameEnum.CYBERSECURITY.value : 10000,
+    ServiceNameEnum.INTERDOMAIN  .value : 10010,
+    ServiceNameEnum.PATHCOMP     .value : 10020,
+}
+
+# Default HTTP/REST-API service ports
+DEFAULT_SERVICE_HTTP_PORTS = {
+    ServiceNameEnum.CONTEXT   .value : 8080,
+    ServiceNameEnum.COMPUTE   .value : 8080,
+    ServiceNameEnum.WEBUI     .value : 8004,
+}
+
+# Default HTTP/REST-API service base URLs
+DEFAULT_SERVICE_HTTP_BASEURLS = {
+    ServiceNameEnum.CONTEXT   .value : '/api',
+    ServiceNameEnum.COMPUTE   .value : '/restconf/data',
+    ServiceNameEnum.WEBUI     .value : None,
+}
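
For orientation, a minimal sketch (not part of the patch) of how these default maps are meant to be consumed; only names defined in Constants.py above are used:

```python
from common.Constants import DEFAULT_SERVICE_GRPC_PORTS, ServiceNameEnum

# Look up the default gRPC port of the Context service.
assert DEFAULT_SERVICE_GRPC_PORTS[ServiceNameEnum.CONTEXT.value] == 1010

# WEBUI intentionally has no gRPC port entry, so .get() yields None for it.
assert DEFAULT_SERVICE_GRPC_PORTS.get(ServiceNameEnum.WEBUI.value) is None
```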
diff --git a/src/tests/ofc22/tests/common/DeviceTypes.py b/src/tests/ofc22/tests/common/DeviceTypes.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf871a2d5afa6a73f1c9dd39431c64a7f31bbd7e
--- /dev/null
+++ b/src/tests/ofc22/tests/common/DeviceTypes.py
@@ -0,0 +1,26 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+
+class DeviceTypeEnum(Enum):
+    EMULATED_OPTICAL_LINE_SYSTEM = 'emu-optical-line-system'
+    EMULATED_PACKET_ROUTER       = 'emu-packet-router'
+    MICROWAVE_RADIO_SYSTEM       = 'microwave-radio-system'
+    OPTICAL_ROADM                = 'optical-roadm'
+    OPTICAL_TRANSPONDER          = 'optical-transponder'
+    OPTICAL_LINE_SYSTEM          = 'optical-line-system'
+    PACKET_ROUTER                = 'packet-router'
+    PACKET_SWITCH                = 'packet-switch'
+    P4_SWITCH                    = 'p4-switch'
diff --git a/src/tests/ofc22/tests/common/Settings.py b/src/tests/ofc22/tests/common/Settings.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9d5f406d2acad0d31ae94b604183c129d28f146
--- /dev/null
+++ b/src/tests/ofc22/tests/common/Settings.py
@@ -0,0 +1,98 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os, time
+from typing import List
+from common.Constants import (
+    DEFAULT_GRPC_BIND_ADDRESS, DEFAULT_GRPC_GRACE_PERIOD, DEFAULT_GRPC_MAX_WORKERS, DEFAULT_HTTP_BIND_ADDRESS,
+    DEFAULT_LOG_LEVEL, DEFAULT_METRICS_PORT, DEFAULT_SERVICE_GRPC_PORTS, DEFAULT_SERVICE_HTTP_BASEURLS,
+    DEFAULT_SERVICE_HTTP_PORTS, ServiceNameEnum
+)
+
+LOGGER = logging.getLogger(__name__)
+
+DEFAULT_RESTART_DELAY = 5.0 # seconds
+
+ENVVAR_KUBERNETES_PORT            = 'KUBERNETES_PORT'
+ENVVAR_GRPC_BIND_ADDRESS          = 'GRPC_BIND_ADDRESS'
+ENVVAR_GRPC_MAX_WORKERS           = 'GRPC_MAX_WORKERS'
+ENVVAR_GRPC_GRACE_PERIOD          = 'GRPC_GRACE_PERIOD'
+ENVVAR_HTTP_BIND_ADDRESS          = 'HTTP_BIND_ADDRESS'
+ENVVAR_LOG_LEVEL                  = 'LOG_LEVEL'
+ENVVAR_METRICS_PORT               = 'METRICS_PORT'
+
+ENVVAR_SUFIX_SERVICE_BASEURL_HTTP = 'SERVICE_BASEURL_HTTP'
+ENVVAR_SUFIX_SERVICE_HOST         = 'SERVICE_HOST'
+ENVVAR_SUFIX_SERVICE_PORT_GRPC    = 'SERVICE_PORT_GRPC'
+ENVVAR_SUFIX_SERVICE_PORT_HTTP    = 'SERVICE_PORT_HTTP'
+
+def wait_for_environment_variables(
+    required_environment_variables : List[str] = [], wait_delay_seconds : float = DEFAULT_RESTART_DELAY
+):
+    if ENVVAR_KUBERNETES_PORT not in os.environ: return # We're not running in Kubernetes, nothing to wait for
+    missing_variables = set(required_environment_variables).difference(set(os.environ.keys()))
+    if len(missing_variables) == 0: return # We have all environment variables defined
+    msg = 'Variables({:s}) are missing in Environment({:s}), restarting in {:f} seconds...'
+    LOGGER.error(msg.format(str(missing_variables), str(os.environ), wait_delay_seconds))
+    time.sleep(wait_delay_seconds)
+    raise Exception('Restarting...')
+
+def get_setting(name, **kwargs):
+    value = os.environ.get(name)
+    if 'settings' in kwargs:
+        value = kwargs['settings'].pop(name, value)
+    if value is not None: return value
+    if 'default' in kwargs: return kwargs['default']
+    raise Exception('Setting({:s}) not specified in environment or configuration'.format(str(name)))
+
+def get_env_var_name(service_name : ServiceNameEnum, env_var_group):
+    return ('{:s}SERVICE_{:s}'.format(service_name.value, env_var_group)).upper()
+
+def get_service_host(service_name : ServiceNameEnum):
+    envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_HOST)
+    default_value = ('{:s}service'.format(service_name.value))
+    return get_setting(envvar_name, default=default_value)
+
+def get_service_port_grpc(service_name : ServiceNameEnum):
+    envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_PORT_GRPC)
+    default_value = DEFAULT_SERVICE_GRPC_PORTS.get(service_name.value)
+    return get_setting(envvar_name, default=default_value)
+
+def get_service_port_http(service_name : ServiceNameEnum):
+    envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_PORT_HTTP)
+    default_value = DEFAULT_SERVICE_HTTP_PORTS.get(service_name.value)
+    return get_setting(envvar_name, default=default_value)
+
+def get_service_baseurl_http(service_name : ServiceNameEnum):
+    envvar_name = get_env_var_name(service_name, ENVVAR_SUFIX_SERVICE_BASEURL_HTTP)
+    default_value = DEFAULT_SERVICE_HTTP_BASEURLS.get(service_name.value)
+    return get_setting(envvar_name, default=default_value)
+
+def get_log_level():
+    return get_setting(ENVVAR_LOG_LEVEL, default=DEFAULT_LOG_LEVEL)
+
+def get_metrics_port():
+    return get_setting(ENVVAR_METRICS_PORT, default=DEFAULT_METRICS_PORT)
+
+def get_grpc_bind_address():
+    return get_setting(ENVVAR_GRPC_BIND_ADDRESS, default=DEFAULT_GRPC_BIND_ADDRESS)
+
+def get_grpc_max_workers():
+    return get_setting(ENVVAR_GRPC_MAX_WORKERS, default=DEFAULT_GRPC_MAX_WORKERS)
+
+def get_grpc_grace_period():
+    return get_setting(ENVVAR_GRPC_GRACE_PERIOD, default=DEFAULT_GRPC_GRACE_PERIOD)
+
+def get_http_bind_address():
+    return get_setting(ENVVAR_HTTP_BIND_ADDRESS, default=DEFAULT_HTTP_BIND_ADDRESS)
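
A short sketch (not part of the patch) of how these helpers resolve values, using only the functions defined above. Note that get_env_var_name() builds names of the form `<SERVICE>SERVICE_<SUFFIX>`, and that values read from the environment come back as strings while defaults keep their original type:

```python
import os
from common.Constants import ServiceNameEnum
from common.Settings import (
    ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc)

name = get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC)
assert name == 'CONTEXTSERVICE_SERVICE_PORT_GRPC'

# Without the variable set, the default from Constants.py is returned (an int)...
assert get_service_port_grpc(ServiceNameEnum.CONTEXT) == 1010

# ...and when present, the environment variable wins (returned as a string).
os.environ[name] = '11010'
assert get_service_port_grpc(ServiceNameEnum.CONTEXT) == '11010'
```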
diff --git a/src/tests/ofc22/tests/common/__init__.py b/src/tests/ofc22/tests/common/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/database/api/context/slice/SliceStatus.py b/src/tests/ofc22/tests/common/database/api/context/slice/SliceStatus.py
new file mode 100644
index 0000000000000000000000000000000000000000..d97b3944999e58cc2ad54f28ed6e22232b5fcd71
--- /dev/null
+++ b/src/tests/ofc22/tests/common/database/api/context/slice/SliceStatus.py
@@ -0,0 +1,31 @@
+from enum import Enum
+
+class SliceStatus(Enum):
+    PLANNED = 0
+    INIT    = 1
+    ACTIVE  = 2
+    DEINIT  = 3
+
+ANY_TO_ENUM = {
+    0: SliceStatus.PLANNED,
+    1: SliceStatus.INIT,
+    2: SliceStatus.ACTIVE,
+    3: SliceStatus.DEINIT,
+
+    '0': SliceStatus.PLANNED,
+    '1': SliceStatus.INIT,
+    '2': SliceStatus.ACTIVE,
+    '3': SliceStatus.DEINIT,
+
+    'planned': SliceStatus.PLANNED,
+    'init': SliceStatus.INIT,
+    'active': SliceStatus.ACTIVE,
+    'deinit': SliceStatus.DEINIT,
+}
+
+def slicestatus_enum_values():
+    return {m.value for m in SliceStatus.__members__.values()}
+
+def to_slicestatus_enum(int_or_str):
+    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
+    return ANY_TO_ENUM.get(int_or_str)
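
Usage of the mapping above, for reference (not part of the patch):

```python
from common.database.api.context.slice.SliceStatus import SliceStatus, to_slicestatus_enum

assert to_slicestatus_enum(2) == SliceStatus.ACTIVE
assert to_slicestatus_enum('2') == SliceStatus.ACTIVE
assert to_slicestatus_enum('ACTIVE') == SliceStatus.ACTIVE  # strings are lower-cased first
assert to_slicestatus_enum('unknown') is None               # unmapped values yield None
```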
diff --git a/src/tests/ofc22/tests/common/database/api/context/slice/__init__.py b/src/tests/ofc22/tests/common/database/api/context/slice/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/tests/ofc22/tests/common/logger.py b/src/tests/ofc22/tests/common/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..c90e0bcf30c41d6dab1d4f63cbb475ab544a019a
--- /dev/null
+++ b/src/tests/ofc22/tests/common/logger.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+#
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import sys
+from pythonjsonlogger import jsonlogger
+
+
+# TODO(yoshifumi): this class is duplicated because other Python services do
+# not yet share a common logging module.
+class CustomJsonFormatter(jsonlogger.JsonFormatter):
+    def add_fields(self, log_record, record, message_dict):
+        super(CustomJsonFormatter, self).add_fields(log_record, record, message_dict)
+        if not log_record.get('timestamp'):
+            log_record['timestamp'] = record.created
+        if log_record.get('severity'):
+            log_record['severity'] = log_record['severity'].upper()
+        else:
+            log_record['severity'] = record.levelname
+
+
+def getJSONLogger(name):
+    logger = logging.getLogger(name)
+    handler = logging.StreamHandler(sys.stdout)
+    # formatter = CustomJsonFormatter('(timestamp) (severity) (name) (message)')
+    formatter = logging.Formatter('[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s')
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+    logger.setLevel(logging.DEBUG)
+    return logger
+
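A minimal usage sketch (not part of the patch). One caveat visible in the code above: every call attaches a fresh StreamHandler to the (cached) logger, so calling getJSONLogger() repeatedly with the same name duplicates each log line:

```python
from common.logger import getJSONLogger

log = getJSONLogger('my-service')  # call once per module and reuse the instance
log.info('service started')
```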
diff --git a/src/tests/ofc22/tests/common/message_broker/Constants.py b/src/tests/ofc22/tests/common/message_broker/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3f5190742303b9948b6d973eb2d5030a17a89cc
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/Constants.py
@@ -0,0 +1,15 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CONSUME_TIMEOUT = 0.1 # seconds
diff --git a/src/tests/ofc22/tests/common/message_broker/Factory.py b/src/tests/ofc22/tests/common/message_broker/Factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..a64913df02805dabc9c1924ea16966e43454b7b5
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/Factory.py
@@ -0,0 +1,46 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os
+from typing import Optional, Union
+from .backend._Backend import _Backend
+from .backend.BackendEnum import BackendEnum
+from .backend.inmemory.InMemoryBackend import InMemoryBackend
+from .backend.redis.RedisBackend import RedisBackend
+
+LOGGER = logging.getLogger(__name__)
+
+BACKENDS = {
+    BackendEnum.INMEMORY.value: InMemoryBackend,
+    BackendEnum.REDIS.value: RedisBackend,
+    #BackendEnum.KAFKA.value: KafkaBackend,
+    #BackendEnum.RABBITMQ.value: RabbitMQBackend,
+    #BackendEnum.ZEROMQ.value: ZeroMQBackend,
+}
+
+DEFAULT_MB_BACKEND = BackendEnum.INMEMORY
+
+def get_messagebroker_backend(backend : Optional[Union[str, BackendEnum]] = None, **settings) -> _Backend:
+    # Return an instance of the selected MessageBroker backend class, initialized with the given settings.
+    # The backend is selected using the following criteria (the first one that is not None wins):
+    # 1. backend selected by parameter (backend=...)
+    # 2. environment variable MB_BACKEND
+    # 3. default backend: INMEMORY
+    if backend is None: backend = os.environ.get('MB_BACKEND', DEFAULT_MB_BACKEND)
+    if backend is None: raise Exception('MessageBroker Backend not specified')
+    if isinstance(backend, BackendEnum): backend = backend.value
+    backend_class = BACKENDS.get(backend)
+    if backend_class is None: raise Exception('Unsupported MessageBrokerBackend({:s})'.format(backend))
+    LOGGER.info('Selected MessageBroker Backend: {:s}'.format(backend))
+    return backend_class(**settings)
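
The three selection tiers, sketched (not part of the patch):

```python
import os
from common.message_broker.Factory import get_messagebroker_backend
from common.message_broker.backend.BackendEnum import BackendEnum

backend = get_messagebroker_backend(backend=BackendEnum.INMEMORY)  # 1. explicit parameter

os.environ['MB_BACKEND'] = 'inmemory'
backend = get_messagebroker_backend()                              # 2. MB_BACKEND variable

del os.environ['MB_BACKEND']
backend = get_messagebroker_backend()                              # 3. INMEMORY default
```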
diff --git a/src/tests/ofc22/tests/common/message_broker/Message.py b/src/tests/ofc22/tests/common/message_broker/Message.py
new file mode 100644
index 0000000000000000000000000000000000000000..484c795e77cf9699057a65a85a7df73c53779523
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/Message.py
@@ -0,0 +1,19 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import NamedTuple
+
+class Message(NamedTuple):
+    topic: str
+    content: str
diff --git a/src/tests/ofc22/tests/common/message_broker/MessageBroker.py b/src/tests/ofc22/tests/common/message_broker/MessageBroker.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a561727573247042b0d1755175d0730a97ef1d4
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/MessageBroker.py
@@ -0,0 +1,41 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import Iterator, Set
+from .backend._Backend import _Backend
+from .Constants import CONSUME_TIMEOUT
+from .Message import Message
+
+LOGGER = logging.getLogger(__name__)
+
+class MessageBroker:
+    def __init__(self, backend : _Backend):
+        if not isinstance(backend, _Backend):
+            str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+            raise AttributeError('backend must inherit from {}'.format(str_class_path))
+        self._backend = backend
+
+    @property
+    def backend(self) -> _Backend: return self._backend
+
+    def publish(self, message : Message) -> None:
+        self._backend.publish(message.topic, message.content)
+
+    def consume(self, topic_names : Set[str], consume_timeout : float = CONSUME_TIMEOUT) -> Iterator[Message]:
+        for pair in self._backend.consume(topic_names, consume_timeout=consume_timeout):
+            yield Message(*pair)
+
+    def terminate(self):
+        self._backend.terminate()
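
A self-contained publish/consume sketch (not part of the patch) using the in-memory backend. Since consume() blocks until terminate() is called, the consumer runs in its own thread, and it must be subscribed before anything is published:

```python
import threading, time
from common.message_broker.Factory import get_messagebroker_backend
from common.message_broker.Message import Message
from common.message_broker.MessageBroker import MessageBroker
from common.message_broker.backend.BackendEnum import BackendEnum

mb = MessageBroker(get_messagebroker_backend(BackendEnum.INMEMORY))
received = []

def consumer():
    for message in mb.consume({'devices'}, consume_timeout=0.1):
        received.append(message)

thread = threading.Thread(target=consumer, daemon=True)
thread.start()
time.sleep(0.2)                     # let the consumer register its queue first
mb.publish(Message(topic='devices', content='new-device-01'))
time.sleep(0.2)
mb.terminate()                      # unblocks consume() and ends the thread
thread.join()
assert received == [Message(topic='devices', content='new-device-01')]
```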
diff --git a/src/tests/ofc22/tests/common/message_broker/__init__.py b/src/tests/ofc22/tests/common/message_broker/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/BackendEnum.py b/src/tests/ofc22/tests/common/message_broker/backend/BackendEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf95f176479fb227503dd04a9dde2b81789ec006
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/BackendEnum.py
@@ -0,0 +1,22 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+
+class BackendEnum(Enum):
+    INMEMORY = 'inmemory'
+    REDIS = 'redis'
+    #KAFKA = 'kafka'
+    #RABBITMQ = 'rabbitmq'
+    #ZEROMQ = 'zeromq'
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/_Backend.py b/src/tests/ofc22/tests/common/message_broker/backend/_Backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e03b2bd27f4949f65d7df839bc5ba6d4da9df0d
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/_Backend.py
@@ -0,0 +1,28 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Iterator, Set, Tuple
+
+class _Backend:
+    def __init__(self, **settings) -> None:
+        raise NotImplementedError()
+
+    def terminate(self) -> None:
+        raise NotImplementedError()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        raise NotImplementedError()
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        raise NotImplementedError()
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/__init__.py b/src/tests/ofc22/tests/common/message_broker/backend/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/inmemory/InMemoryBackend.py b/src/tests/ofc22/tests/common/message_broker/backend/inmemory/InMemoryBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0a87c667b2bf6d48d8a482c601e48289de7bc90
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/inmemory/InMemoryBackend.py
@@ -0,0 +1,55 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# InMemory MessageBroker Backend
+# ------------------------------
+# - WARNING: DESIGNED AND BUILT FOR UNIT TESTING AND INTEGRATION TESTING PURPOSES ONLY !!!
+#            USE ANOTHER BACKEND IN PRODUCTION ENVIRONMENTS.
+
+import logging, threading
+from queue import Queue, Empty
+from typing import Dict, Iterator, Set, Tuple
+from .._Backend import _Backend
+
+LOGGER = logging.getLogger(__name__)
+
+class InMemoryBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        self._lock = threading.Lock()
+        self._terminate = threading.Event()
+        self._topic__to__queues : Dict[str, Set[Queue]] = {}
+
+    def terminate(self) -> None:
+        self._terminate.set()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        queues = self._topic__to__queues.get(topic_name, None)
+        if queues is None: return
+        for queue in queues: queue.put_nowait((topic_name, message_content))
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        queue = Queue()
+        for topic_name in topic_names:
+            self._topic__to__queues.setdefault(topic_name, set()).add(queue)
+
+        while not self._terminate.is_set():
+            try:
+                message = queue.get(block=True, timeout=consume_timeout)
+            except Empty:
+                continue
+            if message is None: continue
+            yield message
+
+        for topic_name in topic_names:
+            self._topic__to__queues.get(topic_name, set()).discard(queue)
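
One subtlety in this backend: a queue is registered only once a consumer starts iterating consume(), so anything published beforehand is silently dropped, as this sketch (not part of the patch) shows:

```python
from common.message_broker.backend.inmemory.InMemoryBackend import InMemoryBackend

backend = InMemoryBackend()
# No consumer has subscribed yet: publish() finds no queues for the topic
# and returns early, so this message is lost by design.
backend.publish('devices', 'lost-device-event')
```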
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/inmemory/__init__.py b/src/tests/ofc22/tests/common/message_broker/backend/inmemory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/inmemory/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/redis/RedisBackend.py b/src/tests/ofc22/tests/common/message_broker/backend/redis/RedisBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..41490913cbc8363ae86a031e9d7e703713349368
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/redis/RedisBackend.py
@@ -0,0 +1,58 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, threading
+from typing import Any, Dict, Iterator, Set, Tuple
+from redis.client import Redis
+
+from common.message_broker.Message import Message
+from .._Backend import _Backend
+
+DEFAULT_SERVICE_HOST = '127.0.0.1'
+DEFAULT_SERVICE_PORT = 6379
+DEFAULT_DATABASE_ID  = 0
+
+def get_setting(settings : Dict[str, Any], name : str, default : Any) -> Any:
+    value = settings.get(name, os.environ.get(name))
+    return default if value is None else value
+
+class RedisBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        host = get_setting(settings, 'REDIS_SERVICE_HOST', DEFAULT_SERVICE_HOST)
+        port = get_setting(settings, 'REDIS_SERVICE_PORT', DEFAULT_SERVICE_PORT)
+        dbid = get_setting(settings, 'REDIS_DATABASE_ID',  DEFAULT_DATABASE_ID )
+        self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
+        self._terminate = threading.Event()
+
+    def terminate(self) -> None:
+        self._terminate.set()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        self._client.publish(topic_name, message_content)
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        pubsub = self._client.pubsub(ignore_subscribe_messages=True)
+        for topic_name in topic_names: pubsub.subscribe(topic_name)
+
+        while not self._terminate.is_set():
+            message = pubsub.get_message(ignore_subscribe_messages=True, timeout=consume_timeout)
+            if message is None: continue
+            if message['type'] not in {'message', 'pmessage'}: continue
+            topic = message['channel'].decode('UTF-8')
+            content = message['data'].decode('UTF-8')
+            yield Message(topic, content)
+
+        pubsub.unsubscribe()
+        while pubsub.get_message() is not None: pass
+        pubsub.close()
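
A usage sketch (not part of the patch); it assumes a Redis server is actually reachable at the given coordinates, which here are just the module defaults:

```python
from common.message_broker.backend.redis.RedisBackend import RedisBackend

# Settings may be passed explicitly, via REDIS_* environment variables, or
# left to the defaults (127.0.0.1:6379, database 0).
backend = RedisBackend(REDIS_SERVICE_HOST='127.0.0.1', REDIS_SERVICE_PORT=6379)
backend.publish('devices', 'new-device-01')
backend.terminate()
```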
diff --git a/src/tests/ofc22/tests/common/message_broker/backend/redis/__init__.py b/src/tests/ofc22/tests/common/message_broker/backend/redis/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/backend/redis/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/message_broker/tests/__init__.py b/src/tests/ofc22/tests/common/message_broker/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/message_broker/tests/test_unitary.py b/src/tests/ofc22/tests/common/message_broker/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..925fcc83e163e9888494dcdc3cddaeaeec1d04fc
--- /dev/null
+++ b/src/tests/ofc22/tests/common/message_broker/tests/test_unitary.py
@@ -0,0 +1,167 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os, pytest, threading, time
+from typing import List, Set
+from common.message_broker.Factory import get_messagebroker_backend
+from common.message_broker.Message import Message
+from common.message_broker.MessageBroker import MessageBroker
+from common.message_broker.backend.BackendEnum import BackendEnum
+from common.message_broker.backend._Backend import _Backend
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+DEFAULT_REDIS_SERVICE_HOST = '127.0.0.1'
+DEFAULT_REDIS_SERVICE_PORT = 6379
+DEFAULT_REDIS_DATABASE_ID  = 0
+
+REDIS_CONFIG = {
+    'REDIS_SERVICE_HOST': os.environ.get('REDIS_SERVICE_HOST', DEFAULT_REDIS_SERVICE_HOST),
+    'REDIS_SERVICE_PORT': os.environ.get('REDIS_SERVICE_PORT', DEFAULT_REDIS_SERVICE_PORT),
+    'REDIS_DATABASE_ID' : os.environ.get('REDIS_DATABASE_ID',  DEFAULT_REDIS_DATABASE_ID ),
+}
+
+SCENARIOS = [
+    ('all_inmemory', BackendEnum.INMEMORY, {}          ),
+    ('all_redis',    BackendEnum.REDIS,    REDIS_CONFIG),
+]
+
+CONSUME_TIMEOUT = 0.1 # seconds
+
+TOPIC_DEVICES  = 'devices'
+TOPIC_LINKS    = 'links'
+TOPIC_SERVICES = 'services'
+
+class Consumer(threading.Thread):
+    def __init__(
+        self, message_broker : MessageBroker, # pylint: disable=redefined-outer-name
+        topic_names : Set[str], output_list : List[Message],
+        consume_timeout=CONSUME_TIMEOUT) -> None:
+
+        super().__init__(daemon=True)
+        self._message_broker = message_broker
+        self._topic_names = topic_names
+        self._output_list = output_list
+        self._consume_timeout = consume_timeout
+
+    def run(self) -> None:
+        LOGGER.info('{:s} subscribes to topics {:s}'.format(self.name, str(self._topic_names)))
+        for message in self._message_broker.consume(self._topic_names, consume_timeout=self._consume_timeout):
+            LOGGER.info('{:s} receives {:s}'.format(self.name, str(message)))
+            self._output_list.append(message)
+        LOGGER.info('{:s} terminates'.format(self.name))
+
+@pytest.fixture(scope='session', ids=[str(scenario[0]) for scenario in SCENARIOS], params=SCENARIOS)
+def message_broker(request):
+    name,mb_backend,mb_settings = request.param
+    msg = 'Running scenario {:s} mb_backend={:s}, mb_settings={:s}...'
+    LOGGER.info(msg.format(str(name), str(mb_backend.value), str(mb_settings)))
+    _message_broker = MessageBroker(get_messagebroker_backend(backend=mb_backend, **mb_settings))
+    yield _message_broker
+    _message_broker.terminate()
+
+def test_messagebroker_instantiation():
+    with pytest.raises(AttributeError) as e:
+        MessageBroker(None)
+    str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+    assert str(e.value) == 'backend must inherit from {}'.format(str_class_path)
+
+    assert MessageBroker(get_messagebroker_backend(BackendEnum.INMEMORY)) is not None
+
+def test_messagebroker(message_broker : MessageBroker): # pylint: disable=redefined-outer-name
+    output_list1 : List[Message] = []
+    consumer1 = Consumer(message_broker, {TOPIC_DEVICES, TOPIC_LINKS}, output_list1)
+    consumer1.start()
+
+    output_list2 : List[Message] = []
+    consumer2 = Consumer(message_broker, {TOPIC_DEVICES, TOPIC_SERVICES}, output_list2)
+    consumer2.start()
+
+    output_list3 : List[Message] = []
+    consumer3 = Consumer(message_broker, {TOPIC_SERVICES}, output_list3)
+    consumer3.start()
+
+    LOGGER.info('delay')
+    time.sleep(0.5)
+
+    message = Message(topic=TOPIC_DEVICES, content='new-device-01')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_DEVICES, content='new-device-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_LINKS,   content='new-link-01-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    LOGGER.info('delay')
+    time.sleep(0.1)
+
+    message = Message(topic=TOPIC_DEVICES,  content='update-device-01')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_DEVICES,  content='update-device-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_SERVICES, content='new-service-01-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    LOGGER.info('delay')
+    time.sleep(0.5)
+
+    LOGGER.info('terminate')
+    message_broker.terminate()
+
+    LOGGER.info('join')
+    consumer1.join()
+    consumer2.join()
+    consumer3.join()
+
+    LOGGER.info('output_list1={:s}'.format(str(output_list1)))
+    LOGGER.info('output_list2={:s}'.format(str(output_list2)))
+    LOGGER.info('output_list3={:s}'.format(str(output_list3)))
+
+    assert len(output_list1) == 5
+    assert output_list1[0].topic == TOPIC_DEVICES
+    assert output_list1[0].content == 'new-device-01'
+    assert output_list1[1].topic == TOPIC_DEVICES
+    assert output_list1[1].content == 'new-device-02'
+    assert output_list1[2].topic == TOPIC_LINKS
+    assert output_list1[2].content == 'new-link-01-02'
+    assert output_list1[3].topic == TOPIC_DEVICES
+    assert output_list1[3].content == 'update-device-01'
+    assert output_list1[4].topic == TOPIC_DEVICES
+    assert output_list1[4].content == 'update-device-02'
+
+    assert len(output_list2) == 5
+    assert output_list2[0].topic == TOPIC_DEVICES
+    assert output_list2[0].content == 'new-device-01'
+    assert output_list2[1].topic == TOPIC_DEVICES
+    assert output_list2[1].content == 'new-device-02'
+    assert output_list2[2].topic == TOPIC_DEVICES
+    assert output_list2[2].content == 'update-device-01'
+    assert output_list2[3].topic == TOPIC_DEVICES
+    assert output_list2[3].content == 'update-device-02'
+    assert output_list2[4].topic == TOPIC_SERVICES
+    assert output_list2[4].content == 'new-service-01-02'
+
+    assert len(output_list3) == 1
+    assert output_list3[0].topic == TOPIC_SERVICES
+    assert output_list3[0].content == 'new-service-01-02'
diff --git a/src/tests/ofc22/tests/common/orm/Database.py b/src/tests/ofc22/tests/common/orm/Database.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5ec7b0a93164595e2f75f9059c223044b171701
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/Database.py
@@ -0,0 +1,52 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import List, Set, Tuple
+from .backend._Backend import _Backend
+
+LOGGER = logging.getLogger(__name__)
+
+class Database:
+    def __init__(self, backend : _Backend):
+        if not isinstance(backend, _Backend):
+            str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+            raise AttributeError('backend must inherit from {}'.format(str_class_path))
+        self._backend = backend
+
+    @property
+    def backend(self) -> _Backend: return self._backend
+
+    def clear_all(self, keep_keys : Set[str] = set()) -> None:
+        for key in self._backend.keys():
+            if key in keep_keys: continue
+            self._backend.delete(key)
+
+    def dump(self) -> List[Tuple[str, str, str]]:
+        entries = self._backend.dump()
+        entries.sort()
+        _entries = []
+        for str_key, str_type, value in entries:
+            if isinstance(value, list):
+                str_value = ', '.join(map("'{:s}'".format, sorted(list(value))))
+                str_value = '[' + str_value + ']'
+            elif isinstance(value, set):
+                str_value = ', '.join(map("'{:s}'".format, sorted(list(value))))
+                str_value = '{' + str_value + '}'
+            elif isinstance(value, dict):
+                sorted_keys = sorted(value.keys())
+                str_value = ', '.join(["'{}': '{}'".format(key, value[key]) for key in sorted_keys])
+                str_value = '{' + str_value + '}'
+            else:
+                # Plain values (strings) fall through the branches above; without this
+                # fallback, str_value would be undefined or stale for such entries.
+                str_value = str(value)
+            _entries.append((str_type, str_key, str_value))
+        return _entries
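
A dump/clear sketch (not part of the patch). It assumes the in-memory ORM backend added later in this patch implements dict_update() as declared in the _Backend interface (its listing extends beyond this excerpt):

```python
from common.orm.Database import Database
from common.orm.backend.inmemory.InMemoryBackend import InMemoryBackend

database = Database(InMemoryBackend())
database.backend.dict_update(['device', 'dev1'], fields={'status': 'enabled'})
for str_type, str_key, str_value in database.dump():
    print(str_type, str_key, str_value)
database.clear_all()
```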
diff --git a/src/tests/ofc22/tests/common/orm/Exceptions.py b/src/tests/ofc22/tests/common/orm/Exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf91bd2f7a628a86fa45f7eb687b3292e00d0f8e
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/Exceptions.py
@@ -0,0 +1,19 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class ConstraintException(Exception):
+    pass
+
+class MutexException(Exception):
+    pass
diff --git a/src/tests/ofc22/tests/common/orm/Factory.py b/src/tests/ofc22/tests/common/orm/Factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..15940ce7cc565c3966e3abfe2604ebd245d2adfc
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/Factory.py
@@ -0,0 +1,46 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os
+from typing import Optional, Union
+from .backend._Backend import _Backend
+from .backend.BackendEnum import BackendEnum
+from .backend.inmemory.InMemoryBackend import InMemoryBackend
+from .backend.redis.RedisBackend import RedisBackend
+
+LOGGER = logging.getLogger(__name__)
+
+BACKENDS = {
+    BackendEnum.INMEMORY.value: InMemoryBackend,
+    BackendEnum.REDIS.value: RedisBackend,
+    #BackendEnum.MONGODB.value: MongoDBBackend,
+    #BackendEnum.RETHINKDB.value: RethinkDBBackend,
+    #BackendEnum.ETCD.value: EtcdBackend,
+}
+
+DEFAULT_DB_BACKEND = BackendEnum.INMEMORY
+
+def get_database_backend(backend : Optional[Union[str, BackendEnum]] = None, **settings) -> _Backend:
+    # Return an instance of the selected Database backend class, initialized with the given settings.
+    # The backend is selected using the following criteria (the first one that is not None wins):
+    # 1. backend selected by parameter (backend=...)
+    # 2. environment variable DB_BACKEND
+    # 3. default backend: INMEMORY
+    if backend is None: backend = os.environ.get('DB_BACKEND', DEFAULT_DB_BACKEND)
+    if backend is None: raise Exception('Database Backend not specified')
+    if isinstance(backend, BackendEnum): backend = backend.value
+    backend_class = BACKENDS.get(backend)
+    if backend_class is None: raise Exception('Unsupported DatabaseBackend({:s})'.format(backend))
+    LOGGER.info('Selected Database Backend: {:s}'.format(backend))
+    return backend_class(**settings)
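
Selection mirrors the message-broker factory, with DB_BACKEND as the environment variable; unsupported names fail fast (sketch, not part of the patch):

```python
import os
from common.orm.Factory import get_database_backend

backend = get_database_backend()    # no parameter, no DB_BACKEND: INMEMORY default

os.environ['DB_BACKEND'] = 'cassandra'
try:
    get_database_backend()          # unsupported backend name
except Exception as e:
    print(e)                        # Unsupported DatabaseBackend(cassandra)
```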
diff --git a/src/tests/ofc22/tests/common/orm/HighLevel.py b/src/tests/ofc22/tests/common/orm/HighLevel.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5bdeae3e9607767b5215f6ff87cb0d8624918d0
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/HighLevel.py
@@ -0,0 +1,84 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any, Dict, List, Optional, Set, Tuple
+from common.rpc_method_wrapper.ServiceExceptions import NotFoundException
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.model.Model import Model, MetaModel
+
+def get_all_objects(database : Database, model_class : MetaModel) -> List[Model]:
+    db_pks = sorted(list(model_class.get_primary_keys(database)))
+    return [model_class(database, db_pk) for db_pk in db_pks]
+
+def get_object(
+    database : Database, model_class : Model, key_parts : List[str], raise_if_not_found : bool = True
+    ) -> Optional[Model]:
+
+    str_key = key_to_str(key_parts)
+    db_object = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    if found: return db_object
+    if raise_if_not_found: raise NotFoundException(model_class.__name__.replace('Model', ''), str_key)
+    return None
+
+def get_related_objects(
+    source_instance : Model, reference_model_class : MetaModel, navigation_field_name : Optional[str] = None) -> Set[Model]:
+
+    database = source_instance.database
+    db_target_instances = set()
+
+    if navigation_field_name is not None:
+        navigation_fk_field : Optional[ForeignKeyField] = getattr(reference_model_class, navigation_field_name, None)
+        if navigation_fk_field is None or not isinstance(navigation_fk_field, ForeignKeyField):
+            msg = 'navigation_field_name({:s}) must be a ForeignKeyField in reference_model_class({:s})'
+            raise AttributeError(msg.format(navigation_field_name, reference_model_class.__name__))
+        target_model_class = navigation_fk_field.foreign_model
+
+    for db_reference_pk,_ in source_instance.references(reference_model_class):
+        db_reference = reference_model_class(database, db_reference_pk)
+        if navigation_field_name is not None:
+            target_fk_field = getattr(db_reference, navigation_field_name, None)
+            if target_fk_field is None: continue
+            db_reference = target_model_class(database, target_fk_field)
+        db_target_instances.add(db_reference)
+    return db_target_instances
+
+def update_or_create_object(
+    database : Database, model_class : Model, key_parts : List[str], attributes : Dict[str, Any]
+    ) -> Tuple[Model, bool]:
+
+    str_key = key_to_str(key_parts)
+    db_object : Model = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    for attr_name, attr_value in attributes.items():
+        setattr(db_object, attr_name, attr_value)
+    db_object.save()
+    updated = found # updated if found, else created
+    return db_object, updated
+
+def get_or_create_object(
+    database : Database, model_class : Model, key_parts : List[str], defaults : Dict[str, Any] = {}
+    ) -> Tuple[Model, bool]:
+
+    str_key = key_to_str(key_parts)
+    db_object : Model = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    if not found:
+        for attr_name, attr_value in defaults.items():
+            setattr(db_object, attr_name, attr_value)
+        db_object.save()
+    created = not found # created if not found, else loaded
+    return db_object, created
diff --git a/src/tests/ofc22/tests/common/orm/__init__.py b/src/tests/ofc22/tests/common/orm/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/backend/BackendEnum.py b/src/tests/ofc22/tests/common/orm/backend/BackendEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..0297ca7be6c3bd13c2ee7d3cabe75efe23ead51f
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/BackendEnum.py
@@ -0,0 +1,22 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import Enum
+
+class BackendEnum(Enum):
+    INMEMORY = 'inmemory'
+    REDIS = 'redis'
+    #MONGODB = 'mongodb'
+    #RETHINKDB = 'rethinkdb'
+    #ETCD = 'etcd'
diff --git a/src/tests/ofc22/tests/common/orm/backend/Tools.py b/src/tests/ofc22/tests/common/orm/backend/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..30223feac8c98c4272e56de841c1abafb792c5a6
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/Tools.py
@@ -0,0 +1,19 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List, Union
+
+def key_to_str(key : Union[str, List[str]], separator : str = '/') -> str:
+    if isinstance(key, str): return key
+    return separator.join(map(str, key))
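
Behavior of key_to_str(), for reference (not part of the patch):

```python
from common.orm.backend.Tools import key_to_str

assert key_to_str('device/dev1') == 'device/dev1'             # strings pass through
assert key_to_str(['device', 'dev1']) == 'device/dev1'        # lists are joined
assert key_to_str(['link', 17], separator=':') == 'link:17'   # items are str()-ified
```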
diff --git a/src/tests/ofc22/tests/common/orm/backend/_Backend.py b/src/tests/ofc22/tests/common/orm/backend/_Backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..08b346ea746a4cf1fda393de069c4ab63549973d
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/_Backend.py
@@ -0,0 +1,67 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Any, Dict, List, Optional, Set, Tuple
+
+class _Backend:
+    def __init__(self, **settings) -> None:
+        raise NotImplementedError()
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        raise NotImplementedError()
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        raise NotImplementedError()
+
+    def keys(self) -> list:
+        raise NotImplementedError()
+
+    def exists(self, key : List[str]) -> bool:
+        raise NotImplementedError()
+
+    def delete(self, key : List[str]) -> bool:
+        raise NotImplementedError()
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        raise NotImplementedError()
+
+    def dict_update(self, key : List[str], fields : Dict[str, str] = {}) -> None:
+        raise NotImplementedError()
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        raise NotImplementedError()
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        raise NotImplementedError()
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        raise NotImplementedError()
+
+    def set_add(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        raise NotImplementedError()
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        raise NotImplementedError()
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        raise NotImplementedError()
diff --git a/src/tests/ofc22/tests/common/orm/backend/__init__.py b/src/tests/ofc22/tests/common/orm/backend/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/backend/inmemory/InMemoryBackend.py b/src/tests/ofc22/tests/common/orm/backend/inmemory/InMemoryBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..4930d59571bed319f61918f1af6ea5666e54e140
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/inmemory/InMemoryBackend.py
@@ -0,0 +1,161 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# InMemory Database Backend
+# -------------------------
+# - Concurrency is limited to 1 operation at a time
+# - All operations are strictly sequential by means of locks
+# - WARNING: DESIGNED AND BUILT FOR UNIT TESTING AND INTEGRATION TESTING PURPOSES ONLY !!!
+#            USE ANOTHER BACKEND IN PRODUCTION ENVIRONMENTS.
+
+import copy, logging, threading, uuid
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from .._Backend import _Backend
+from ..Tools import key_to_str
+from .Tools import get_dict, get_list, get_or_create_dict, get_or_create_list, get_or_create_set, get_set
+
+LOGGER = logging.getLogger(__name__)
+
+class InMemoryBackend(_Backend):
+    def __init__(self, **settings): # pylint: disable=super-init-not-called
+        self._lock = threading.Lock()
+        self._keys : Dict[str, Union[Set[str], List[str], Dict[str, str], str]] = {} # key => set/list/dict/string
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        # InMemoryBackend uses a database where all operations are atomic. Locks are implemented by storing the lock
+        # owner key in a string entry. A lock can be acquired only if its entry is absent or already holds the
+        # caller's owner key.
+        owner_key = str(uuid.uuid4()) if owner_key is None else owner_key
+        str_keys = {key_to_str(key) for key in keys}
+        with self._lock:
+            acquired_lock_keys : Dict[str, str] = {}
+            for str_key in str_keys:
+                if (str_key in self._keys) and (len(self._keys[str_key]) > 0) and (self._keys[str_key] != owner_key):
+                    # lock already acquired, cannot acquire all locks atomically
+                    for str_key_acquired in acquired_lock_keys:
+                        if str_key_acquired not in self._keys: continue
+                        del self._keys[str_key_acquired]
+                    return False, None
+
+                # lock available, temporarily acquire it; all temporarily acquired locks are released if any of the
+                # requested keys turns out to be unavailable
+                self._keys[str_key] = owner_key
+                acquired_lock_keys[str_key] = owner_key
+            return True, owner_key
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        str_keys = {key_to_str(key) for key in keys}
+        with self._lock:
+            for str_key in str_keys:
+                if str_key not in self._keys: return False
+                if self._keys[str_key] != owner_key: return False
+            # Up to here, we own all the keys we want to release
+            for str_key in str_keys:
+                del self._keys[str_key]
+            return True
+
+    def keys(self) -> list:
+        with self._lock:
+            return copy.deepcopy(list(self._keys.keys()))
+
+    def exists(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            return str_key in self._keys
+
+    def delete(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            if str_key not in self._keys: return False
+            del self._keys[str_key]
+            return True
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_dict(self._keys, str_key)
+            if container is None: return {}
+            if len(fields) == 0: fields = container.keys()
+            return copy.deepcopy({
+                field_name : field_value for field_name,field_value in container.items() if field_name in fields
+            })
+
+    def dict_update(self, key : List[str], fields : Dict[str,str] = {}) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_dict(self._keys, str_key)
+            container.update(fields)
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            if len(fields) == 0:
+                if str_key not in self._keys: return
+                del self._keys[str_key]
+            else:
+                container = get_dict(self._keys, str_key)
+                if container is None: return
+                for field in list(fields): container.pop(field, None)
+                if len(container) == 0: self._keys.pop(str_key)
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_list(self._keys, str_key)
+            if container is None: return []
+            return copy.deepcopy(container)
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_list(self._keys, str_key)
+            container.append(item)
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_list(self._keys, str_key)
+            if container is None: return
+            if item in container: container.remove(item)
+            if len(container) == 0: self._keys.pop(str_key)
+
+    def set_add(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_set(self._keys, str_key)
+            container.add(item)
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_set(self._keys, str_key)
+            return (container is not None) and (item in container)
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_set(self._keys, str_key)
+            if container is None: return set()
+            return copy.deepcopy(container)
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_set(self._keys, str_key)
+            if container is None: return
+            container.discard(item)
+            if len(container) == 0: self._keys.pop(str_key)
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        with self._lock:
+            entries = []
+            for str_key,key_value in self._keys.items():
+                entries.append((str_key, type(key_value).__name__, key_value))
+        return entries
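+
+# Illustrative usage sketch (hypothetical keys). Note that lock entries share the key space with data
+# entries, so lock keys must be kept distinct, e.g. with a '/lock' suffix as done by the Model class:
+#     backend = InMemoryBackend()
+#     acquired, owner_key = backend.lock([['device', 'dev1', 'lock']])
+#     if acquired:
+#         try:
+#             backend.dict_update(['device', 'dev1'], {'status': 'enabled'})
+#         finally:
+#             backend.unlock([['device', 'dev1', 'lock']], owner_key)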
diff --git a/src/tests/ofc22/tests/common/orm/backend/inmemory/Tools.py b/src/tests/ofc22/tests/common/orm/backend/inmemory/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f8e2b5c956f953d45d2ab5a41d7874262ec675c
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/inmemory/Tools.py
@@ -0,0 +1,45 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, List, Set, Union
+
+def get_dict(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Dict:
+    return keys.get(str_key, None)
+
+def get_or_create_dict(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Dict:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, dict())
+    if not isinstance(container, dict):
+        raise Exception('Key({:s}, {:s}) is not a dict'.format(str(type(container).__name__), str(str_key)))
+    return container
+
+def get_list(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> List:
+    return keys.get(str_key, None)
+
+def get_or_create_list(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> List:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, list())
+    if not isinstance(container, list):
+        raise Exception('Key({:s}, {:s}) is not a list'.format(str(type(container).__name__), str(str_key)))
+    return container
+
+def get_set(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Set:
+    return keys.get(str_key, None)
+
+def get_or_create_set(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Set:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, set())
+    if not isinstance(container, set):
+        raise Exception('Key({:s}, {:s}) is not a set'.format(str(type(container).__name__), str(str_key)))
+    return container
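+
+# Minimal sketch of the intended use (hypothetical keys); each helper enforces the container type
+# associated with a key:
+#     keys = {}
+#     get_or_create_list(keys, 'device/dev1/endpoints').append('ep1')
+#     get_or_create_dict(keys, 'device/dev1')['status'] = 'enabled'
+#     get_or_create_set(keys, 'device/dev1/endpoints')    # raises: key already holds a list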
diff --git a/src/tests/ofc22/tests/common/orm/backend/inmemory/__init__.py b/src/tests/ofc22/tests/common/orm/backend/inmemory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/inmemory/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/backend/redis/Mutex.py b/src/tests/ofc22/tests/common/orm/backend/redis/Mutex.py
new file mode 100644
index 0000000000000000000000000000000000000000..6424b7a79076bdcc42aafe46fd9d7dfe75683a4a
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/redis/Mutex.py
@@ -0,0 +1,136 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import random, time, uuid
+from typing import Set, Tuple, Union
+from redis.client import Redis
+
+KEY_LOCK = '{}/lock'
+MIN_WAIT_TIME = 0.01
+
+class Mutex:
+    def __init__(self, client: Redis) -> None:
+        if not isinstance(client, Redis):
+            str_class_path = '{}.{}'.format(Redis.__module__, Redis.__name__)
+            raise AttributeError('client must be an instance of {}'.format(str_class_path))
+        self._client = client
+        self._script_release = None
+        self._script_refresh_expire = None
+        self._register_scripts()
+
+    def _register_scripts(self) -> None:
+        # Script mutex_release
+        #   Description: atomic script to release a set of mutex keys, only if all mutex keys are owned by the caller.
+        #                If owner_key matches the key stored in all mutexes, remove all mutexes and return 1. If some
+        #                key does not match, do nothing and return 0.
+        #   Keys: set of entity_keys to be released
+        #   Args: owner_key
+        #   Ret : 1 if all keys have been released, 0 otherwise (no action performed)
+        #   Use : acquired = (int(self._script_release(keys=['mutex1', 'mutex2'], args=[owner_key])) == 1)
+        self._script_release = self._client.register_script('\n'.join([
+            "for _,key in ipairs(KEYS) do",
+            "    local owner_key = redis.call('get', key)",
+            "    if owner_key ~= ARGV[1] then return 0 end",
+            "end",
+            "for _,key in ipairs(KEYS) do",
+            "    redis.call('del', key)",
+            "end",
+            "return 1",
+        ]))
+
+        # Script mutex_refresh_expire
+        #   Description: atomic script to refresh the expiration of a set of mutex keys, only if all of them are owned
+        #                by the caller. If owner_key matches the key stored in all mutexes, refresh the expiration on
+        #                all mutexes and return 1. If some key does not match, do nothing and return 0.
+        #   Keys: set of entity_keys to be refreshed
+        #   Args: owner_key, expiracy_seconds
+        #   Ret : 1 if all keys have been refreshed, 0 otherwise (no action performed)
+        #   Use : done = (int(self._script_refresh_expire(keys=['mutex1', 'mutex2'], args=[owner_key, seconds])) == 1)
+        self._script_refresh_expire = self._client.register_script('\n'.join([
+            "for _,key in ipairs(KEYS) do",
+            "    local owner_key = redis.call('get', key)",
+            "    if owner_key ~= ARGV[1] then return 0 end",
+            "end",
+            "for _,key in ipairs(KEYS) do",
+            "    redis.call('expire', key, ARGV[2])",
+            "end",
+            "return 1",
+        ]))
+
+    def acquire(self, entity_key_or_keys : Union[str, Set[str]], owner_key : Union[str, None] = None,
+                blocking : bool = True, timeout : Union[float, int] = 5,
+                expiracy_seconds : Union[float, int, None] = None) -> Tuple[bool, str]:
+        # Atomically set all entity_keys or none of them.
+        # entity_key_or_keys contains either a string with a specific entity key or a set with all entity keys to be
+        # set atomically.
+        # owner_key allows specifying the key used to mark the mutex. When releasing, the owner_key must be correct;
+        # otherwise, the key will not be released. It can also be used to check whether the mutex is still owned by
+        # oneself or was lost and acquired by another party. If set to None, a random key is generated and returned
+        # together with the acquired boolean value.
+        # blocking defines whether the acquisition should block, meaning that acquisition is retried with random
+        # back-off increments until the timeout elapses.
+        # Optionally, an expiration period can be specified through expiracy_seconds. If the mutex is not released
+        # within that period, it is released automatically.
+        # If mutex(es) is(are) acquired, the method returns True and the owner_key used to create the lock; otherwise,
+        # False and None owner_key are returned.
+
+        owner_key = owner_key or str(uuid.uuid4())
+        entity_keys = entity_key_or_keys if isinstance(entity_key_or_keys, set) else {str(entity_key_or_keys)}
+        entity_key_map = {KEY_LOCK.format(entity_key):owner_key for entity_key in entity_keys}
+        acquired = False
+        if blocking:
+            remaining_wait_time = timeout
+            while not acquired:
+                acquired = (self._client.msetnx(entity_key_map) == 1)
+                if acquired: break
+                if remaining_wait_time < MIN_WAIT_TIME: return False, None
+                wait_time = remaining_wait_time * random.random()
+                remaining_wait_time -= wait_time
+                time.sleep(wait_time)
+        else:
+            acquired = (self._client.msetnx(entity_key_map) == 1)
+
+        if not acquired: return False, None
+
+        if expiracy_seconds is not None:
+            pipeline = self._client.pipeline()
+            for entity_key in entity_key_map.keys(): pipeline.expire(entity_key, expiracy_seconds)
+            pipeline.execute()
+
+        return True, owner_key
+
+    def release(self, entity_key_or_keys : Union[str, Set[str]], owner_key : str) -> bool:
+        # release mutex keys only if all of them are owned by the caller
+        # return True if succeeded, False (nothing changed) otherwise
+        entity_keys = entity_key_or_keys if isinstance(entity_key_or_keys, set) else {str(entity_key_or_keys)}
+        entity_keys = {KEY_LOCK.format(entity_key) for entity_key in entity_keys}
+        return int(self._script_release(keys=list(entity_keys), args=[owner_key])) == 1
+
+    def acquired(self, entity_key : str, owner_key : str) -> bool:
+        # check if a mutex is owned by the owner with owner_key
+        value = self._client.get(KEY_LOCK.format(entity_key))
+        if value is None: return False
+        if isinstance(value, bytes): value = value.decode('UTF-8')
+        return value == owner_key
+
+    def get_ttl(self, entity_key : str) -> float:
+        # check a mutex's time to live
+        return self._client.ttl(KEY_LOCK.format(entity_key))
+
+    def refresh_expiracy(self, entity_key_or_keys : Union[str, Set[str]], owner_key : str,
+                         expiracy_seconds : Union[float, int]) -> bool:
+        # refresh expiracy on specified mutex keys only if all of them are owned by the caller
+        # return True if succeeded, False (nothing changed) otherwise
+        entity_keys = entity_key_or_keys if isinstance(entity_key_or_keys, set) else {str(entity_key_or_keys)}
+        entity_keys = {KEY_LOCK.format(entity_key) for entity_key in entity_keys}
+        return int(self._script_refresh_expire(keys=list(entity_keys), args=[owner_key, expiracy_seconds])) == 1
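+
+# Illustrative usage sketch (assumes a reachable Redis instance; entity keys are hypothetical):
+#     client = Redis.from_url('redis://127.0.0.1:6379/0')
+#     mutex = Mutex(client)
+#     acquired, owner_key = mutex.acquire({'device[dev1]', 'link[lnk1]'}, expiracy_seconds=30)
+#     if acquired:
+#         try:
+#             pass    # critical section protected by both mutexes
+#         finally:
+#             mutex.release({'device[dev1]', 'link[lnk1]'}, owner_key)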
diff --git a/src/tests/ofc22/tests/common/orm/backend/redis/RedisBackend.py b/src/tests/ofc22/tests/common/orm/backend/redis/RedisBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..30225eaa29b4b7bfd69c4c277ee513192d923b7a
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/redis/RedisBackend.py
@@ -0,0 +1,131 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, uuid
+from typing import Any, Dict, List, Optional, Set, Tuple
+from redis.client import Redis
+from .._Backend import _Backend
+from ..Tools import key_to_str
+from .Mutex import Mutex
+
+DEFAULT_SERVICE_HOST = '127.0.0.1'
+DEFAULT_SERVICE_PORT = 6379
+DEFAULT_DATABASE_ID  = 0
+
+def get_setting(settings : Dict[str, Any], name : str, default : Any) -> Any:
+    value = settings.get(name, os.environ.get(name))
+    return default if value is None else value
+
+class RedisBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        host = get_setting(settings, 'REDIS_SERVICE_HOST', DEFAULT_SERVICE_HOST)
+        port = get_setting(settings, 'REDIS_SERVICE_PORT', DEFAULT_SERVICE_PORT)
+        dbid = get_setting(settings, 'REDIS_DATABASE_ID',  DEFAULT_DATABASE_ID )
+        self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
+        self._mutex = Mutex(self._client)
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        str_keys = {key_to_str(key) for key in keys}
+        owner_key = str(uuid.uuid4()) if owner_key is None else owner_key
+        return self._mutex.acquire(str_keys, owner_key=owner_key, blocking=True)
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        str_keys = {key_to_str(key) for key in keys}
+        return self._mutex.release(str_keys, owner_key)
+
+    def keys(self) -> list:
+        return [k.decode('UTF-8') for k in self._client.keys()]
+
+    def exists(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        return self._client.exists(str_key) == 1
+
+    def delete(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        return self._client.delete(str_key) == 1
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        str_key = key_to_str(key)
+        if len(fields) == 0:
+            keys_values = self._client.hgetall(str_key).items()
+        else:
+            fields = list(fields)
+            keys_values = zip(fields, self._client.hmget(str_key, fields))
+
+        attributes = {}
+        for field_name,field_value in keys_values:
+            field_name = field_name.decode('UTF-8') if isinstance(field_name, bytes) else field_name
+            attributes[field_name] = field_value.decode('UTF-8') if isinstance(field_value, bytes) else field_value
+        return attributes
+
+    def dict_update(self, key : List[str], fields : Dict[str, str] = {}) -> None:
+        str_key = key_to_str(key)
+        if len(fields) > 0:
+            self._client.hset(str_key, mapping=fields)
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        str_key = key_to_str(key)
+        if len(fields) == 0:
+            self._client.delete(str_key)
+        else:
+            self._client.hdel(str_key, *fields)
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        str_key = key_to_str(key)
+        return list(map(lambda m: m.decode('UTF-8'), self._client.lrange(str_key, 0, -1)))
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.rpush(str_key, item)
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        str_key = key_to_str(key)
+        self._client.lrem(str_key, 1, item)
+
+    def set_add(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.sadd(str_key, item)
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        str_key = key_to_str(key)
+        return self._client.sismember(str_key, item) == 1
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        str_key = key_to_str(key)
+        return set(map(lambda m: m.decode('UTF-8'), self._client.smembers(str_key)))
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.srem(str_key, item)
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        entries = []
+        for str_key in self._client.keys():
+            str_key = str_key.decode('UTF-8')
+            key_type = self._client.type(str_key)
+            if key_type is not None: key_type = key_type.decode('UTF-8')
+            key_type = {
+                'hash'  : 'dict',
+                'list'  : 'list',
+                'set'   : 'set',
+                'string': 'str',
+            }.get(key_type)
+            key_content = {
+                'dict': lambda key: {k.decode('UTF-8'):v.decode('UTF-8') for k,v in self._client.hgetall(key).items()},
+                'list': lambda key: [m.decode('UTF-8') for m in self._client.lrange(key, 0, -1)],
+                'set' : lambda key: {m.decode('UTF-8') for m in self._client.smembers(key)},
+                'str' : lambda key: self._client.get(key).decode('UTF-8'),
+            }.get(key_type, lambda key: 'UNSUPPORTED_TYPE')
+            entries.append((str_key, key_type, key_content(str_key)))
+        return entries
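+
+# Settings resolution sketch: explicit settings take precedence over environment variables, which in
+# turn take precedence over the defaults above (values shown are illustrative):
+#     backend = RedisBackend(REDIS_SERVICE_HOST='10.0.0.5', REDIS_SERVICE_PORT=6379, REDIS_DATABASE_ID=0)
+#     # or, with REDIS_SERVICE_HOST exported in the environment:
+#     backend = RedisBackend()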
diff --git a/src/tests/ofc22/tests/common/orm/backend/redis/__init__.py b/src/tests/ofc22/tests/common/orm/backend/redis/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/backend/redis/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/fields/BooleanField.py b/src/tests/ofc22/tests/common/orm/fields/BooleanField.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae9ac8eafa5803217dc3e7554683a737134bc4e5
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/BooleanField.py
@@ -0,0 +1,31 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import Union
+from common.type_checkers.Checkers import chk_boolean
+from .Field import Field
+
+BOOL_TRUE_VALUES = {'TRUE', 'T', '1'}
+
+class BooleanField(Field):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, type_=bool, **kwargs)
+
+    def validate(self, value : Union[bool, str], try_convert_type=False) -> bool:
+        value = self.is_required(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str):
+            return value.upper() in BOOL_TRUE_VALUES
+        return chk_boolean(self.name, value)
diff --git a/src/tests/ofc22/tests/common/orm/fields/EnumeratedField.py b/src/tests/ofc22/tests/common/orm/fields/EnumeratedField.py
new file mode 100644
index 0000000000000000000000000000000000000000..f684649e10bbb985f787ae6f4cb6f4660b5cd943
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/EnumeratedField.py
@@ -0,0 +1,38 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from enum import Enum
+from typing import Union
+from common.type_checkers.Checkers import chk_issubclass, chk_options, chk_type
+from .Field import Field
+
+class EnumeratedField(Field):
+    def __init__(self, enumeration_class : 'Enum', *args, required : bool = True, **kwargs) -> None:
+        self.enumeration_class : Enum = chk_issubclass('EnumeratedField.enumeration_class', enumeration_class, Enum)
+        super().__init__(*args, type_=self.enumeration_class, required=required, **kwargs)
+
+    def validate(self, value : Union['Enum', str], try_convert_type=False) -> 'Enum':
+        value = super().is_required(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str):
+            chk_options(self.name, value, self.enumeration_class.__members__.keys())
+            value = self.enumeration_class.__members__[value]
+        return chk_type(self.name, value, self.enumeration_class)
+
+    def serialize(self, value: 'Enum') -> str:
+        value = self.validate(value, try_convert_type=True)
+        if value is None: return None
+        return str(value.name)
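+
+# Minimal sketch (hypothetical enumeration): values are serialized by member name and deserialized
+# back through the enumeration class:
+#     class DeviceStatus(Enum):
+#         ENABLED  = 'enabled'
+#         DISABLED = 'disabled'
+#     field = EnumeratedField(DeviceStatus, name='status')
+#     field.serialize(DeviceStatus.ENABLED)    # -> 'ENABLED'
+#     field.deserialize('ENABLED')             # -> DeviceStatus.ENABLED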
diff --git a/src/tests/ofc22/tests/common/orm/fields/Field.py b/src/tests/ofc22/tests/common/orm/fields/Field.py
new file mode 100644
index 0000000000000000000000000000000000000000..68d868cf740bc00e22bc951cd8bab843d28db21d
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/Field.py
@@ -0,0 +1,63 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import logging
+from typing import TYPE_CHECKING, Any, List, Set, Tuple, Union
+from common.type_checkers.Checkers import chk_boolean, chk_not_none, chk_string, chk_type
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+LOGGER = logging.getLogger(__name__)
+
+# Ref: https://docs.python.org/3.9/howto/descriptor.html
+
+class Field:
+    def __init__(
+        self, name : str = None, type_ : Union[type, Set[type], Tuple[type], List[type]] = object,
+        required : bool = False) -> None:
+
+        self.name = None if name is None else chk_string('Field.name', name)
+        self.type_ = chk_type('Field.type', type_, (type, set, tuple, list))
+        self.required = chk_boolean('Field.required', required)
+
+    def __get__(self, instance : 'Model', objtype=None):
+        if instance is None: return self
+        return instance.__dict__.get(self.name)
+
+    def __set__(self, instance : 'Model', value : Any) -> None:
+        instance.__dict__[self.name] = self.validate(value)
+
+    def __delete__(self, instance : 'Model'):
+        raise AttributeError('Attribute "{:s}" cannot be deleted'.format(self.name))
+
+    def is_required(self, value):
+        if self.required:
+            chk_not_none(self.name, value, reason='is required. It cannot be None.')
+        return value
+
+    def validate(self, value, try_convert_type=False):
+        value = self.is_required(value)
+        if value is None: return None
+        if try_convert_type: value = self.type_(value)
+        return value
+
+    def serialize(self, value : Any) -> str:
+        value = self.validate(value)
+        if value is None: return None
+        return str(value)
+
+    def deserialize(self, value : str) -> Any:
+        return self.validate(value, try_convert_type=True)
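+
+# Minimal sketch of the descriptor behavior (hypothetical class; real models rely on the Model
+# metaclass, which fills in each Field's name automatically):
+#     class Device:
+#         status = Field(name='status', type_=str, required=True)
+#     device = Device()
+#     device.status = 'enabled'    # __set__ validates and stores the value in device.__dict__
+#     device.status                # __get__ reads it back: 'enabled'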
diff --git a/src/tests/ofc22/tests/common/orm/fields/FloatField.py b/src/tests/ofc22/tests/common/orm/fields/FloatField.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d006e77347e3c440a31bd13f59e08267daa5e63
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/FloatField.py
@@ -0,0 +1,34 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import Optional, Union
+from common.type_checkers.Checkers import chk_float
+from .Field import Field
+
+class FloatField(Field):
+    def __init__(
+        self, *args, min_value : Optional[float] = None, max_value : Optional[float] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=float, **kwargs)
+        self._min_value = None if min_value is None else \
+            chk_float('FloatField.min_value', min_value)
+        self._max_value = None if max_value is None else \
+            chk_float('FloatField.max_value', max_value, min_value=self._min_value)
+
+    def validate(self, value : Union[float, str], try_convert_type=False) -> float:
+        value = super().validate(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str): value = float(value)
+        return chk_float(self.name, value, min_value=self._min_value, max_value=self._max_value)
diff --git a/src/tests/ofc22/tests/common/orm/fields/ForeignKeyField.py b/src/tests/ofc22/tests/common/orm/fields/ForeignKeyField.py
new file mode 100644
index 0000000000000000000000000000000000000000..12e720d17952d6b09d0d4911105b156e8593f33b
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/ForeignKeyField.py
@@ -0,0 +1,34 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import TYPE_CHECKING
+from common.type_checkers.Checkers import chk_issubclass, chk_type
+from .StringField import StringField
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+class ForeignKeyField(StringField):
+    def __init__(self, foreign_model : 'Model', *args, required : bool = True, **kwargs) -> None:
+        from ..model.Model import Model
+        self.foreign_model : Model = chk_issubclass('ForeignKeyField.foreign_model', foreign_model, Model)
+        super().__init__(*args, required=required, allow_empty=not required, **kwargs)
+
+    def __set__(self, instance : 'Model', value : 'Model') -> None:
+        model_instance : 'Model' = chk_type('value', value, self.foreign_model)
+        super().__set__(instance, self.validate(model_instance.instance_key))
+
+    def __delete__(self, instance: 'Model'):
+        super().__set__(instance, self.validate(None))
diff --git a/src/tests/ofc22/tests/common/orm/fields/IntegerField.py b/src/tests/ofc22/tests/common/orm/fields/IntegerField.py
new file mode 100644
index 0000000000000000000000000000000000000000..87ddab4646d5426c462ea96f18368e3e49be06c1
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/IntegerField.py
@@ -0,0 +1,34 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import Optional, Union
+from common.type_checkers.Checkers import chk_integer
+from .Field import Field
+
+class IntegerField(Field):
+    def __init__(
+        self, *args, min_value : Optional[int] = None, max_value : Optional[int] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=int, **kwargs)
+        self._min_value = None if min_value is None else \
+            chk_integer('IntegerField.min_value', min_value)
+        self._max_value = None if max_value is None else \
+            chk_integer('IntegerField.max_value', max_value, min_value=self._min_value)
+
+    def validate(self, value : Union[int, str], try_convert_type=False) -> int:
+        value = super().validate(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str): value = int(value)
+        return chk_integer(self.name, value, min_value=self._min_value, max_value=self._max_value)
diff --git a/src/tests/ofc22/tests/common/orm/fields/PrimaryKeyField.py b/src/tests/ofc22/tests/common/orm/fields/PrimaryKeyField.py
new file mode 100644
index 0000000000000000000000000000000000000000..86fdc7e2466f10d3c16cf733fedbfc15fe62f31f
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/PrimaryKeyField.py
@@ -0,0 +1,29 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import TYPE_CHECKING
+from .StringField import StringField
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+class PrimaryKeyField(StringField):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, required=True, allow_empty=False, min_length=1, **kwargs)
+
+    def __set__(self, instance : 'Model', value : str) -> None:
+        if (self.name in instance.__dict__) and (instance.__dict__[self.name] is not None):
+            raise ValueError('PrimaryKeyField cannot be modified')
+        super().__set__(instance, self.validate(value))
diff --git a/src/tests/ofc22/tests/common/orm/fields/StringField.py b/src/tests/ofc22/tests/common/orm/fields/StringField.py
new file mode 100644
index 0000000000000000000000000000000000000000..ead8487eaf60542079b2ca3745f04aba2c0d6de9
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/StringField.py
@@ -0,0 +1,39 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import re
+from typing import Optional, Pattern, Union
+from common.type_checkers.Checkers import chk_boolean, chk_integer, chk_string
+from .Field import Field
+
+class StringField(Field):
+    def __init__(
+        self, *args, allow_empty : bool = False, min_length : Optional[int] = None, max_length : Optional[int] = None,
+        pattern : Optional[Union[Pattern, str]] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=str, **kwargs)
+        self._allow_empty = chk_boolean('StringField.allow_empty', allow_empty)
+        self._min_length = None if min_length is None else \
+            chk_integer('StringField.min_length', min_length, min_value=0)
+        self._max_length = None if max_length is None else \
+            chk_integer('StringField.max_length', max_length, min_value=self._min_length)
+        self._pattern = None if pattern is None else re.compile(pattern)
+
+    def validate(self, value : str, try_convert_type=False) -> str:
+        value = super().validate(value, try_convert_type=try_convert_type)
+        if value is None: return None
+        return chk_string(
+            self.name, value, allow_empty=self._allow_empty, min_length=self._min_length, max_length=self._max_length,
+            pattern=self._pattern)
diff --git a/src/tests/ofc22/tests/common/orm/fields/__init__.py b/src/tests/ofc22/tests/common/orm/fields/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a4cccb63a28eda573b16e4cef7da3ae58ac9d54
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/fields/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .BooleanField import BooleanField
+from .Field import Field
+from .FloatField import FloatField
+from .ForeignKeyField import ForeignKeyField
+from .IntegerField import IntegerField
+from .PrimaryKeyField import PrimaryKeyField
+from .StringField import StringField
+__all__ = ['BooleanField', 'Field', 'FloatField', 'ForeignKeyField', 'IntegerField', 'PrimaryKeyField', 'StringField']
diff --git a/src/tests/ofc22/tests/common/orm/model/Model.py b/src/tests/ofc22/tests/common/orm/model/Model.py
new file mode 100644
index 0000000000000000000000000000000000000000..ffb9571142942de0a23982c990db3670a27bc670
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/model/Model.py
@@ -0,0 +1,308 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import logging, re
+from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from ..Exceptions import ConstraintException, MutexException
+from ..fields.Field import Field
+from ..fields.PrimaryKeyField import PrimaryKeyField
+from .Tools import NoDupOrderedDict
+
+LOGGER = logging.getLogger(__name__)
+DEFAULT_PRIMARY_KEY_NAME = 'pk_auto'
+
+class MetaModel(type):
+    @classmethod
+    def __prepare__(cls, name : str, bases : Tuple[type, ...], **attrs : Any) -> Mapping[str, Any]:
+        return NoDupOrderedDict()
+
+    def __new__(cls, name : str, bases : Tuple[type, ...], attrs : NoDupOrderedDict[str, Any]):
+        field_names = list()
+        pk_field_name = None
+        for key, value in attrs.items():
+            if not isinstance(value, Field): continue
+            value.name = key
+            field_names.append(key)
+            if not isinstance(value, PrimaryKeyField): continue
+            if pk_field_name is None:
+                pk_field_name = key
+                continue
+            raise AttributeError('PrimaryKeyField for Model({:s}) already set to attribute({:s})'.format(
+                str(name), str(pk_field_name)))
+        if pk_field_name is None:
+            if DEFAULT_PRIMARY_KEY_NAME in attrs.keys():
+                msg = 'PrimaryKeyField for Model({:s}) not defined and attribute "{:s}" already used. '\
+                      'Leave attribute name "{:s}" for automatic PrimaryKeyField, or set a PrimaryKeyField.'
+                raise AttributeError(msg.format(str(name), DEFAULT_PRIMARY_KEY_NAME, DEFAULT_PRIMARY_KEY_NAME))
+            pk_field_name = DEFAULT_PRIMARY_KEY_NAME
+            attrs[pk_field_name] = PrimaryKeyField(name=pk_field_name)
+            field_names.append(pk_field_name)
+        cls_obj = super().__new__(cls, name, bases, dict(attrs))
+        setattr(cls_obj, '_pk_field_name', pk_field_name)
+        setattr(cls_obj, '_field_names_list', field_names)
+        setattr(cls_obj, '_field_names_set', set(field_names))
+        return cls_obj
+
+KEYWORD_INSTANCES  = 'instances'
+KEYWORD_LOCK       = 'lock'
+KEYWORD_REFERENCES = 'references'
+KEYWORD_STORED     = '_stored'
+
+class Model(metaclass=MetaModel):
+    @classmethod
+    def get_backend_key_instances(cls) -> str:
+        return key_to_str(['{:s}'.format(cls.__name__), KEYWORD_INSTANCES])
+
+    @classmethod
+    def get_backend_key_instance(cls, primary_key : str) -> str:
+        return '{:s}[{:s}]'.format(cls.__name__, primary_key)
+
+    @classmethod
+    def get_backend_key_references(cls, primary_key : str) -> str:
+        match = re.match(r'^[a-zA-Z0-9\_]+\[([^\]]*)\]', primary_key)
+        if not match: primary_key = cls.get_backend_key_instance(primary_key)
+        return key_to_str([primary_key, KEYWORD_REFERENCES])
+
+    @staticmethod
+    def get_backend_key_lock(backend_key : str) -> str:
+        if backend_key.endswith(KEYWORD_LOCK): return backend_key
+        return key_to_str([backend_key, KEYWORD_LOCK])
+
+    @staticmethod
+    def get_backend_key_locks(backend_keys : List[str]) -> List[str]:
+        return [Model.get_backend_key_lock(backend_key) for backend_key in backend_keys]
+
+    @classmethod
+    def backend_key__to__instance_key(cls, backend_key : str) -> str:
+        class_name = cls.__name__
+        if backend_key.startswith(class_name):
+            match = re.match(r'^{:s}\[([^\]]*)\]'.format(class_name), backend_key)
+            if match: return match.group(1)
+        return backend_key
+
+    def __init__(self, database : Database, primary_key : str, auto_load : bool = True) -> None:
+        if not isinstance(database, Database):
+            str_class_path = '{}.{}'.format(Database.__module__, Database.__name__)
+            raise AttributeError('database must inherit from {}'.format(str_class_path))
+        self._model_class = type(self)
+        self._class_name = self._model_class.__name__
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+        pk_field_instance : 'PrimaryKeyField' = getattr(self._model_class, pk_field_name)
+        primary_key = pk_field_instance.validate(primary_key)
+        primary_key = self.backend_key__to__instance_key(primary_key)
+        setattr(self, pk_field_name, primary_key)
+        self._database = database
+        self._backend = database.backend
+        self._instance_key : str = self.get_backend_key_instance(primary_key)
+        self._instances_key : str = self.get_backend_key_instances()
+        self._references_key : str = self.get_backend_key_references(primary_key)
+        self._owner_key : Optional[str] = None
+        if auto_load: self.load()
+
+    @property
+    def database(self) -> Database: return self._database
+
+    @property
+    def instance_key(self) -> str: return self._instance_key
+
+    def lock(self, extra_keys : List[List[str]] = [], blocking : bool = True):
+        while True:
+            lock_keys = Model.get_backend_key_locks(
+                [self._instance_key, self._instances_key, self._references_key] + extra_keys)
+            acquired,self._owner_key = self._backend.lock(lock_keys, owner_key=self._owner_key)
+            if acquired: return
+            if not blocking: break
+        raise MutexException('Unable to lock keys {:s} using owner_key {:s}'.format(
+            str(lock_keys), str(self._owner_key)))
+
+    def unlock(self, extra_keys : List[List[str]] = []):
+        lock_keys = Model.get_backend_key_locks(
+            [self._instance_key, self._instances_key, self._references_key] + extra_keys)
+        released = self._backend.unlock(lock_keys, self._owner_key)
+        if released: return
+        raise MutexException('Unable to unlock keys {:s} using owner_key {:s}'.format(
+            str(lock_keys), str(self._owner_key)))
+
+    def load(self) -> bool:
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+
+        try:
+            self.lock()
+
+            attributes = self._backend.dict_get(self._instance_key)
+            if attributes is None or len(attributes) == 0: return False
+            for field_name in self._field_names_list: # pylint: disable=no-member
+                if field_name == pk_field_name: continue
+                if field_name not in attributes: continue
+                raw_field_value = attributes[field_name]
+                field_instance : 'Field' = getattr(self._model_class, field_name)
+                field_value = field_instance.deserialize(raw_field_value)
+                if isinstance(field_instance, ForeignKeyField):
+                    setattr(self, field_name + KEYWORD_STORED, field_value)
+                    field_value = field_instance.foreign_model(self._database, field_value, auto_load=True)
+                setattr(self, field_name, field_value)
+            return True
+        finally:
+            self.unlock()
+
+    def save(self) -> None:
+        attributes : Dict[str, Any] = dict()
+        required_keys : Set[str] = set()
+        foreign_additions : Dict[str, str] = dict()
+        foreign_removals : Dict[str, str] = dict()
+        for field_name in self._field_names_list: # pylint: disable=no-member
+            field_value = getattr(self, field_name)
+            field_instance : 'Field' = getattr(self._model_class, field_name)
+            serialized_field_value = field_instance.serialize(field_value)
+            if (serialized_field_value is None) and (not field_instance.required): continue
+            if isinstance(field_instance, ForeignKeyField):
+                foreign_reference = '{:s}:{:s}'.format(self._instance_key, field_name)
+                field_value_stored = getattr(self, field_name + KEYWORD_STORED, None)
+                if field_value_stored is not None:
+                    foreign_removals[self.get_backend_key_references(field_value_stored)] = foreign_reference
+                foreign_additions[self.get_backend_key_references(serialized_field_value)] = foreign_reference
+                required_keys.add(serialized_field_value)
+            attributes[field_name] = serialized_field_value
+
+        extra_keys = []
+        extra_keys.extend(list(foreign_removals.keys()))
+        extra_keys.extend(list(foreign_additions.keys()))
+
+        try:
+            self.lock(extra_keys=extra_keys)
+
+            not_exists = [
+                str(required_key)
+                for required_key in required_keys
+                if not self._backend.exists(required_key)]
+            if len(not_exists) > 0:
+                raise ConstraintException('Required Keys ({:s}) do not exist'.format(', '.join(sorted(not_exists))))
+
+            self._backend.dict_update(self._instance_key, attributes)
+            self._backend.set_add(self._instances_key, self._instance_key)
+
+            for serialized_field_value,foreign_reference in foreign_removals.items():
+                self._backend.set_remove(serialized_field_value, foreign_reference)
+
+            for serialized_field_value,foreign_reference in foreign_additions.items():
+                self._backend.set_add(serialized_field_value, foreign_reference)
+        finally:
+            self.unlock(extra_keys=extra_keys)
+
+        # record the just-saved foreign key values as the new stored values
+        for foreign_reference in foreign_additions.values():
+            field_name = foreign_reference.rsplit(':', 1)[-1]
+            field_instance : 'Field' = getattr(self._model_class, field_name)
+            setattr(self, field_name + KEYWORD_STORED, field_instance.serialize(getattr(self, field_name)))
+
+    def delete(self) -> None:
+        foreign_removals : Dict[str, str] = {}
+        for field_name in self._field_names_list: # pylint: disable=no-member
+            field_instance : 'Field' = getattr(self._model_class, field_name)
+            if not isinstance(field_instance, ForeignKeyField): continue
+            foreign_reference = '{:s}:{:s}'.format(self._instance_key, field_name)
+            field_value_stored = getattr(self, field_name + KEYWORD_STORED, None)
+            if field_value_stored is None: continue
+            foreign_removals[self.get_backend_key_references(field_value_stored)] = foreign_reference
+
+        extra_keys = []
+        extra_keys.extend(list(foreign_removals.keys()))
+
+        try:
+            self.lock(extra_keys=extra_keys)
+
+            if self._backend.exists(self._references_key):
+                references = self._backend.set_get_all(self._references_key)
+                raise ConstraintException('Instance is used by Keys ({:s})'.format(', '.join(sorted(references))))
+
+            self._backend.delete(self._instance_key)
+            self._backend.set_remove(self._instances_key, self._instance_key)
+
+            for serialized_field_value,foreign_reference in foreign_removals.items():
+                self._backend.set_remove(serialized_field_value, foreign_reference)
+        finally:
+            self.unlock(extra_keys=extra_keys)
+
+    @staticmethod
+    def get_model_name(model_or_str) -> str:
+        if isinstance(model_or_str, str):
+            return model_or_str
+        if (type(model_or_str).__name__ == 'MetaModel') and issubclass(model_or_str, Model):
+            return model_or_str.__name__
+        raise Exception()
+
+    def references(
+        self, filter_by_models : Optional[Union[type, List[type], Set[type], Tuple[type]]] = None
+        ) -> Set[Tuple[str, str]]:
+
+        try:
+            self.lock()
+            if not self._backend.exists(self._references_key): return set()
+            references = self._backend.set_get_all(self._references_key)
+            try:
+                if filter_by_models is None:
+                    pass
+                elif isinstance(filter_by_models, str):
+                    filter_by_models = {filter_by_models}
+                elif isinstance(filter_by_models, (list, set, tuple)):
+                    filter_by_models = {Model.get_model_name(model_or_str) for model_or_str in filter_by_models}
+                elif (type(filter_by_models).__name__ == 'MetaModel') and issubclass(filter_by_models, Model):
+                    filter_by_models = {Model.get_model_name(filter_by_models)}
+                else:
+                    raise Exception()
+            except Exception as e:
+                msg = 'filter_by_models({:s}) unsupported. Expected a type or a list/set of types. Optionally, keep '\
+                      'it as None to retrieve all the references pointing to this instance.'
+                raise AttributeError(msg.format(str(filter_by_models))) from e
+            if filter_by_models:
+                references = filter(lambda instance_key: instance_key.split('[', 1)[0] in filter_by_models, references)
+            return {tuple(reference.rsplit(':', 1)) for reference in references}
+        finally:
+            self.unlock()
+
+    @classmethod
+    def get_primary_keys(cls, database : Database):
+        backend = database.backend
+        key_model_instances = cls.get_backend_key_instances()
+        key_model_instances_lock = cls.get_backend_key_lock(key_model_instances)
+
+        acquired,owner_key = backend.lock(key_model_instances_lock)
+        if not acquired:
+            raise MutexException('Unable to lock keys {:s}'.format(
+                str(key_model_instances_lock)))
+
+        instance_keys = backend.set_get_all(key_model_instances)
+
+        released = backend.unlock(key_model_instances_lock, owner_key)
+        if not released:
+            raise MutexException('Unable to unlock keys {:s} using owner_key {:s}'.format(
+                str(key_model_instances_lock), str(owner_key)))
+
+        return instance_keys
+
+    def dump_id(self) -> Dict:
+        raise NotImplementedError()
+
+    def dump(self) -> Dict:
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+        arguments = ', '.join(
+            '{:s}={:s}{:s}'.format(
+                name, repr(getattr(self, name)), '(PK)' if name == pk_field_name else '')
+            for name in self._field_names_list # pylint: disable=no-member
+        )
+        return '{:s}({:s})'.format(self._class_name, arguments)
diff --git a/src/tests/ofc22/tests/common/orm/model/Tools.py b/src/tests/ofc22/tests/common/orm/model/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..aed6a14bc2668017e312f0659ccb08c8c185ea90
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/model/Tools.py
@@ -0,0 +1,31 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+
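+# NoDupOrderedDict rejects duplicate keys: re-assigning an existing key raises instead
+# of silently overwriting (presumably used by the Model metaclass to catch duplicate
+# field declarations).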
+class NoDupOrderedDict(OrderedDict):
+    def __setitem__(self, key, value):
+        if key in self: raise NameError('{:s} already defined'.format(str(key)))
+        super().__setitem__(key, value)
+
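+# format_key() fills a str.format()-style pattern with the instance's public,
+# non-callable attributes; explicit **kwargs override same-named attributes.
+# Hypothetical example (names are illustrative only):
+#   format_key('{parent}/{pk}', obj, parent='ctx')  # -> 'ctx/<obj.pk>'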
+def format_key(key_pattern, instance, **kwargs):
+    attributes = {}
+    for attribute_name in instance.__dir__():
+        if attribute_name[0] == '_': continue
+        attribute_obj = getattr(instance, attribute_name, None)
+        if attribute_obj is None: continue
+        if type(attribute_obj).__name__ == 'method': continue
+        attributes[attribute_name] = attribute_obj
+    attributes.update(kwargs)
+    return key_pattern.format(**attributes)
diff --git a/src/tests/ofc22/tests/common/orm/model/__init__.py b/src/tests/ofc22/tests/common/orm/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/model/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/tests/__init__.py b/src/tests/ofc22/tests/common/orm/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/orm/tests/test_unitary.py b/src/tests/ofc22/tests/common/orm/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..586a9623574a104c58b6ae4762fa13fc6ab02871
--- /dev/null
+++ b/src/tests/ofc22/tests/common/orm/tests/test_unitary.py
@@ -0,0 +1,656 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, pytest
+from enum import Enum
+from common.orm.Exceptions import ConstraintException
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
+from common.orm.backend.BackendEnum import BackendEnum
+from common.orm.backend._Backend import _Backend
+from common.orm.fields.BooleanField import BooleanField
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.FloatField import FloatField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import DEFAULT_PRIMARY_KEY_NAME, Model
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+def test_database_instantiation():
+    with pytest.raises(AttributeError) as e:
+        Database(None)
+    str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+    assert str(e.value) == 'backend must inherit from {}'.format(str_class_path)
+
+    assert Database(get_database_backend(BackendEnum.INMEMORY)) is not None
+
+def test_model_without_attributes():
+    with pytest.raises(AttributeError) as e:
+        Model(None, 'valid-uuid')
+    str_class_path = '{}.{}'.format(Database.__module__, Database.__name__)
+    assert str(e.value) == 'database must inherit from {}'.format(str_class_path)
+
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    with pytest.raises(ValueError) as e:
+        Model(database, '')
+    msg = '{:s}() is out of range: allow_empty(False).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, 23)
+    msg = '{:s}(23) is of a wrong type(int). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, 23.5)
+    msg = '{:s}(23.5) is of a wrong type(float). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, True)
+    msg = '{:s}(True) is of a wrong type(bool). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, ['a'])
+    msg = '{:s}([\'a\']) is of a wrong type(list). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    Model(database, 'valid-primary-key')
+
+def test_model_with_primarykey():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    with pytest.raises(AttributeError) as e:
+        class WrongTestModel(Model): # pylint: disable=unused-variable
+            pk = PrimaryKeyField()
+            name = StringField(min_length=1)
+            age = IntegerField(min_value=0)
+            salary = FloatField(min_value=0.0)
+            active = BooleanField()
+            pk2 = PrimaryKeyField()
+    assert str(e.value) == 'PrimaryKeyField for Model(WrongTestModel) already set to attribute(pk)'
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=1)
+        age = IntegerField(min_value=0)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    backend_key_instances  = TestModel.get_backend_key_instances()
+    backend_key_instance   = TestModel.get_backend_key_instance('pk')
+    backend_key_references = TestModel.get_backend_key_references('pk')
+
+    assert backend_key_instances  == 'TestModel/instances'
+    assert backend_key_instance   == 'TestModel[pk]'
+    assert backend_key_references == 'TestModel[pk]/references'
+
+    assert TestModel.get_backend_key_lock(backend_key_instances ) == 'TestModel/instances/lock'
+    assert TestModel.get_backend_key_lock(backend_key_instance  ) == 'TestModel[pk]/lock'
+    assert TestModel.get_backend_key_lock(backend_key_references) == 'TestModel[pk]/references/lock'
+
+    with pytest.raises(ValueError) as e:
+        TestModel(database, None)
+    assert str(e.value) == 'pk(None) is required. It cannot be None.'
+
+    with pytest.raises(ValueError) as e:
+        TestModel(database, '')
+    assert str(e.value) == 'pk() is out of range: allow_empty(False).'
+
+    obj = TestModel(database, 'valid-pk')
+    assert obj is not None
+
+    with pytest.raises(ValueError) as e:
+        obj.pk = 'another-valid-pk'
+    assert str(e.value) == 'PrimaryKeyField cannot be modified'
+
+def test_model_with_primarykey_and_attributes():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    with pytest.raises(AttributeError) as e:
+        class BadTestModel(Model): # pylint: disable=unused-variable
+            pk_auto = StringField() # field using default name of primary key
+            name = StringField(min_length=5, max_length=10)
+            age = IntegerField(min_value=0)
+            salary = FloatField(min_value=0.0)
+            active = BooleanField()
+            gender = EnumeratedField(GenderEnum)
+
+    msg = 'PrimaryKeyField for Model(BadTestModel) not defined and attribute "pk_auto" already used. '\
+          'Leave attribute name "pk_auto" for automatic PrimaryKeyField, or set a PrimaryKeyField.'
+    assert str(e.value) == msg
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=5, max_length=10)
+        age = IntegerField(min_value=0)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    obj = TestModel(database, 'valid-pk')
+    assert obj is not None
+
+    with pytest.raises(AttributeError) as e:
+        del obj.name
+    assert str(e.value) == 'Attribute "name" cannot be deleted'
+
+    with pytest.raises(TypeError) as e:
+        obj.name = 55
+    assert str(e.value) == "name(55) is of a wrong type(int). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.name = 55.5
+    assert str(e.value) == "name(55.5) is of a wrong type(float). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.name = True
+    assert str(e.value) == "name(True) is of a wrong type(bool). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.age = 'too old'
+    assert str(e.value) == "age(too old) is of a wrong type(str). Accepted type_or_types(<class 'int'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.age = 37.5
+    assert str(e.value) == "age(37.5) is of a wrong type(float). Accepted type_or_types(<class 'int'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.salary = 'too high'
+    msg = "salary(too high) is of a wrong type(str). Accepted type_or_types((<class 'int'>, <class 'float'>))."
+    assert str(e.value) == msg
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 'active'
+    assert str(e.value) == "active(active) is of a wrong type(str). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 27
+    assert str(e.value) == "active(27) is of a wrong type(int). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 92.5
+    assert str(e.value) == "active(92.5) is of a wrong type(float). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(ValueError) as e:
+        obj.name = ''
+    assert str(e.value) == 'name() is out of range: allow_empty(False).'
+
+    with pytest.raises(ValueError) as e:
+        obj.name = 'John'
+    assert str(e.value) == 'name(John) is out of range: min_length(5).'
+
+    with pytest.raises(ValueError) as e:
+        obj.name = 'John Smith Willson'
+    assert str(e.value) == 'name(John Smith Willson) is out of range: max_value(10).'
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 51
+    assert str(e.value) == "gender(51) is of a wrong type(int). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 55.5
+    assert str(e.value) == "gender(55.5) is of a wrong type(float). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = False
+    assert str(e.value) == "gender(False) is of a wrong type(bool). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 'male'
+    assert str(e.value) == "gender(male) is of a wrong type(str). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    obj.name = 'John Smith'
+    obj.age = 37
+    obj.salary = 5023.52
+    obj.active = True
+    obj.gender = GenderEnum.MALE
+    assert repr(obj) == "TestModel(pk='valid-pk'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                        "gender=<GenderEnum.MALE: 'male'>)"
+
+def test_model_database_operations():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=5, max_length=30)
+        age = IntegerField(min_value=0, required=True)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    obj_john = TestModel(database, 'john')
+    assert obj_john is not None
+
+    obj_john.name = 'John Smith'
+    obj_john.salary = 5023.52
+    obj_john.active = True
+    assert repr(obj_john) == "TestModel(pk='john'(PK), name='John Smith', age=None, salary=5023.52, active=True, "\
+                             "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        obj_john.save()
+    assert str(e.value) == 'age(None) is required. It cannot be None.'
+
+    obj_john.age = 37
+    assert repr(obj_john) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                             "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        obj_john.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    obj_john.gender = GenderEnum.MALE
+    obj_john.save()
+
+    db_entries = database.dump()
+    assert len(db_entries) == 2
+    assert db_entries[0] == (
+        'set', 'TestModel/instances',
+        "{'TestModel[john]'}")
+    assert db_entries[1] == (
+        'dict', 'TestModel[john]',
+        "{'active': 'True', 'age': '37', 'gender': 'MALE', 'name': 'John Smith', 'pk': 'john', "\
+        "'salary': '5023.52'}")
+
+    obj_john2 = TestModel(database, 'john', auto_load=False)
+    assert obj_john2 is not None
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name=None, age=None, salary=None, active=None, gender=None)"
+    obj_john2.load()
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                              "gender=<GenderEnum.MALE: 'male'>)"
+
+    obj_john2 = TestModel(database, 'john', auto_load=True)
+    assert obj_john2 is not None
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                              "gender=<GenderEnum.MALE: 'male'>)"
+
+    obj_john2.delete()
+    assert len(database.dump()) == 0
+
+    obj_john2.save()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 2
+    assert db_entries[0] == (
+        'set', 'TestModel/instances',
+        "{'TestModel[john]'}")
+    assert db_entries[1] == (
+        'dict', 'TestModel[john]',
+        "{'active': 'True', 'age': '37', 'gender': 'MALE', 'name': 'John Smith', 'pk': 'john', "\
+        "'salary': '5023.52'}")
+
+
+    obj_jane = TestModel(database, 'jane', auto_load=True)
+    obj_jane.name = 'Jane Willson'
+    obj_jane.age = 26
+    obj_jane.salary = 6071.72
+    obj_jane.active = True
+    obj_jane.gender = GenderEnum.FEMALE
+    assert repr(obj_jane) == "TestModel(pk='jane'(PK), name='Jane Willson', age=26, salary=6071.72, active=True, "\
+                             "gender=<GenderEnum.FEMALE: 'female'>)"
+    obj_jane.save()
+
+    obj_julia = TestModel(database, 'julia', auto_load=True)
+    obj_julia.name = 'Julia Simons'
+    obj_julia.age = 42
+    obj_julia.salary = 5451.13
+    obj_julia.active = True
+    obj_julia.gender = GenderEnum.FEMALE
+    assert repr(obj_julia) == "TestModel(pk='julia'(PK), name='Julia Simons', age=42, salary=5451.13, active=True, "\
+                              "gender=<GenderEnum.FEMALE: 'female'>)"
+    obj_julia.save()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    test_model_pks = sorted(TestModel.get_primary_keys(database))
+    assert len(test_model_pks) == 3
+    assert test_model_pks[0] == 'TestModel[jane]'
+    assert test_model_pks[1] == 'TestModel[john]'
+    assert test_model_pks[2] == 'TestModel[julia]'
+
+    database.clear_all()
+    assert len(database.dump()) == 0
+
+def test_model_foreignkeys():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class Team(Model):
+        pk = PrimaryKeyField()
+        name = StringField(max_length=10, required=True)
+
+    class Workplace(Model):
+        pk = PrimaryKeyField()
+        name = StringField(max_length=10, required=True)
+
+    class Member(Model):
+        pk = PrimaryKeyField()
+        team = ForeignKeyField(Team)
+        place = ForeignKeyField(Workplace, required=False)
+        name = StringField(max_length=10, required=True)
+        gender = EnumeratedField(GenderEnum)
+
+    team_dev_ops = Team(database, 'dev-ops')
+    team_dev_ops.name = 'Dev Ops'
+    assert team_dev_ops is not None
+    assert repr(team_dev_ops) == "Team(pk='dev-ops'(PK), name='Dev Ops')"
+
+    workplace_bcn = Workplace(database, 'bcn')
+    workplace_bcn.name = 'Barcelona'
+    assert workplace_bcn is not None
+    assert repr(workplace_bcn) == "Workplace(pk='bcn'(PK), name='Barcelona')"
+
+    member_john = Member(database, 'john')
+    member_john.name = 'John'
+    member_john.team = team_dev_ops
+    member_john.place = workplace_bcn
+    assert member_john is not None
+    assert repr(member_john) == "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[bcn]', name='John', "\
+                                "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_john.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_john.gender = GenderEnum.MALE
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Team[dev-ops], Workplace[bcn]) does not exist'
+
+    workplace_bcn.save()
+    assert repr(Workplace(database, workplace_bcn.pk)) == "Workplace(pk='bcn'(PK), name='Barcelona')"
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Team[dev-ops]) does not exist'
+
+    team_dev_ops.save()
+    assert repr(Team(database, team_dev_ops.pk)) == "Team(pk='dev-ops'(PK), name='Dev Ops')"
+
+    member_john.save()
+    assert repr(Member(database, member_john.pk)) == \
+        "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[bcn]', name='John', "\
+        "gender=<GenderEnum.MALE: 'male'>)"
+
+    with pytest.raises(ConstraintException) as e:
+        workplace_bcn.delete()
+    assert str(e.value) == 'Instance is used by Keys (Member[john]:place)'
+
+    with pytest.raises(ConstraintException) as e:
+        team_dev_ops.delete()
+    assert str(e.value) == 'Instance is used by Keys (Member[john]:team)'
+
+    workplace_mad = Workplace(database, 'mad')
+    workplace_mad.name = 'Madrid'
+    assert workplace_mad is not None
+    assert repr(workplace_mad) == "Workplace(pk='mad'(PK), name='Madrid')"
+
+    member_john = Member(database, 'john')
+    member_john.name = 'John'
+    member_john.place = workplace_mad
+    assert member_john is not None
+    assert repr(member_john) == \
+        "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[mad]', name='John', "\
+        "gender=<GenderEnum.MALE: 'male'>)"
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Workplace[mad]) does not exist'
+
+    workplace_mad.save()
+    assert repr(Workplace(database, workplace_mad.pk)) == "Workplace(pk='mad'(PK), name='Madrid')"
+
+    member_john.save()
+
+    member_john = Member(database, 'john')
+
+    with pytest.raises(ValueError) as e:
+        del member_john.place
+        del member_john.team
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+
+    member_jane = Member(database, 'jane')
+    member_jane.name = 'Jane'
+    member_jane.place = workplace_mad
+    assert member_jane is not None
+    assert repr(member_jane) == "Member(pk='jane'(PK), team=None, place='Workplace[mad]', name='Jane', gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_jane.save()
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+    member_jane.team = team_dev_ops
+
+    with pytest.raises(ValueError) as e:
+        member_jane.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_jane.gender = GenderEnum.FEMALE
+
+    member_jane.save()
+    assert repr(Member(database, member_jane.pk)) == \
+        "Member(pk='jane'(PK), team='Team[dev-ops]', place='Workplace[mad]', name='Jane', "\
+        "gender=<GenderEnum.FEMALE: 'female'>)"
+
+    member_brad = Member(database, 'brad')
+    assert member_brad is not None
+    assert repr(member_brad) == "Member(pk='brad'(PK), team=None, place=None, name=None, gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+    member_brad.team = team_dev_ops
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'name(None) is required. It cannot be None.'
+
+    member_brad.name = 'Brad'
+    assert repr(member_brad) == "Member(pk='brad'(PK), team=\'Team[dev-ops]\', place=None, name='Brad', gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_brad.gender = GenderEnum.MALE
+
+    member_brad.save()
+    assert repr(Member(database, member_brad.pk)) == \
+        "Member(pk='brad'(PK), team='Team[dev-ops]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+
+    team_admin = Team(database, 'admin')
+    team_admin.name = 'Admin'
+    team_admin.save()
+    assert repr(Team(database, team_admin.pk)) == "Team(pk='admin'(PK), name='Admin')"
+
+    member_brad = Member(database, member_brad.pk)
+    assert repr(member_brad) == \
+        "Member(pk='brad'(PK), team='Team[dev-ops]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+    member_brad.team = team_admin
+    assert repr(member_brad) == \
+        "Member(pk='brad'(PK), team='Team[admin]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+    member_brad.save()
+    assert repr(Member(database, member_brad.pk)) == \
+        "Member(pk='brad'(PK), team='Team[admin]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+
+    references = sorted(team_dev_ops.references())
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'team')
+    assert references[1] == ('Member[john]', 'team')
+
+    references = sorted(workplace_bcn.references())
+    assert len(references) == 0
+
+    references = sorted(workplace_mad.references())
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references('Member'))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({'Member'}))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(['Member']))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(('Member',)))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(Member))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({Member}))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references([Member]))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references((Member,)))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({'non-existing-model'}))
+    assert len(references) == 0
+
+    with pytest.raises(AttributeError) as e:
+        references = sorted(workplace_mad.references(7))
+    assert str(e.value) == 'filter_by_models(7) unsupported. Expected a type or a list/set of types. '\
+                           'Optionally, keep it as None to retrieve all the references pointing to this instance.'
+
+    with pytest.raises(AttributeError) as e:
+        references = sorted(workplace_mad.references({7}))
+    assert str(e.value) == 'filter_by_models({7}) unsupported. Expected a type or a list/set of types. '\
+                           'Optionally, keep it as None to retrieve all the references pointing to this instance.'
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    assert len(db_entries) == 13
+    assert db_entries[ 0] == ('set', "Member/instances",
+                              "{'Member[brad]', 'Member[jane]', 'Member[john]'}")
+    assert db_entries[ 1] == ('dict', "Member[brad]",
+                              "{'gender': 'MALE', 'name': 'Brad', 'pk': 'brad', 'team': 'Team[admin]'}")
+    assert db_entries[ 2] == ('dict', "Member[jane]",
+                              "{'gender': 'FEMALE', 'name': 'Jane', 'pk': 'jane', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 3] == ('dict', "Member[john]",
+                              "{'gender': 'MALE', 'name': 'John', 'pk': 'john', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 4] == ('set', "Team/instances",
+                              "{'Team[admin]', 'Team[dev-ops]'}")
+    assert db_entries[ 5] == ('dict', "Team[admin]",
+                              "{'name': 'Admin', 'pk': 'admin'}")
+    assert db_entries[ 6] == ('set' , "Team[admin]/references",
+                              "{'Member[brad]:team'}")
+    assert db_entries[ 7] == ('dict', "Team[dev-ops]",
+                              "{'name': 'Dev Ops', 'pk': 'dev-ops'}")
+    assert db_entries[ 8] == ('set' , "Team[dev-ops]/references",
+                              "{'Member[jane]:team', 'Member[john]:team'}")
+    assert db_entries[ 9] == ('set', "Workplace/instances",
+                              "{'Workplace[bcn]', 'Workplace[mad]'}")
+    assert db_entries[10] == ('dict', "Workplace[bcn]",
+                              "{'name': 'Barcelona', 'pk': 'bcn'}")
+    assert db_entries[11] == ('dict', "Workplace[mad]",
+                              "{'name': 'Madrid', 'pk': 'mad'}")
+    assert db_entries[12] == ('set' , "Workplace[mad]/references",
+                              "{'Member[jane]:place', 'Member[john]:place'}")
+
+    Member(database, member_john.pk).delete()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    assert len(db_entries) == 12
+    assert db_entries[ 0] == ('set', "Member/instances",
+                              "{'Member[brad]', 'Member[jane]'}")
+    assert db_entries[ 1] == ('dict', 'Member[brad]',
+                              "{'gender': 'MALE', 'name': 'Brad', 'pk': 'brad', 'team': 'Team[admin]'}")
+    assert db_entries[ 2] == ('dict', 'Member[jane]',
+                              "{'gender': 'FEMALE', 'name': 'Jane', 'pk': 'jane', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 3] == ('set', "Team/instances",
+                              "{'Team[admin]', 'Team[dev-ops]'}")
+    assert db_entries[ 4] == ('dict', 'Team[admin]',
+                              "{'name': 'Admin', 'pk': 'admin'}")
+    assert db_entries[ 5] == ('set',  'Team[admin]/references',
+                              "{'Member[brad]:team'}")
+    assert db_entries[ 6] == ('dict', 'Team[dev-ops]',
+                              "{'name': 'Dev Ops', 'pk': 'dev-ops'}")
+    assert db_entries[ 7] == ('set',  'Team[dev-ops]/references',
+                              "{'Member[jane]:team'}")
+    assert db_entries[ 8] == ('set', "Workplace/instances",
+                              "{'Workplace[bcn]', 'Workplace[mad]'}")
+    assert db_entries[ 9] == ('dict', 'Workplace[bcn]',
+                              "{'name': 'Barcelona', 'pk': 'bcn'}")
+    assert db_entries[10] == ('dict', 'Workplace[mad]',
+                              "{'name': 'Madrid', 'pk': 'mad'}")
+    assert db_entries[11] == ('set',  'Workplace[mad]/references',
+                              "{'Member[jane]:place'}")
diff --git a/src/tests/ofc22/tests/common/proto b/src/tests/ofc22/tests/common/proto
new file mode 120000
index 0000000000000000000000000000000000000000..0ae252a7824cad03d85fa60224b87d8c779f1588
--- /dev/null
+++ b/src/tests/ofc22/tests/common/proto
@@ -0,0 +1 @@
+../../proto/src/python
\ No newline at end of file
diff --git a/src/tests/ofc22/tests/common/rpc_method_wrapper/Decorator.py b/src/tests/ofc22/tests/common/rpc_method_wrapper/Decorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..31dc4b82bdaa8762b1dee5af247b3f8b7b9af2af
--- /dev/null
+++ b/src/tests/ofc22/tests/common/rpc_method_wrapper/Decorator.py
@@ -0,0 +1,82 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from enum import Enum
+from typing import Dict, List
+from prometheus_client import Counter, Histogram
+from prometheus_client.metrics import MetricWrapperBase
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from .ServiceExceptions import ServiceException
+
+class RequestConditionEnum(Enum):
+    STARTED   = 'started'
+    COMPLETED = 'completed'
+    FAILED    = 'failed'
+
+def get_counter_requests(method_name : str, request_condition : RequestConditionEnum) -> Counter:
+    str_request_condition = request_condition.value
+    name = '{:s}_counter_requests_{:s}'.format(method_name.replace(':', '_'), str_request_condition)
+    description = '{:s} counter of requests {:s}'.format(method_name, str_request_condition)
+    return Counter(name, description)
+
+def get_histogram_duration(method_name : str) -> Histogram:
+    name = '{:s}_histogram_duration'.format(method_name.replace(':', '_'))
+    description = '{:s} histogram of request duration'.format(method_name)
+    return Histogram(name, description)
+
+METRIC_TEMPLATES = {
+    '{:s}_COUNTER_STARTED'   : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.STARTED),
+    '{:s}_COUNTER_COMPLETED' : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.COMPLETED),
+    '{:s}_COUNTER_FAILED'    : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.FAILED),
+    '{:s}_HISTOGRAM_DURATION': lambda method_name: get_histogram_duration(method_name),
+}
+
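+# create_metrics() instantiates the four METRIC_TEMPLATES entries per method; the
+# resulting keys follow the pattern '<METHODNAME>_COUNTER_STARTED' / '_COUNTER_COMPLETED'
+# / '_COUNTER_FAILED' / '_HISTOGRAM_DURATION' (upper-cased), which is how
+# safe_and_metered_rpc_method() looks them up below.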
+def create_metrics(service_name : str, method_names : List[str]) -> Dict[str, MetricWrapperBase]:
+    metrics = {}
+    for method_name in method_names:
+        for template_name, template_generator_function in METRIC_TEMPLATES.items():
+            metric_name = template_name.format(method_name).upper()
+            metrics[metric_name] = template_generator_function('{:s}:{:s}'.format(service_name, method_name))
+    return metrics
+
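+# Decorator factory for gRPC servicer methods: it times each call with the method's
+# histogram, counts started/completed/failed requests, logs request and reply, and
+# translates ServiceException (or any unexpected exception) into grpc_context.abort().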
+def safe_and_metered_rpc_method(metrics : Dict[str, MetricWrapperBase], logger : logging.Logger):
+    def outer_wrapper(func):
+        function_name = func.__name__
+        HISTOGRAM_DURATION : Histogram = metrics.get('{:s}_HISTOGRAM_DURATION'.format(function_name).upper())
+        COUNTER_STARTED    : Counter   = metrics.get('{:s}_COUNTER_STARTED'   .format(function_name).upper())
+        COUNTER_COMPLETED  : Counter   = metrics.get('{:s}_COUNTER_COMPLETED' .format(function_name).upper())
+        COUNTER_FAILED     : Counter   = metrics.get('{:s}_COUNTER_FAILED'    .format(function_name).upper())
+
+        @HISTOGRAM_DURATION.time()
+        def inner_wrapper(self, request, grpc_context : grpc.ServicerContext):
+            COUNTER_STARTED.inc()
+            try:
+                logger.debug('{:s} request: {:s}'.format(function_name, grpc_message_to_json_string(request)))
+                reply = func(self, request, grpc_context)
+                logger.debug('{:s} reply: {:s}'.format(function_name, grpc_message_to_json_string(reply)))
+                COUNTER_COMPLETED.inc()
+                return reply
+            except ServiceException as e:   # pragma: no cover (ServiceException not thrown)
+                # NOT_FOUND and ALREADY_EXISTS are treated as normal conditions, not errors;
+                # any other status code is logged and counted as a failure.
+                if e.code not in [grpc.StatusCode.NOT_FOUND, grpc.StatusCode.ALREADY_EXISTS]:
+                    logger.exception('{:s} exception'.format(function_name))
+                    COUNTER_FAILED.inc()
+                grpc_context.abort(e.code, e.details)
+            except Exception as e:          # pragma: no cover, pylint: disable=broad-except
+                logger.exception('{:s} exception'.format(function_name))
+                COUNTER_FAILED.inc()
+                grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+        return inner_wrapper
+    return outer_wrapper
diff --git a/src/tests/ofc22/tests/common/rpc_method_wrapper/ServiceExceptions.py b/src/tests/ofc22/tests/common/rpc_method_wrapper/ServiceExceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4f0a64cad79c96dc069bd37e8d2c2be5f011c53
--- /dev/null
+++ b/src/tests/ofc22/tests/common/rpc_method_wrapper/ServiceExceptions.py
@@ -0,0 +1,58 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc
+from typing import Iterable, Union
+
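+# ServiceException is the common base: it carries a grpc.StatusCode plus a details
+# string built by joining 'details' with any extra_details; the subclasses below simply
+# bind the usual status codes (NOT_FOUND, ALREADY_EXISTS, INVALID_ARGUMENT, INTERNAL).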
+class ServiceException(Exception):
+    def __init__(
+        self, code : grpc.StatusCode, details : str, extra_details : Union[str, Iterable[str]] = None
+        ) -> None:
+
+        self.code = code
+        if extra_details is None: extra_details = []
+        if isinstance(extra_details, str): extra_details = [extra_details]
+        self.details = '; '.join(map(str, [details] + list(extra_details)))
+        super().__init__(self.details)
+
+class NotFoundException(ServiceException):
+    def __init__(
+        self, object_name : str, object_uuid: str, extra_details : Union[str, Iterable[str]] = None
+        ) -> None:
+
+        details = '{:s}({:s}) not found'.format(str(object_name), str(object_uuid))
+        super().__init__(grpc.StatusCode.NOT_FOUND, details, extra_details=extra_details)
+
+class AlreadyExistsException(ServiceException):
+    def __init__(
+        self, object_name : str, object_uuid: str, extra_details : Union[str, Iterable[str]] = None
+        ) -> None:
+
+        details = '{:s}({:s}) already exists'.format(str(object_name), str(object_uuid))
+        super().__init__(grpc.StatusCode.ALREADY_EXISTS, details, extra_details=extra_details)
+
+class InvalidArgumentException(ServiceException):
+    def __init__(
+        self, argument_name : str, argument_value: str, extra_details : Union[str, Iterable[str]] = None
+        ) -> None:
+
+        details = '{:s}({:s}) is invalid'.format(str(argument_name), str(argument_value))
+        super().__init__(grpc.StatusCode.INVALID_ARGUMENT, details, extra_details=extra_details)
+
+class OperationFailedException(ServiceException):
+    def __init__(
+        self, operation : str, extra_details : Union[str, Iterable[str]] = None
+        ) -> None:
+
+        details = 'Operation({:s}) failed'.format(str(operation))
+        super().__init__(grpc.StatusCode.INTERNAL, details, extra_details=extra_details)
diff --git a/src/tests/ofc22/tests/common/rpc_method_wrapper/__init__.py b/src/tests/ofc22/tests/common/rpc_method_wrapper/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/rpc_method_wrapper/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/__init__.py b/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/test_unitary.py b/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8fc7a2aa187dcb905a8c230b047ffb1171d2ccd
--- /dev/null
+++ b/src/tests/ofc22/tests/common/rpc_method_wrapper/tests/test_unitary.py
@@ -0,0 +1,44 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging, time
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+
+logging.basicConfig(level=logging.DEBUG)
+LOGGER = logging.getLogger(__name__)
+
+def test_safe_and_metered_rpc_method():
+    SERVICE_NAME = 'Context'
+    METHOD_NAMES = [
+        'ListContextIds',  'ListContexts',   'GetContext',  'SetContext',  'RemoveContext',  'GetContextEvents',
+        'ListTopologyIds', 'ListTopologies', 'GetTopology', 'SetTopology', 'RemoveTopology', 'GetTopologyEvents',
+        'ListDeviceIds',   'ListDevices',    'GetDevice',   'SetDevice',   'RemoveDevice',   'GetDeviceEvents',
+        'ListLinkIds',     'ListLinks',      'GetLink',     'SetLink',     'RemoveLink',     'GetLinkEvents',
+        'ListServiceIds',  'ListServices',   'GetService',  'SetService',  'RemoveService',  'GetServiceEvents',
+    ]
+    METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+    class TestServiceServicerImpl:
+        @safe_and_metered_rpc_method(METRICS, LOGGER)
+        def GetTopology(self, request, grpc_context : grpc.ServicerContext):
+            print('doing funny things')
+            time.sleep(0.1)
+            return 'done'
+
+    tssi = TestServiceServicerImpl()
+    tssi.GetTopology(1, 2)
+
+    for metric_name,metric in METRICS.items():
+        if 'GETTOPOLOGY_' not in metric_name: continue
+        print(metric_name, metric._child_samples()) # pylint: disable=protected-access
diff --git a/src/tests/ofc22/tests/common/tests/EventTools.py b/src/tests/ofc22/tests/common/tests/EventTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..ceff4d60e597690b29d5f1bcac894c081eb88a56
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/EventTools.py
@@ -0,0 +1,101 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json, logging
+from typing import Dict, List, Tuple
+from common.proto.context_pb2 import (
+    ConnectionEvent, ContextEvent, DeviceEvent, EventTypeEnum, LinkEvent, ServiceEvent, TopologyEvent)
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from context.client.EventsCollector import EventsCollector
+
+LOGGER = logging.getLogger(__name__)
+
+EVENT_CREATE = EventTypeEnum.EVENTTYPE_CREATE
+EVENT_UPDATE = EventTypeEnum.EVENTTYPE_UPDATE
+EVENT_REMOVE = EventTypeEnum.EVENTTYPE_REMOVE
+
+def class_to_classname(klass): return klass.__name__
+def instance_to_classname(instance): return type(instance).__name__
+
+CLASSNAME_CONTEXT_EVENT    = class_to_classname(ContextEvent)
+CLASSNAME_TOPOLOGY_EVENT   = class_to_classname(TopologyEvent)
+CLASSNAME_DEVICE_EVENT     = class_to_classname(DeviceEvent)
+CLASSNAME_LINK_EVENT       = class_to_classname(LinkEvent)
+CLASSNAME_SERVICE_EVENT    = class_to_classname(ServiceEvent)
+CLASSNAME_CONNECTION_EVENT = class_to_classname(ConnectionEvent)
+
+EVENT_CLASS_NAME__TO__ENTITY_ID_SELECTOR = {
+    CLASSNAME_CONTEXT_EVENT   : lambda event: event.context_id,
+    CLASSNAME_TOPOLOGY_EVENT  : lambda event: event.topology_id,
+    CLASSNAME_DEVICE_EVENT    : lambda event: event.device_id,
+    CLASSNAME_LINK_EVENT      : lambda event: event.link_id,
+    CLASSNAME_SERVICE_EVENT   : lambda event: event.service_id,
+    CLASSNAME_CONNECTION_EVENT: lambda event: event.connection_id,
+}
+
+def event_to_key(event):
+    event_class_name = instance_to_classname(event)
+    entity_id_selector_function = EVENT_CLASS_NAME__TO__ENTITY_ID_SELECTOR.get(event_class_name)
+    entity_id = entity_id_selector_function(event)
+    return (event_class_name, event.event.event_type, grpc_message_to_json_string(entity_id))
+
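+# check_events() performs a multiset match: expected events are counted into a map keyed
+# by (class name, event type, serialized entity id); each received event decrements its
+# counter. After the loop, positive leftovers are missing events and negative leftovers
+# are unexpected ones; either can be promoted to a failure via the fail_if_* flags.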
+def check_events(
+    events_collector : EventsCollector, expected_events : List[Tuple[str, int, Dict]],
+    fail_if_missing_events : bool = True, fail_if_unexpected_events : bool = False,
+    timeout_per_event = 1.0, max_wait_time = 30.0
+) -> None:
+    expected_events_map = {}
+    num_expected_events = 0
+    for event_classname, event_type_id, event_ids in expected_events:
+        event_key = (event_classname, event_type_id, json.dumps(event_ids, sort_keys=True))
+        event_count = expected_events_map.get(event_key, 0)
+        expected_events_map[event_key] = event_count + 1
+        num_expected_events += 1
+
+    i, wait_time = 0, 0
+    while num_expected_events > 0:
+        event_received = events_collector.get_event(block=True, timeout=timeout_per_event)
+        if event_received is None:
+            wait_time += timeout_per_event
+            if wait_time > max_wait_time: break
+            continue
+        LOGGER.info('event_received[{:d}] = {:s}'.format(i, str(event_received)))
+        i += 1
+        event_key = event_to_key(event_received)
+        event_count = expected_events_map.pop(event_key, 0)
+        if event_count > 0: num_expected_events -= 1
+        event_count -= 1
+        if event_count != 0: expected_events_map[event_key] = event_count
+
+    if len(expected_events_map) == 0:
+        LOGGER.info('EventCheck passed')
+    else:
+        missing_events = {}
+        unexpected_events = {}
+        for event_key,event_count in expected_events_map.items():
+            if event_count > 0:
+                missing_events[event_key] = event_count
+            if event_count < 0:
+                unexpected_events[event_key] = -event_count
+        msg_except = ['EventCheck failed:']
+        msg_logger = ['EventCheck:']
+        if len(missing_events) > 0:
+            msg = 'missing_events={:s}'.format(str(missing_events))
+            if fail_if_missing_events: msg_except.append(msg)
+            msg_logger.append(msg)
+        if len(unexpected_events) > 0:
+            msg = 'unexpected_events={:s}'.format(str(unexpected_events))
+            if fail_if_unexpected_events: msg_except.append(msg)
+            msg_logger.append(msg)
+        if len(msg_logger) > 1: LOGGER.warning(' '.join(msg_logger))
+        if len(msg_except) > 1: raise Exception(' '.join(msg_except))
diff --git a/src/tests/ofc22/tests/common/tests/MockServicerImpl_Context.py b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Context.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f80fdbcab0419072a4299f908a7b637038c2a1b
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Context.py
@@ -0,0 +1,279 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from typing import Any, Dict, Iterator, List
+from common.proto.context_pb2 import (
+    Connection, ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList,
+    Context, ContextEvent, ContextId, ContextIdList, ContextList,
+    Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList,
+    Empty,
+    Link, LinkEvent, LinkId, LinkIdList, LinkList,
+    Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList,
+    Slice, SliceEvent, SliceId, SliceIdList, SliceList,
+    Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList)
+from common.proto.context_pb2_grpc import ContextServiceServicer
+from common.tools.grpc.Tools import grpc_message_to_json_string
+
+LOGGER = logging.getLogger(__name__)
+
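+# The mock keeps all state in a plain nested dict: container name -> entry uuid ->
+# protobuf message. Topology containers are scoped per context via the key pattern
+# 'topology[<context_uuid>]' (see the Topology methods below).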
+def get_container(database : Dict[str, Dict[str, Any]], container_name : str) -> Dict[str, Any]:
+    return database.setdefault(container_name, {})
+
+def get_entries(database : Dict[str, Dict[str, Any]], container_name : str) -> List[Any]:
+    container = get_container(database, container_name)
+    return [container[entry_uuid] for entry_uuid in sorted(container.keys())]
+
+def get_entry(
+    context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str
+) -> Any:
+    LOGGER.debug('[get_entry] database={:s}'.format(str(database)))
+    container = get_container(database, container_name)
+    if entry_uuid not in container:
+        context.abort(grpc.StatusCode.NOT_FOUND, str('{:s}({:s}) not found'.format(container_name, entry_uuid)))
+    return container[entry_uuid]
+
+def set_entry(database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str, entry : Any) -> Any:
+    container = get_container(database, container_name)
+    LOGGER.debug('[set_entry] BEFORE database={:s}'.format(str(database)))
+    container[entry_uuid] = entry
+    LOGGER.debug('[set_entry] AFTER database={:s}'.format(str(database)))
+    return entry
+
+def del_entry(
+    context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str
+) -> Any:
+    container = get_container(database, container_name)
+    if entry_uuid not in container:
+        context.abort(grpc.StatusCode.NOT_FOUND, str('{:s}({:s}) not found'.format(container_name, entry_uuid)))
+    del container[entry_uuid]
+    return Empty()
+
+class MockServicerImpl_Context(ContextServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.database : Dict[str, Any] = {}
+        LOGGER.info('[__init__] Servicer Created')
+
+    # ----- Context ----------------------------------------------------------------------------------------------------
+
+    def ListContextIds(self, request: Empty, context : grpc.ServicerContext) -> ContextIdList:
+        LOGGER.info('[ListContextIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return ContextIdList(context_ids=[entry.context_id for entry in get_entries(self.database, 'context')])
+
+    def ListContexts(self, request: Empty, context : grpc.ServicerContext) -> ContextList:
+        LOGGER.info('[ListContexts] request={:s}'.format(grpc_message_to_json_string(request)))
+        return ContextList(contexts=get_entries(self.database, 'context'))
+
+    def GetContext(self, request: ContextId, context : grpc.ServicerContext) -> Context:
+        LOGGER.info('[GetContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'context', request.context_uuid.uuid)
+
+    def SetContext(self, request: Context, context : grpc.ServicerContext) -> ContextId:
+        LOGGER.info('[SetContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'context', request.context_id.context_uuid.uuid, request).context_id
+
+    def RemoveContext(self, request: ContextId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'context', request.context_uuid.uuid)
+
+    def GetContextEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ContextEvent]:
+        LOGGER.info('[GetContextEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+
+
+    # ----- Topology ---------------------------------------------------------------------------------------------------
+
+    def ListTopologyIds(self, request: ContextId, context : grpc.ServicerContext) -> TopologyIdList:
+        LOGGER.info('[ListTopologyIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_uuid.uuid)))
+        return TopologyIdList(topology_ids=[topology.topology_id for topology in topologies])
+
+    def ListTopologies(self, request: ContextId, context : grpc.ServicerContext) -> TopologyList:
+        LOGGER.info('[ListTopologies] request={:s}'.format(grpc_message_to_json_string(request)))
+        topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_uuid.uuid)))
+        return TopologyList(topologies=topologies)
+
+    def GetTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Topology:
+        LOGGER.info('[GetTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return get_entry(context, self.database, container_name, request.topology_uuid.uuid)
+
+    def SetTopology(self, request: Topology, context : grpc.ServicerContext) -> TopologyId:
+        LOGGER.info('[SetTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.topology_id.context_id.context_uuid.uuid))
+        return set_entry(self.database, container_name, request.topology_id.topology_uuid.uuid, request).topology_id
+
+    def RemoveTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return del_entry(context, self.database, container_name, request.topology_uuid.uuid)
+
+    def GetTopologyEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[TopologyEvent]:
+        LOGGER.info('[GetTopologyEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
+
+
+    # ----- Device -----------------------------------------------------------------------------------------------------
+
+    def ListDeviceIds(self, request: Empty, context : grpc.ServicerContext) -> DeviceIdList:
+        LOGGER.info('[ListDeviceIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return DeviceIdList(device_ids=[device.device_id for device in get_entries(self.database, 'device')])
+
+    def ListDevices(self, request: Empty, context : grpc.ServicerContext) -> DeviceList:
+        LOGGER.info('[ListDevices] request={:s}'.format(grpc_message_to_json_string(request)))
+        return DeviceList(devices=get_entries(self.database, 'device'))
+
+    def GetDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Device:
+        LOGGER.info('[GetDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'device', request.device_uuid.uuid)
+
+    def SetDevice(self, request: Device, context : grpc.ServicerContext) -> DeviceId:
+        LOGGER.info('[SetDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'device', request.device_id.device_uuid.uuid, request).device_id
+
+    def RemoveDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'device', request.device_uuid.uuid)
+
+    def GetDeviceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[DeviceEvent]:
+        LOGGER.info('[GetDeviceEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
+
+
+    # ----- Link -------------------------------------------------------------------------------------------------------
+
+    def ListLinkIds(self, request: Empty, context : grpc.ServicerContext) -> LinkIdList:
+        LOGGER.info('[ListLinkIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return LinkIdList(link_ids=[link.link_id for link in get_entries(self.database, 'link')])
+
+    def ListLinks(self, request: Empty, context : grpc.ServicerContext) -> LinkList:
+        LOGGER.info('[ListLinks] request={:s}'.format(grpc_message_to_json_string(request)))
+        return LinkList(links=get_entries(self.database, 'link'))
+
+    def GetLink(self, request: LinkId, context : grpc.ServicerContext) -> Link:
+        LOGGER.info('[GetLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'link', request.link_uuid.uuid)
+
+    def SetLink(self, request: Link, context : grpc.ServicerContext) -> LinkId:
+        LOGGER.info('[SetLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'link', request.link_id.link_uuid.uuid, request).link_id
+
+    def RemoveLink(self, request: LinkId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'link', request.link_uuid.uuid)
+
+    def GetLinkEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[LinkEvent]:
+        LOGGER.info('[GetLinkEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
+
+
+    # ----- Slice ------------------------------------------------------------------------------------------------------
+
+    def ListSliceIds(self, request: ContextId, context : grpc.ServicerContext) -> SliceIdList:
+        LOGGER.info('[ListSliceIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        slices = get_entries(self.database, 'slice[{:s}]'.format(str(request.context_uuid.uuid)))
+        return SliceIdList(slice_ids=[slice_.slice_id for slice_ in slices])
+
+    def ListSlices(self, request: ContextId, context : grpc.ServicerContext) -> SliceList:
+        LOGGER.info('[ListSlices] request={:s}'.format(grpc_message_to_json_string(request)))
+        slices = get_entries(self.database, 'slice[{:s}]'.format(str(request.context_uuid.uuid)))
+        return SliceList(slices=slices)
+
+    def GetSlice(self, request: SliceId, context : grpc.ServicerContext) -> Slice:
+        LOGGER.info('[GetSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'slice[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return get_entry(context, self.database, container_name, request.slice_uuid.uuid)
+
+    def SetSlice(self, request: Slice, context : grpc.ServicerContext) -> SliceId:
+        LOGGER.info('[SetSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(
+            self.database, 'slice[{:s}]'.format(str(request.slice_id.context_id.context_uuid.uuid)),
+            request.slice_id.slice_uuid.uuid, request).slice_id
+
+    def RemoveSlice(self, request: SliceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'slice[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return del_entry(context, self.database, container_name, request.slice_uuid.uuid)
+
+    def GetSliceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[SliceEvent]:
+        LOGGER.info('[GetSliceEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
+
+
+    # ----- Service ----------------------------------------------------------------------------------------------------
+
+    def ListServiceIds(self, request: ContextId, context : grpc.ServicerContext) -> ServiceIdList:
+        LOGGER.info('[ListServiceIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_uuid.uuid)))
+        return ServiceIdList(service_ids=[service.service_id for service in services])
+
+    def ListServices(self, request: ContextId, context : grpc.ServicerContext) -> ServiceList:
+        LOGGER.info('[ListServices] request={:s}'.format(grpc_message_to_json_string(request)))
+        services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_uuid.uuid)))
+        return ServiceList(services=services)
+
+    def GetService(self, request: ServiceId, context : grpc.ServicerContext) -> Service:
+        LOGGER.info('[GetService] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return get_entry(context, self.database, container_name, request.service_uuid.uuid)
+
+    def SetService(self, request: Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[SetService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(
+            self.database, 'service[{:s}]'.format(str(request.service_id.context_id.context_uuid.uuid)),
+            request.service_id.service_uuid.uuid, request).service_id
+
+    def RemoveService(self, request: ServiceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveService] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return del_entry(context, self.database, container_name, request.service_uuid.uuid)
+
+    def GetServiceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ServiceEvent]:
+        LOGGER.info('[GetServiceEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
+
+
+    # ----- Connection -------------------------------------------------------------------------------------------------
+
+    def ListConnectionIds(self, request: ServiceId, context : grpc.ServicerContext) -> ConnectionIdList:
+        LOGGER.info('[ListConnectionIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service_connections[{:s}/{:s}]'.format(
+            str(request.context_id.context_uuid.uuid), str(request.service_uuid.uuid))
+        return ConnectionIdList(connection_ids=[c.connection_id for c in get_entries(self.database, container_name)])
+
+    def ListConnections(self, request: ServiceId, context : grpc.ServicerContext) -> ConnectionList:
+        LOGGER.info('[ListConnections] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service_connections[{:s}/{:s}]'.format(
+            str(request.context_id.context_uuid.uuid), str(request.service_uuid.uuid))
+        return ConnectionList(connections=get_entries(self.database, container_name))
+
+    def GetConnection(self, request: ConnectionId, context : grpc.ServicerContext) -> Connection:
+        LOGGER.info('[GetConnection] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'connection', request.connection_uuid.uuid)
+
+    def SetConnection(self, request: Connection, context : grpc.ServicerContext) -> ConnectionId:
+        LOGGER.info('[SetConnection] request={:s}'.format(grpc_message_to_json_string(request)))
+        # NOTE: container name must match the one used by ListConnectionIds/ListConnections
+        service_connection__container_name = 'service_connections[{:s}/{:s}]'.format(
+            str(request.service_id.context_id.context_uuid.uuid), str(request.service_id.service_uuid.uuid))
+        set_entry(
+            self.database, service_connection__container_name, request.connection_id.connection_uuid.uuid, request)
+        return set_entry(
+            self.database, 'connection', request.connection_id.connection_uuid.uuid, request).connection_id
+
+    def RemoveConnection(self, request: ConnectionId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveConnection] request={:s}'.format(grpc_message_to_json_string(request)))
+        connection = get_entry(context, self.database, 'connection', request.connection_uuid.uuid)
+        service_id = connection.service_id
+        service_connection__container_name = 'service_connections[{:s}/{:s}]'.format(
+            str(service_id.context_id.context_uuid.uuid), str(service_id.service_uuid.uuid))
+        del_entry(context, self.database, service_connection__container_name, request.connection_uuid.uuid)
+        return del_entry(context, self.database, 'connection', request.connection_uuid.uuid)
+
+    def GetConnectionEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ConnectionEvent]:
+        LOGGER.info('[GetConnectionEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+        return iter([]) # events are not tracked by this mock
diff --git a/src/tests/ofc22/tests/common/tests/MockServicerImpl_Device.py b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Device.py
new file mode 100644
index 0000000000000000000000000000000000000000..efb809b3c027800e9ebe38c7b18f79b5caca9ade
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Device.py
@@ -0,0 +1,51 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from common.Settings import get_setting
+from common.proto.context_pb2 import Device, DeviceConfig, DeviceId, Empty
+from common.proto.device_pb2 import MonitoringSettings
+from common.proto.device_pb2_grpc import DeviceServiceServicer
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from context.client.ContextClient import ContextClient
+
+LOGGER = logging.getLogger(__name__)
+
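+# This mock forwards Device RPCs directly to the Context service, skipping any real device driver logic; it is
+# intended for tests that only need consistent state handling.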
+class MockServicerImpl_Device(DeviceServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'),
+            get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        LOGGER.info('[__init__] Servicer Created')
+
+    def AddDevice(self, request : Device, context : grpc.ServicerContext) -> DeviceId:
+        LOGGER.info('[AddDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetDevice(request)
+
+    def ConfigureDevice(self, request : Device, context : grpc.ServicerContext) -> DeviceId:
+        LOGGER.info('[ConfigureDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetDevice(request)
+
+    def DeleteDevice(self, request : DeviceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[DeleteDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.RemoveDevice(request)
+
+    def GetInitialConfig(self, request : DeviceId, context : grpc.ServicerContext) -> DeviceConfig:
+        LOGGER.info('[GetInitialConfig] request={:s}'.format(grpc_message_to_json_string(request)))
+        return DeviceConfig()
+
+    def MonitorDeviceKpi(self, request : MonitoringSettings, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[MonitorDeviceKpi] request={:s}'.format(grpc_message_to_json_string(request)))
+        return Empty()
diff --git a/src/tests/ofc22/tests/common/tests/MockServicerImpl_Monitoring.py b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Monitoring.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f82f22e300e6ddae15816da69de63c96d0fdd5b
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Monitoring.py
@@ -0,0 +1,34 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import grpc, logging
+from queue import Queue
+from common.proto.context_pb2 import Empty
+from common.proto.monitoring_pb2 import Kpi
+from common.proto.monitoring_pb2_grpc import MonitoringServiceServicer
+from common.tools.grpc.Tools import grpc_message_to_json_string
+
+LOGGER = logging.getLogger(__name__)
+
+class MockServicerImpl_Monitoring(MonitoringServiceServicer):
+    def __init__(self, queue_samples : Queue):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.queue_samples = queue_samples
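+        # KPI samples received through IncludeKpi are pushed to this queue so tests can inspect them.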
+        LOGGER.info('[__init__] Servicer Created')
+
+    def IncludeKpi(self, request : Kpi, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[IncludeKpi] request={:s}'.format(grpc_message_to_json_string(request)))
+        self.queue_samples.put(request)
+        return Empty()
diff --git a/src/tests/ofc22/tests/common/tests/MockServicerImpl_Service.py b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Service.py
new file mode 100644
index 0000000000000000000000000000000000000000..39ddc1119e2294b2b62c40da58b151648b25051c
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Service.py
@@ -0,0 +1,45 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from common.Settings import get_setting
+from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum
+from common.proto.service_pb2_grpc import ServiceServiceServicer
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from context.client.ContextClient import ContextClient
+
+LOGGER = logging.getLogger(__name__)
+
+class MockServicerImpl_Service(ServiceServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'),
+            get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        LOGGER.info('[__init__] Servicer Created')
+
+    def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[CreateService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetService(request)
+
+    def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[UpdateService] request={:s}'.format(grpc_message_to_json_string(request)))
+        service = Service()
+        service.CopyFrom(request)
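+        # The mock reports the service as active right away, emulating a successful provisioning.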
+        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE #pylint: disable=no-member
+        return self.context_client.SetService(service)
+
+    def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[DeleteService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.RemoveService(request)
diff --git a/src/tests/ofc22/tests/common/tests/MockServicerImpl_Slice.py b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Slice.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6be3dd62dbbe0b3c307d88f27c0400a977f8adb
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/MockServicerImpl_Slice.py
@@ -0,0 +1,45 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from common.Settings import get_setting
+from common.proto.context_pb2 import Empty, Slice, SliceId, SliceStatusEnum
+from common.proto.slice_pb2_grpc import SliceServiceServicer
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from context.client.ContextClient import ContextClient
+
+LOGGER = logging.getLogger(__name__)
+
+class MockServicerImpl_Slice(SliceServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'),
+            get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        LOGGER.info('[__init__] Servicer Created')
+
+    def CreateSlice(self, request : Slice, context : grpc.ServicerContext) -> SliceId:
+        LOGGER.info('[CreateSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetSlice(request)
+
+    def UpdateSlice(self, request : Slice, context : grpc.ServicerContext) -> SliceId:
+        LOGGER.info('[UpdateSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        slice_ = Slice()
+        slice_.CopyFrom(request)
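+        # The mock reports the slice as active right away, emulating a successful provisioning.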
+        slice_.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_ACTIVE # pylint: disable=no-member
+        return self.context_client.SetSlice(slice_)
+
+    def DeleteSlice(self, request : SliceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[DeleteSlice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.RemoveSlice(request)
diff --git a/src/tests/ofc22/tests/common/tests/PytestGenerateTests.py b/src/tests/ofc22/tests/common/tests/PytestGenerateTests.py
new file mode 100644
index 0000000000000000000000000000000000000000..240570565f9f3df2905ee33efb15c3b6a147050c
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/PytestGenerateTests.py
@@ -0,0 +1,56 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Create a set of tests enabling to run tests as follows ...
+#   from common.tests.PytestGenerateTests import pytest_generate_tests # pylint: disable=unused-import
+#
+#   scenario1 = ('basic', {'attribute': 'value'})
+#   scenario2 = ('advanced', {'attribute': 'value2'})
+#
+#   class TestSampleWithScenarios:
+#       scenarios = [scenario1, scenario2]
+#
+#       def test_demo1(self, attribute):
+#           assert isinstance(attribute, str)
+#
+#       def test_demo2(self, attribute):
+#           assert isinstance(attribute, str)
+#
+# ... and run them as:
+#   $ pytest --log-level=INFO --verbose my_test.py
+#   =================== test session starts ===================
+#   platform linux -- Python 3.9.6, pytest-6.2.4, py-1.10.0, pluggy-0.13.1 -- /home/.../.pyenv/.../bin/python3.9
+#   cachedir: .pytest_cache
+#   benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0
+#                               calibration_precision=10 warmup=False warmup_iterations=100000)
+#   rootdir: /home/.../tests
+#   plugins: benchmark-3.4.1
+#   collected 4 items
+#
+#   my_test.py::TestSampleWithScenarios::test_demo1[basic] PASSED          [ 25%]
+#   my_test.py::TestSampleWithScenarios::test_demo2[basic] PASSED          [ 50%]
+#   my_test.py::TestSampleWithScenarios::test_demo1[advanced] PASSED       [ 75%]
+#   my_test.py::TestSampleWithScenarios::test_demo2[advanced] PASSED       [100%]
+#
+#   ==================== 4 passed in 0.02s ====================
+
+def pytest_generate_tests(metafunc):
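+    # Note: all scenarios must declare the same attribute names, since a single parametrize() call is issued
+    # with the argument names computed from the last scenario.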
+    idlist = []
+    argvalues = []
+    for scenario in metafunc.cls.scenarios:
+        idlist.append(scenario[0])
+        items = scenario[1].items()
+        argnames = [x[0] for x in items]
+        argvalues.append([x[1] for x in items])
+    metafunc.parametrize(argnames, argvalues, ids=idlist, scope='class')
diff --git a/src/tests/ofc22/tests/common/tests/__init__.py b/src/tests/ofc22/tests/common/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/tools/__init__.py b/src/tests/ofc22/tests/common/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/tools/client/RetryDecorator.py b/src/tests/ofc22/tests/common/tools/client/RetryDecorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a1c0d69fc5d86b8a64a5886884c31e73af27777
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/client/RetryDecorator.py
@@ -0,0 +1,91 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This decorator re-executes the decorated function when it raises an exception. It allows controlling the maximum
+# number of retries, the delay between retries, and the execution of a preparation method before every retry.
+# The delay is specified by means of user-customizable functions.
+#
+# Delay functions should return a compute function taking a single parameter, the retry number. For instance:
+#   delay_linear(initial=0, increment=0):
+#       adds a constant delay of 0 seconds between retries
+#   delay_linear(initial=1, increment=0):
+#       adds a constant delay of 1 second between retries
+#   delay_linear(initial=1, increment=0.5, maximum=10):
+#       adds an increasing delay between retries, starting with 1 second, and increasing it linearly by steps of 0.5
+#       seconds, up to 10 seconds, every time an exception is caught within the current execution.
+#       E.g. 1.0, 1.5, 2.0, 2.5, ..., 10.0, 10.0, 10.0, ...
+#   delay_exponential(initial=1, increment=1): adds a constant delay of 1 second between retries
+#   delay_exponential(initial=1, increment=2, maximum=10):
+#       adds an increasing delay between retries, starting with 1 second, and multiplying it by a factor of 2,
+#       up to 10 seconds, every time an exception is caught within the current execution.
+#       E.g. 1.0, 2.0, 4.0, 8.0, 10.0, 10.0, 10.0, ...
+#
+# Arguments:
+# - max_retries: defines the maximum number of retries acceptable before giving up. By default, 0 retries are executed.
+# - delay_function: defines the delay computation method to be used. By default, delay_linear with a fixed delay of 0.1
+#   seconds is used.
+# - prepare_method_name: if not None, defines the name of the preparation method within the same class to be executed
+#   when a retriable exception is caught, and before running the next retry. By default, it is None, meaning that no
+#   method is executed.
+# - prepare_method_args: defines the list of positional arguments to be provided to the preparation method. If no
+#   preparation method is specified, the argument is silently ignored. By default, an empty list is defined.
+# - prepare_method_kwargs: defines the dictionary of keyword arguments to be provided to the preparation method. If no
+#   preparation method is specified, the argument is silently ignored. By default, an empty dictionary is defined.
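+#
+# Illustrative usage sketch (class and method names are hypothetical):
+#   class MyClient:
+#       def connect(self): ...             # e.g. re-create the gRPC channel before retrying
+#
+#       @retry(max_retries=5, delay_function=delay_exponential(initial=1, increment=2, maximum=10),
+#              prepare_method_name='connect')
+#       def get_data(self): ...            # re-executed while the server reports UNAVAILABLE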
+
+import grpc, logging, time
+from grpc._channel import _InactiveRpcError
+
+LOGGER = logging.getLogger(__name__)
+
+def delay_linear(initial=0, increment=0, maximum=None):
+    def compute(num_try):
+        delay = initial + (num_try - 1) * increment
+        if maximum is not None: delay = min(delay, maximum) # cap the delay at the configured maximum
+        return delay
+    return compute
+
+def delay_exponential(initial=1, increment=1, maximum=None):
+    def compute(num_try):
+        delay = initial * pow(increment, (num_try - 1))
+        if maximum is not None: delay = min(delay, maximum) # cap the delay at the configured maximum
+        return delay
+    return compute
+
+def retry(max_retries=0, delay_function=delay_linear(initial=0, increment=0),
+          prepare_method_name=None, prepare_method_args=[], prepare_method_kwargs={}):
+    def _reconnect(func):
+        def wrapper(self, *args, **kwargs):
+            if prepare_method_name is not None:
+                prepare_method = getattr(self, prepare_method_name, None)
+                if prepare_method is None: raise Exception('Prepare Method ({}) not found'.format(prepare_method_name))
+            num_try, given_up = 0, False
+            while not given_up:
+                try:
+                    return func(self, *args, **kwargs)
+                except (grpc.RpcError, _InactiveRpcError) as e:
+                    if e.code() not in [grpc.StatusCode.UNAVAILABLE]: raise
+
+                    num_try += 1
+                    given_up = num_try > max_retries
+                    if given_up: raise Exception('Giving up... {:d} tries failed'.format(max_retries)) from e
+                    if delay_function is not None:
+                        delay = delay_function(num_try)
+                        LOGGER.info('Retry {:d}/{:d} after {:f} seconds...'.format(num_try, max_retries, delay))
+                        time.sleep(delay)
+                    else:
+                        LOGGER.info('Retry {:d}/{:d} immediate...'.format(num_try, max_retries))
+
+                    if prepare_method_name is not None: prepare_method(*prepare_method_args, **prepare_method_kwargs)
+        return wrapper
+    return _reconnect
diff --git a/src/tests/ofc22/tests/common/tools/client/__init__.py b/src/tests/ofc22/tests/common/tools/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/client/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/tools/grpc/Tools.py b/src/tests/ofc22/tests/common/tools/grpc/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0c72a36f18acf2c278f7204352055861b79216f
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/grpc/Tools.py
@@ -0,0 +1,36 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from google.protobuf.json_format import MessageToDict
+
+def grpc_message_to_json(
+        message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False
+    ):
+    if not hasattr(message, 'DESCRIPTOR'): return json.dumps(str(message), sort_keys=True) # not a gRPC message
+    return MessageToDict(
+        message, including_default_value_fields=including_default_value_fields,
+        preserving_proto_field_name=preserving_proto_field_name, use_integers_for_enums=use_integers_for_enums)
+
+def grpc_message_list_to_json(message_list):
+    if message_list is None: return None
+    return [grpc_message_to_json(message) for message in message_list]
+
+def grpc_message_to_json_string(message):
+    if message is None: return str(None)
+    return json.dumps(grpc_message_to_json(message), sort_keys=True)
+
+def grpc_message_list_to_json_string(message_list):
+    if message_list is None: return str(None)
+    return json.dumps(grpc_message_list_to_json(message_list), sort_keys=True)
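+
+# Illustrative example (uuid value is hypothetical): for a ContextId message carrying uuid 'admin',
+# grpc_message_to_json_string() returns the deterministic string '{"context_uuid": {"uuid": "admin"}}',
+# which is what the mock servicers above write to their logs.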
diff --git a/src/tests/ofc22/tests/common/tools/grpc/__init__.py b/src/tests/ofc22/tests/common/tools/grpc/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/grpc/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/ConfigRule.py b/src/tests/ofc22/tests/common/tools/object_factory/ConfigRule.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d889ee7ef3a38f1e43c74addc75d95291370491
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/ConfigRule.py
@@ -0,0 +1,27 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from typing import Any, Dict, Union
+from common.proto.context_pb2 import ConfigActionEnum
+
+def json_config_rule(action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    if not isinstance(resource_value, str): resource_value = json.dumps(resource_value, sort_keys=True)
+    return {'action': action, 'custom': {'resource_key': resource_key, 'resource_value': resource_value}}
+
+def json_config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return json_config_rule(ConfigActionEnum.CONFIGACTION_SET, resource_key, resource_value)
+
+def json_config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return json_config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value)
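+
+# Illustrative example (resource key is hypothetical): json_config_rule_set('/interface[eth0]/settings',
+# {'enabled': True}) returns {'action': ConfigActionEnum.CONFIGACTION_SET,
+#   'custom': {'resource_key': '/interface[eth0]/settings', 'resource_value': '{"enabled": true}'}}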
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Connection.py b/src/tests/ofc22/tests/common/tools/object_factory/Connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f0207ae57901dc4775552b0a967d280077069bb
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Connection.py
@@ -0,0 +1,32 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, List, Optional
+
+def json_connection_id(connection_uuid : str):
+    return {'connection_uuid': {'uuid': connection_uuid}}
+
+def json_connection(
+        connection_uuid : str, service_id : Optional[Dict] = None, path_hops_endpoint_ids : List[Dict] = [],
+        sub_service_ids : List[Dict] = []
+    ):
+
+    result = {
+        'connection_id'         : json_connection_id(connection_uuid),
+        'path_hops_endpoint_ids': copy.deepcopy(path_hops_endpoint_ids),
+        'sub_service_ids'       : copy.deepcopy(sub_service_ids),
+    }
+    if service_id is not None: result['service_id'] = copy.deepcopy(service_id)
+    return result
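+
+# Illustrative example (uuid is hypothetical): json_connection('conn-1') returns
+#   {'connection_id': {'connection_uuid': {'uuid': 'conn-1'}}, 'path_hops_endpoint_ids': [], 'sub_service_ids': []}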
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Constraint.py b/src/tests/ofc22/tests/common/tools/object_factory/Constraint.py
new file mode 100644
index 0000000000000000000000000000000000000000..df290d4285330f1965608d710d9d48ca49131521
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Constraint.py
@@ -0,0 +1,20 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from typing import Any, Dict, Union
+
+def json_constraint(constraint_type : str, constraint_value : Union[str, Dict[str, Any]]):
+    if not isinstance(constraint_value, str): constraint_value = json.dumps(constraint_value, sort_keys=True)
+    return {'custom': {'constraint_type': constraint_type, 'constraint_value': constraint_value}}
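+
+# Illustrative example (values are hypothetical): json_constraint('latency', {'latency_ms': 15}) returns
+#   {'custom': {'constraint_type': 'latency', 'constraint_value': '{"latency_ms": 15}'}}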
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Context.py b/src/tests/ofc22/tests/common/tools/object_factory/Context.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5d1bf9439dd12c67d77bcbe38f37fb29c89d948
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Context.py
@@ -0,0 +1,23 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def json_context_id(context_uuid : str):
+    return {'context_uuid': {'uuid': context_uuid}}
+
+def json_context(context_uuid : str):
+    return {
+        'context_id'  : json_context_id(context_uuid),
+        'topology_ids': [],
+        'service_ids' : [],
+    }
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Device.py b/src/tests/ofc22/tests/common/tools/object_factory/Device.py
new file mode 100644
index 0000000000000000000000000000000000000000..32baff9ae5cfb9a9a41d1d06bfec7df5fd5c0e4a
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Device.py
@@ -0,0 +1,119 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, List, Tuple
+from common.DeviceTypes import DeviceTypeEnum
+from common.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from common.tools.object_factory.ConfigRule import json_config_rule_set
+
+DEVICE_DISABLED = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED
+
+DEVICE_EMUOLS_TYPE  = DeviceTypeEnum.EMULATED_OPTICAL_LINE_SYSTEM.value
+DEVICE_EMUPR_TYPE   = DeviceTypeEnum.EMULATED_PACKET_ROUTER.value
+DEVICE_EMU_DRIVERS  = [DeviceDriverEnum.DEVICEDRIVER_UNDEFINED]
+DEVICE_EMU_ADDRESS  = '127.0.0.1'
+DEVICE_EMU_PORT     = '0'
+
+DEVICE_PR_TYPE      = DeviceTypeEnum.PACKET_ROUTER.value
+DEVICE_PR_DRIVERS   = [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG]
+
+DEVICE_TAPI_TYPE    = DeviceTypeEnum.OPTICAL_LINE_SYSTEM.value
+DEVICE_TAPI_DRIVERS = [DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API]
+
+# TODO: check which enum type and value to assign to microwave devices
+DEVICE_MICROWAVE_TYPE    = DeviceTypeEnum.MICROVAWE_RADIO_SYSTEM.value
+DEVICE_MICROWAVE_DRIVERS = [DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY]
+
+DEVICE_P4_TYPE      = DeviceTypeEnum.P4_SWITCH.value
+DEVICE_P4_DRIVERS   = [DeviceDriverEnum.DEVICEDRIVER_P4]
+
+def json_device_id(device_uuid : str):
+    return {'device_uuid': {'uuid': device_uuid}}
+
+def json_device(
+        device_uuid : str, device_type : str, status : DeviceOperationalStatusEnum, endpoints : List[Dict] = [],
+        config_rules : List[Dict] = [], drivers : List[Dict] = []
+    ):
+    return {
+        'device_id'                : json_device_id(device_uuid),
+        'device_type'              : device_type,
+        'device_config'            : {'config_rules': copy.deepcopy(config_rules)},
+        'device_operational_status': status,
+        'device_drivers'           : copy.deepcopy(drivers),
+        'device_endpoints'         : copy.deepcopy(endpoints),
+    }
+
+def json_device_emulated_packet_router_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_EMU_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_EMUPR_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules,
+        drivers=drivers)
+
+def json_device_emulated_tapi_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_EMU_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_EMUOLS_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules,
+        drivers=drivers)
+
+def json_device_packetrouter_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_PR_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_PR_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules, drivers=drivers)
+
+def json_device_tapi_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_TAPI_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_TAPI_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules, drivers=drivers)
+
+def json_device_microwave_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_MICROWAVE_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_MICROWAVE_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules,
+        drivers=drivers)
+
+def json_device_p4_disabled(
+        device_uuid : str, endpoints : List[Dict] = [], config_rules : List[Dict] = [],
+        drivers : List[Dict] = DEVICE_P4_DRIVERS
+    ):
+    return json_device(
+        device_uuid, DEVICE_P4_TYPE, DEVICE_DISABLED, endpoints=endpoints, config_rules=config_rules, drivers=drivers)
+
+def json_device_connect_rules(address : str, port : int, settings : Dict = {}):
+    return [
+        json_config_rule_set('_connect/address',  address),
+        json_config_rule_set('_connect/port',     port),
+        json_config_rule_set('_connect/settings', settings),
+    ]
+
+def json_device_emulated_connect_rules(
+        endpoint_descriptors : List[Tuple[str, str, List[int]]], address : str = DEVICE_EMU_ADDRESS,
+        port : int = DEVICE_EMU_PORT
+    ):
+
+    settings = {'endpoints': [
+        {'uuid': endpoint_uuid, 'type': endpoint_type, 'sample_types': sample_types}
+        for endpoint_uuid,endpoint_type,sample_types in endpoint_descriptors
+    ]}
+    return json_device_connect_rules(address, port, settings=settings)
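+
+# Illustrative usage sketch (device and endpoint names are hypothetical):
+#   endpoint_descriptors = [('eth0', 'copper', []), ('eth1', 'copper', [])]
+#   rules  = json_device_emulated_connect_rules(endpoint_descriptors)
+#   device = json_device_emulated_packet_router_disabled('R1', config_rules=rules)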
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/EndPoint.py b/src/tests/ofc22/tests/common/tools/object_factory/EndPoint.py
new file mode 100644
index 0000000000000000000000000000000000000000..9eca5e96371713d1e095eba4666ee806ad6cf71e
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/EndPoint.py
@@ -0,0 +1,51 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, List, Optional, Tuple
+
+def json_endpoint_id(device_id : Dict, endpoint_uuid : str, topology_id : Optional[Dict] = None):
+    result = {'device_id': copy.deepcopy(device_id), 'endpoint_uuid': {'uuid': endpoint_uuid}}
+    if topology_id is not None: result['topology_id'] = copy.deepcopy(topology_id)
+    return result
+
+def json_endpoint_ids(
+        device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]], topology_id : Optional[Dict] = None
+    ):
+    return [
+        json_endpoint_id(device_id, endpoint_uuid, topology_id=topology_id)
+        for endpoint_uuid, _, _ in endpoint_descriptors
+    ]
+
+def json_endpoint(
+        device_id : Dict, endpoint_uuid : str, endpoint_type : str, topology_id : Optional[Dict] = None,
+        kpi_sample_types : List[int] = []
+    ):
+
+    result = {
+        'endpoint_id': json_endpoint_id(device_id, endpoint_uuid, topology_id=topology_id),
+        'endpoint_type': endpoint_type,
+    }
+    if len(kpi_sample_types) > 0: result['kpi_sample_types'] = copy.deepcopy(kpi_sample_types)
+    return result
+
+def json_endpoints(
+        device_id : Dict, endpoint_descriptors : List[Tuple[str, str, List[int]]], topology_id : Optional[Dict] = None
+    ):
+    return [
+        json_endpoint(
+            device_id, endpoint_uuid, endpoint_type, topology_id=topology_id,
+            kpi_sample_types=endpoint_sample_types)
+        for endpoint_uuid, endpoint_type, endpoint_sample_types in endpoint_descriptors
+    ]
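+
+# Illustrative usage sketch (identifiers are hypothetical):
+#   device_id = {'device_uuid': {'uuid': 'R1'}}
+#   endpoints = json_endpoints(device_id, [('eth0', 'copper', []), ('eth1', 'copper', [])])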
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Link.py b/src/tests/ofc22/tests/common/tools/object_factory/Link.py
new file mode 100644
index 0000000000000000000000000000000000000000..13973566ece5e6f83312c9bd50ca0c4add2d262b
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Link.py
@@ -0,0 +1,27 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, List
+
+def get_link_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str:
+    return '{:s}/{:s}=={:s}/{:s}'.format(
+        a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'],
+        z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid'])
+
+def json_link_id(link_uuid : str):
+    return {'link_uuid': {'uuid': link_uuid}}
+
+def json_link(link_uuid : str, endpoint_ids : List[Dict]):
+    return {'link_id': json_link_id(link_uuid), 'link_endpoint_ids': copy.deepcopy(endpoint_ids)}
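+
+# Illustrative example (endpoints are hypothetical): for endpoint ids R1/eth0 and R2/eth0, get_link_uuid() yields
+# 'R1/eth0==R2/eth0', which can then be passed to json_link_id()/json_link().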
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Service.py b/src/tests/ofc22/tests/common/tools/object_factory/Service.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0f49210ec067267984dede6f28d7adad8009261
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Service.py
@@ -0,0 +1,53 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, List, Optional
+from common.Constants import DEFAULT_CONTEXT_UUID
+from common.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
+from common.tools.object_factory.Context import json_context_id
+
+def get_service_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str:
+    return 'svc:{:s}/{:s}=={:s}/{:s}'.format(
+        a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'],
+        z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid'])
+
+def json_service_id(service_uuid : str, context_id : Optional[Dict] = None):
+    result = {'service_uuid': {'uuid': service_uuid}}
+    if context_id is not None: result['context_id'] = copy.deepcopy(context_id)
+    return result
+
+def json_service(
+    service_uuid : str, service_type : ServiceTypeEnum, context_id : Optional[Dict] = None,
+    status : ServiceStatusEnum = ServiceStatusEnum.SERVICESTATUS_PLANNED,
+    endpoint_ids : List[Dict] = [], constraints : List[Dict] = [], config_rules : List[Dict] = []):
+
+    return {
+        'service_id'          : json_service_id(service_uuid, context_id=context_id),
+        'service_type'        : service_type,
+        'service_status'      : {'service_status': status},
+        'service_endpoint_ids': copy.deepcopy(endpoint_ids),
+        'service_constraints' : copy.deepcopy(constraints),
+        'service_config'      : {'config_rules': copy.deepcopy(config_rules)},
+    }
+
+def json_service_l3nm_planned(
+        service_uuid : str, endpoint_ids : List[Dict] = [], constraints : List[Dict] = [],
+        config_rules : List[Dict] = []
+    ):
+
+    return json_service(
+        service_uuid, ServiceTypeEnum.SERVICETYPE_L3NM, context_id=json_context_id(DEFAULT_CONTEXT_UUID),
+        status=ServiceStatusEnum.SERVICESTATUS_PLANNED, endpoint_ids=endpoint_ids, constraints=constraints,
+        config_rules=config_rules)
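+
+# Illustrative example (uuid is hypothetical): json_service_l3nm_planned('svc-1') builds a planned L3NM service in
+# the default context; endpoint ids, constraints, and config rules can be filled in afterwards.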
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/Topology.py b/src/tests/ofc22/tests/common/tools/object_factory/Topology.py
new file mode 100644
index 0000000000000000000000000000000000000000..7de4a1d577f1e46cfdf6545dde79b60808cd8afb
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/Topology.py
@@ -0,0 +1,28 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+from typing import Dict, Optional
+
+def json_topology_id(topology_uuid : str, context_id : Optional[Dict] = None):
+    result = {'topology_uuid': {'uuid': topology_uuid}}
+    if context_id is not None: result['context_id'] = copy.deepcopy(context_id)
+    return result
+
+def json_topology(topology_uuid : str, context_id : Optional[Dict] = None):
+    return {
+        'topology_id': json_topology_id(topology_uuid, context_id=context_id),
+        'device_ids' : [],
+        'link_ids'   : [],
+    }
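+
+# Usage sketch (illustrative): compose a topology identifier scoped to a context;
+# 'admin' is a hypothetical topology/context name used only as an example.
+if __name__ == '__main__':
+    from common.tools.object_factory.Context import json_context_id
+    print(json_topology('admin', context_id=json_context_id('admin')))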
diff --git a/src/tests/ofc22/tests/common/tools/object_factory/__init__.py b/src/tests/ofc22/tests/common/tools/object_factory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/object_factory/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/tools/service/GenericGrpcService.py b/src/tests/ofc22/tests/common/tools/service/GenericGrpcService.py
new file mode 100644
index 0000000000000000000000000000000000000000..61fccdb02b5a6cbd23600093bcba4c69bf142d83
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/service/GenericGrpcService.py
@@ -0,0 +1,82 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional, Union
+import grpc, logging
+from concurrent import futures
+from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
+from grpc_health.v1.health_pb2 import HealthCheckResponse
+from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from common.Settings import get_grpc_bind_address, get_grpc_grace_period, get_grpc_max_workers
+
+class GenericGrpcService:
+    def __init__(
+        self, bind_port : Union[str, int], bind_address : Optional[str] = None, max_workers : Optional[int] = None,
+        grace_period : Optional[int] = None, enable_health_servicer : bool = True, cls_name : str = __name__
+    ) -> None:
+        self.logger = logging.getLogger(cls_name)
+        self.bind_port = bind_port
+        self.bind_address = get_grpc_bind_address() if bind_address is None else bind_address
+        self.max_workers = get_grpc_max_workers() if max_workers is None else max_workers
+        self.grace_period = get_grpc_grace_period() if grace_period is None else grace_period
+        self.enable_health_servicer = enable_health_servicer
+        self.endpoint = None
+        self.health_servicer = None
+        self.pool = None
+        self.server = None
+
+    def install_servicers(self):
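+        # Hook for subclasses: register the service's gRPC servicers on self.server here.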
+        pass
+
+    def start(self):
+        self.endpoint = '{:s}:{:s}'.format(str(self.bind_address), str(self.bind_port))
+        self.logger.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.install_servicers()
+
+        if self.enable_health_servicer:
+            self.health_servicer = HealthServicer(
+                experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
+            add_HealthServicer_to_server(self.health_servicer, self.server)
+
+        self.bind_port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{:s}:{:s}'.format(str(self.bind_address), str(self.bind_port))
+        self.logger.info('Listening on {:s}...'.format(str(self.endpoint)))
+        self.server.start()
+        if self.enable_health_servicer:
+            self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
+
+        self.logger.debug('Service started')
+
+    def stop(self):
+        self.logger.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
+        if self.enable_health_servicer:
+            self.health_servicer.enter_graceful_shutdown()
+        self.server.stop(self.grace_period)
+        self.logger.debug('Service stopped')
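+
+# Usage sketch (illustrative): concrete services subclass GenericGrpcService and
+# register their generated servicers inside install_servicers(), e.g.
+#     def install_servicers(self):
+#         add_MyServicer_to_server(MyServicer(), self.server)  # hypothetical bindings
+# Minimal smoke test, binding an ephemeral port on localhost:
+if __name__ == '__main__':
+    service = GenericGrpcService(bind_port=0, bind_address='127.0.0.1')
+    service.start()  # add_insecure_port('127.0.0.1:0') lets gRPC pick a free port
+    service.stop()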
diff --git a/src/tests/ofc22/tests/common/tools/service/GenericRestServer.py b/src/tests/ofc22/tests/common/tools/service/GenericRestServer.py
new file mode 100644
index 0000000000000000000000000000000000000000..4325fe1dbc0169665a1281b27e6993670add337c
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/service/GenericRestServer.py
@@ -0,0 +1,69 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools, logging, threading, time
+from typing import Optional, Union
+from flask import Flask, request
+from flask_restful import Api, Resource
+from werkzeug.serving import make_server
+from common.Settings import get_http_bind_address
+
+logging.getLogger('werkzeug').setLevel(logging.WARNING)
+
+
+def log_request(logger, response):
+    timestamp = time.strftime('[%Y-%b-%d %H:%M]')
+    logger.info('%s %s %s %s %s', timestamp, request.remote_addr, request.method, request.full_path, response.status)
+    return response
+
+class GenericRestServer(threading.Thread):
+    def __init__(
+        self, bind_port : Union[str, int], base_url : str, bind_address : Optional[str] = None,
+        cls_name : str = __name__
+    ) -> None:
+        threading.Thread.__init__(self, daemon=True)
+        self.logger = logging.getLogger(cls_name)
+        self.bind_port = bind_port
+        self.base_url = base_url
+        self.bind_address = get_http_bind_address() if bind_address is None else bind_address
+        self.endpoint = 'http://{:s}:{:s}{:s}'.format(str(self.bind_address), str(self.bind_port), str(self.base_url))
+        self.srv = None
+        self.ctx = None
+        self.app = Flask(__name__)
+        self.app.after_request(functools.partial(log_request, self.logger))
+        self.api = Api(self.app, prefix=self.base_url)
+
+    def add_resource(self, resource : Resource, *urls, **kwargs):
+        self.api.add_resource(resource, *urls, **kwargs)
+
+    def run(self):
+        self.srv = make_server(self.bind_address, self.bind_port, self.app, threaded=True)
+        self.ctx = self.app.app_context()
+        self.ctx.push()
+
+        self.logger.info('Listening on {:s}...'.format(str(self.endpoint)))
+        self.srv.serve_forever()
+
+    def shutdown(self):
+        self.srv.shutdown()
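+
+# Usage sketch (illustrative): 'Healthz' is a hypothetical resource, shown only to
+# illustrate how add_resource() mounts a flask_restful Resource under base_url.
+if __name__ == '__main__':
+    class Healthz(Resource):
+        def get(self): return {'status': 'ok'}
+    server = GenericRestServer(8080, '/restconf', bind_address='127.0.0.1')
+    server.add_resource(Healthz, '/healthz')
+    server.start()  # inherited from threading.Thread; executes run()
+    # ... later, once run() has created the server: server.shutdown()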
diff --git a/src/tests/ofc22/tests/common/tools/service/__init__.py b/src/tests/ofc22/tests/common/tools/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/tools/service/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/common/type_checkers/Assertions.py b/src/tests/ofc22/tests/common/type_checkers/Assertions.py
new file mode 100644
index 0000000000000000000000000000000000000000..20ffa9ad619a40d6da4f3830c202d1a545545b51
--- /dev/null
+++ b/src/tests/ofc22/tests/common/type_checkers/Assertions.py
@@ -0,0 +1,391 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict
+
+# ----- Enumerations ---------------------------------------------------------------------------------------------------
+def validate_config_action_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'CONFIGACTION_UNDEFINED',
+        'CONFIGACTION_SET',
+        'CONFIGACTION_DELETE',
+    ]
+
+def validate_device_driver_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'DEVICEDRIVER_UNDEFINED',
+        'DEVICEDRIVER_OPENCONFIG',
+        'DEVICEDRIVER_TRANSPORT_API',
+        'DEVICEDRIVER_P4',
+        'DEVICEDRIVER_IETF_NETWORK_TOPOLOGY',
+        'DEVICEDRIVER_ONF_TR_352',
+    ]
+
+def validate_device_operational_status_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'DEVICEOPERATIONALSTATUS_UNDEFINED',
+        'DEVICEOPERATIONALSTATUS_DISABLED',
+        'DEVICEOPERATIONALSTATUS_ENABLED'
+    ]
+
+def validate_kpi_sample_types_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'KPISAMPLETYPE_UNKNOWN',
+        'KPISAMPLETYPE_PACKETS_TRANSMITTED',
+        'KPISAMPLETYPE_PACKETS_RECEIVED',
+        'KPISAMPLETYPE_BYTES_TRANSMITTED',
+        'KPISAMPLETYPE_BYTES_RECEIVED',
+    ]
+
+def validate_service_type_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'SERVICETYPE_UNKNOWN',
+        'SERVICETYPE_L3NM',
+        'SERVICETYPE_L2NM',
+        'SERVICETYPE_TAPI_CONNECTIVITY_SERVICE',
+    ]
+
+def validate_service_state_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'SERVICESTATUS_UNDEFINED',
+        'SERVICESTATUS_PLANNED',
+        'SERVICESTATUS_ACTIVE',
+        'SERVICESTATUS_PENDING_REMOVAL',
+    ]
+
+
+# ----- Common ---------------------------------------------------------------------------------------------------------
+def validate_uuid(message, allow_empty=False):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'uuid' in message
+    assert isinstance(message['uuid'], str)
+    if allow_empty: return
+    assert len(message['uuid']) > 1
+
+CONFIG_RULE_TYPES = {
+    'custom',
+    'acl',
+}
+def validate_config_rule(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'action' in message
+    validate_config_action_enum(message['action'])
+    other_keys = set(list(message.keys()))
+    other_keys.discard('action')
+    config_rule_type = other_keys.pop()
+    assert config_rule_type in CONFIG_RULE_TYPES
+    assert config_rule_type == 'custom', 'ConfigRule Type Validator for {:s} not implemented'.format(config_rule_type)
+    custom : Dict = message['custom']
+    assert len(custom.keys()) == 2
+    assert 'resource_key' in custom
+    assert isinstance(custom['resource_key'], str)
+    assert 'resource_value' in custom
+    assert isinstance(custom['resource_value'], str)
+
+def validate_config_rules(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'config_rules' in message
+    for config_rule in message['config_rules']: validate_config_rule(config_rule)
+
+CONSTRAINT_TYPES = {
+    'custom',
+    'schedule',
+    'endpoint_location',
+    'sla_capacity',
+    'sla_latency',
+    'sla_availability',
+    'sla_isolation',
+}
+def validate_constraint(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    other_keys = list(message.keys())
+    constraint_type = other_keys[0]
+    assert constraint_type in CONSTRAINT_TYPES
+    assert constraint_type == 'custom', 'Constraint Type Validator for {:s} not implemented'.format(constraint_type)
+    custom : Dict = message['custom']
+    assert len(custom.keys()) == 2
+    assert 'constraint_type' in custom
+    assert isinstance(custom['constraint_type'], str)
+    assert 'constraint_value' in custom
+    assert isinstance(custom['constraint_value'], str)
+
+
+# ----- Identifiers ----------------------------------------------------------------------------------------------------
+
+def validate_context_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'context_uuid' in message
+    validate_uuid(message['context_uuid'])
+
+def validate_service_id(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    if context_uuid is not None: assert message['context_id']['context_uuid']['uuid'] == context_uuid
+    assert 'service_uuid' in message
+    validate_uuid(message['service_uuid'])
+
+def validate_topology_id(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    if context_uuid is not None: assert message['context_id']['context_uuid']['uuid'] == context_uuid
+    assert 'topology_uuid' in message
+    validate_uuid(message['topology_uuid'])
+
+def validate_device_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'device_uuid' in message
+    validate_uuid(message['device_uuid'])
+
+def validate_link_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'link_uuid' in message
+    validate_uuid(message['link_uuid'])
+
+def validate_endpoint_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'topology_id' in message
+    validate_topology_id(message['topology_id'])
+    assert 'device_id' in message
+    validate_device_id(message['device_id'])
+    assert 'endpoint_uuid' in message
+    validate_uuid(message['endpoint_uuid'])
+
+def validate_connection_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connection_uuid' in message
+    validate_uuid(message['connection_uuid'])
+
+
+# ----- Lists of Identifiers -------------------------------------------------------------------------------------------
+
+def validate_context_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'context_ids' in message
+    assert isinstance(message['context_ids'], list)
+    for context_id in message['context_ids']: validate_context_id(context_id)
+
+def validate_service_ids(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'service_ids' in message
+    assert isinstance(message['service_ids'], list)
+    for service_id in message['service_ids']: validate_service_id(service_id, context_uuid=context_uuid)
+
+def validate_topology_ids(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'topology_ids' in message
+    assert isinstance(message['topology_ids'], list)
+    for topology_id in message['topology_ids']: validate_topology_id(topology_id, context_uuid=context_uuid)
+
+def validate_device_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'device_ids' in message
+    assert isinstance(message['device_ids'], list)
+    for device_id in message['device_ids']: validate_device_id(device_id)
+
+def validate_link_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'link_ids' in message
+    assert isinstance(message['link_ids'], list)
+    for link_id in message['link_ids']: validate_link_id(link_id)
+
+def validate_connection_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connection_ids' in message
+    assert isinstance(message['connection_ids'], list)
+    for connection_id in message['connection_ids']: validate_connection_id(connection_id)
+
+
+# ----- Objects --------------------------------------------------------------------------------------------------------
+
+def validate_context(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    context_uuid = message['context_id']['context_uuid']['uuid']
+    assert 'service_ids' in message
+    assert isinstance(message['service_ids'], list)
+    for service_id in message['service_ids']: validate_service_id(service_id, context_uuid=context_uuid)
+    assert 'topology_ids' in message
+    assert isinstance(message['topology_ids'], list)
+    for topology_id in message['topology_ids']: validate_topology_id(topology_id, context_uuid=context_uuid)
+
+def validate_service_state(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'service_status' in message
+    validate_service_state_enum(message['service_status'])
+
+def validate_service(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 6
+    assert 'service_id' in message
+    validate_service_id(message['service_id'])
+    assert 'service_type' in message
+    validate_service_type_enum(message['service_type'])
+    assert 'service_endpoint_ids' in message
+    assert isinstance(message['service_endpoint_ids'], list)
+    for endpoint_id in message['service_endpoint_ids']: validate_endpoint_id(endpoint_id)
+    assert 'service_constraints' in message
+    assert isinstance(message['service_constraints'], list)
+    for constraint in message['service_constraints']: validate_constraint(constraint)
+    assert 'service_status' in message
+    validate_service_state(message['service_status'])
+    assert 'service_config' in message
+    validate_config_rules(message['service_config'])
+
+def validate_topology(message, num_devices=None, num_links=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'topology_id' in message
+    validate_topology_id(message['topology_id'])
+    assert 'device_ids' in message
+    assert isinstance(message['device_ids'], list)
+    if num_devices is not None: assert len(message['device_ids']) == num_devices
+    for device_id in message['device_ids']: validate_device_id(device_id)
+    assert 'link_ids' in message
+    assert isinstance(message['link_ids'], list)
+    if num_links is not None: assert len(message['link_ids']) == num_links
+    for link_id in message['link_ids']: validate_link_id(link_id)
+
+def validate_endpoint(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'endpoint_id' in message
+    validate_endpoint_id(message['endpoint_id'])
+    assert 'endpoint_type' in message
+    assert isinstance(message['endpoint_type'], str)
+    assert 'kpi_sample_types' in message
+    assert isinstance(message['kpi_sample_types'], list)
+    for kpi_sample_type in message['kpi_sample_types']: validate_kpi_sample_types_enum(kpi_sample_type)
+
+def validate_device(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 6
+    assert 'device_id' in message
+    validate_device_id(message['device_id'])
+    assert 'device_type' in message
+    assert isinstance(message['device_type'], str)
+    assert 'device_config' in message
+    validate_config_rules(message['device_config'])
+    assert 'device_operational_status' in message
+    validate_device_operational_status_enum(message['device_operational_status'])
+    assert 'device_drivers' in message
+    assert isinstance(message['device_drivers'], list)
+    for driver in message['device_drivers']: validate_device_driver_enum(driver)
+    assert 'device_endpoints' in message
+    assert isinstance(message['device_endpoints'], list)
+    for endpoint in message['device_endpoints']: validate_endpoint(endpoint)
+
+def validate_link(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'link_id' in message
+    validate_link_id(message['link_id'])
+    assert 'link_endpoint_ids' in message
+    assert isinstance(message['link_endpoint_ids'], list)
+    for endpoint_id in message['link_endpoint_ids']: validate_endpoint_id(endpoint_id)
+
+def validate_connection(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 4
+    assert 'connection_id' in message
+    validate_connection_id(message['connection_id'])
+    assert 'service_id' in message
+    validate_service_id(message['service_id'])
+    assert 'path_hops_endpoint_ids' in message
+    assert isinstance(message['path_hops_endpoint_ids'], list)
+    for endpoint_id in message['path_hops_endpoint_ids']: validate_endpoint_id(endpoint_id)
+    assert 'sub_service_ids' in message
+    assert isinstance(message['sub_service_ids'], list)
+    for sub_service_id in message['sub_service_ids']: validate_service_id(sub_service_id)
+
+
+# ----- Lists of Objects -----------------------------------------------------------------------------------------------
+
+def validate_contexts(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'contexts' in message
+    assert isinstance(message['contexts'], list)
+    for context in message['contexts']: validate_context(context)
+
+def validate_services(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'services' in message
+    assert isinstance(message['services'], list)
+    for service in message['services']: validate_service(service)
+
+def validate_topologies(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'topologies' in message
+    assert isinstance(message['topologies'], list)
+    for topology in message['topologies']: validate_topology(topology)
+
+def validate_devices(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'devices' in message
+    assert isinstance(message['devices'], list)
+    for device in message['devices']: validate_device(device)
+
+def validate_links(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'links' in message
+    assert isinstance(message['links'], list)
+    for link in message['links']: validate_link(link)
+
+def validate_connections(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connections' in message
+    assert isinstance(message['connections'], list)
+    for connection in message['connections']: validate_connection(connection)
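+
+# Usage sketch (illustrative): these validators are meant to run over JSON-serialized
+# protobuf messages in tests; the minimal, hypothetical context below passes.
+if __name__ == '__main__':
+    validate_context({
+        'context_id'  : {'context_uuid': {'uuid': 'admin'}},
+        'service_ids' : [],
+        'topology_ids': [],
+    })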
diff --git a/src/tests/ofc22/tests/common/type_checkers/Checkers.py b/src/tests/ofc22/tests/common/type_checkers/Checkers.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bf36cb79fddb827743c2f7529b4ae3a9040d506
--- /dev/null
+++ b/src/tests/ofc22/tests/common/type_checkers/Checkers.py
@@ -0,0 +1,116 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from typing import Any, Container, Dict, Optional, Pattern, Sized, Tuple, Union
+
+def chk_none(name : str, value : Any, reason=None) -> Any:
+    if value is None: return value
+    if reason is None: reason = 'must be None.'
+    raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason)))
+
+def chk_not_none(name : str, value : Any, reason=None) -> Any:
+    if value is not None: return value
+    if reason is None: reason = 'must not be None.'
+    raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason)))
+
+def chk_attribute(name : str, container : Dict, container_name : str, **kwargs):
+    if name in container: return container[name]
+    if 'default' in kwargs: return kwargs['default']
+    raise AttributeError('Missing object({:s}) in container({:s})'.format(str(name), str(container_name)))
+
+def chk_type(name : str, value : Any, type_or_types : Union[type, Tuple[type, ...]] = tuple()) -> Any:
+    if isinstance(value, type_or_types): return value
+    msg = '{}({}) is of a wrong type({}). Accepted type_or_types({}).'
+    raise TypeError(msg.format(str(name), str(value), type(value).__name__, str(type_or_types)))
+
+def chk_issubclass(name : str, value : type, class_or_classes : Union[type, Tuple[type, ...]] = tuple()) -> Any:
+    if issubclass(value, class_or_classes): return value
+    msg = '{}({}) is of a wrong class({}). Accepted class_or_classes({}).'
+    raise TypeError(msg.format(str(name), str(value), type(value).__name__, str(class_or_classes)))
+
+def chk_length(
+    name : str, value : Sized, allow_empty : bool = False,
+    min_length : Optional[int] = None, max_length : Optional[int] = None) -> Any:
+
+    length = len(chk_type(name, value, Sized))
+
+    allow_empty = chk_type('allow_empty for {}'.format(name), allow_empty, bool)
+    if not allow_empty and length == 0:
+        raise ValueError('{}({}) is out of range: allow_empty({}).'.format(str(name), str(value), str(allow_empty)))
+
+    if min_length is not None:
+        min_length = chk_type('min_length for {}'.format(name), min_length, int)
+        if length < min_length:
+            raise ValueError('{}({}) is out of range: min_length({}).'.format(str(name), str(value), str(min_length)))
+
+    if max_length is not None:
+        max_length = chk_type('max_length for {}'.format(name), max_length, int)
+        if length > max_length:
+            raise ValueError('{}({}) is out of range: max_length({}).'.format(str(name), str(value), str(max_length)))
+
+    return value
+
+def chk_boolean(name : str, value : Any) -> bool:
+    return chk_type(name, value, bool)
+
+def chk_string(
+    name : str, value : Any, allow_empty : bool = False,
+    min_length : Optional[int] = None, max_length : Optional[int] = None,
+    pattern : Optional[Union[Pattern, str]] = None) -> str:
+
+    chk_type(name, value, str)
+    chk_length(name, value, allow_empty=allow_empty, min_length=min_length, max_length=max_length)
+    if pattern is None: return value
+    pattern = re.compile(pattern)
+    if pattern.match(value): return value
+    raise ValueError('{}({}) does not match pattern({}).'.format(str(name), str(value), str(pattern)))
+
+def chk_float(
+    name : str, value : Any, type_or_types : Union[type, Tuple[type, ...]] = (int, float),
+    min_value : Optional[Union[int, float]] = None, max_value : Optional[Union[int, float]] = None) -> float:
+
+    chk_not_none(name, value)
+    chk_type(name, value, type_or_types)
+    if min_value is not None:
+        chk_type('min_value for {}'.format(name), min_value, type_or_types)
+        if value < min_value:
+            msg = '{}({}) lower than min_value({}).'
+            raise ValueError(msg.format(str(name), str(value), str(min_value)))
+    if max_value is not None:
+        chk_type('max_value for {}'.format(name), max_value, type_or_types)
+        if value > max_value:
+            msg = '{}({}) greater than max_value({}).'
+            raise ValueError(msg.format(str(name), str(value), str(max_value)))
+    return float(value)
+
+def chk_integer(
+    name : str, value : Any,
+    min_value : Optional[Union[int, float]] = None, max_value : Optional[Union[int, float]] = None) -> int:
+
+    return int(chk_float(name, value, type_or_types=int, min_value=min_value, max_value=max_value))
+
+def chk_options(name : str, value : Any, options : Container) -> Any:
+    chk_not_none(name, value)
+    if value not in options:
+        msg = '{}({}) is not one of options({}).'
+        raise ValueError(msg.format(str(name), str(value), str(options)))
+    return value
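+
+# Usage sketch (illustrative): each helper raises TypeError/ValueError on violation
+# and returns the (possibly coerced) value otherwise.
+if __name__ == '__main__':
+    chk_string('name', 'device-1', min_length=1, pattern=r'^[a-z0-9\-]+$')
+    chk_integer('port', 8080, min_value=1, max_value=65535)
+    chk_options('driver', 'openconfig', {'openconfig', 'p4', 'tapi'})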
diff --git a/src/tests/ofc22/tests/common/type_checkers/__init__.py b/src/tests/ofc22/tests/common/type_checkers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..70a33251242c51f49140e596b8208a19dd5245f7
--- /dev/null
+++ b/src/tests/ofc22/tests/common/type_checkers/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/ofc22/tests/test_functional_bootstrap.py b/src/tests/ofc22/tests/test_functional_bootstrap.py
index 334d7894babedfed2ffb30e4682a1d56e4261cb9..56231512d49542f4cca4a0850767409d340d6852 100644
--- a/src/tests/ofc22/tests/test_functional_bootstrap.py
+++ b/src/tests/ofc22/tests/test_functional_bootstrap.py
@@ -21,7 +21,7 @@ from common.tools.object_factory.Link import json_link_id
 from common.tools.object_factory.Topology import json_topology_id
 from context.client.ContextClient import ContextClient
 from context.client.EventsCollector import EventsCollector
-from context.proto.context_pb2 import Context, ContextId, Device, Empty, Link, Topology
+from common.proto.context_pb2 import Context, ContextId, Device, Empty, Link, Topology
 from device.client.DeviceClient import DeviceClient
 from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES
 
diff --git a/src/tests/ofc22/tests/test_functional_cleanup.py b/src/tests/ofc22/tests/test_functional_cleanup.py
index eb78a585079e3ee757a836433bf23423a3ad899d..6c3a79dfd2e1e46d5c3e4b2d5e33f0ae42decd00 100644
--- a/src/tests/ofc22/tests/test_functional_cleanup.py
+++ b/src/tests/ofc22/tests/test_functional_cleanup.py
@@ -21,7 +21,7 @@ from common.tools.object_factory.Link import json_link_id
 from common.tools.object_factory.Topology import json_topology_id
 from context.client.ContextClient import ContextClient
 from context.client.EventsCollector import EventsCollector
-from context.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId
+from common.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, TopologyId
 from device.client.DeviceClient import DeviceClient
 from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES
 
diff --git a/src/tests/ofc22/tests/test_functional_create_service.py b/src/tests/ofc22/tests/test_functional_create_service.py
index f3389fdbfce4e9262ffddbad876bb86f9b300551..a630501815194deb5f49a07bd9f7e6c1b5d03dcd 100644
--- a/src/tests/ofc22/tests/test_functional_create_service.py
+++ b/src/tests/ofc22/tests/test_functional_create_service.py
@@ -23,7 +23,7 @@ from common.tools.grpc.Tools import grpc_message_to_json_string
 from compute.tests.mock_osm.MockOSM import MockOSM
 from context.client.ContextClient import ContextClient
 from context.client.EventsCollector import EventsCollector
-from context.proto.context_pb2 import ContextId, Empty
+from common.proto.context_pb2 import ContextId, Empty
 from .Objects import (
     CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES,
     WIM_MAPPING, WIM_PASSWORD, WIM_SERVICE_CONNECTION_POINTS, WIM_SERVICE_TYPE, WIM_USERNAME)
diff --git a/src/tests/ofc22/tests/test_functional_delete_service.py b/src/tests/ofc22/tests/test_functional_delete_service.py
index 51e91a5967e1696fa2fdfe7dd06d2efb46642248..222dee5adc0839df9b9a6cac1dcdd08ecb2ec195 100644
--- a/src/tests/ofc22/tests/test_functional_delete_service.py
+++ b/src/tests/ofc22/tests/test_functional_delete_service.py
@@ -23,7 +23,7 @@ from common.tools.grpc.Tools import grpc_message_to_json_string
 from compute.tests.mock_osm.MockOSM import MockOSM
 from context.client.ContextClient import ContextClient
 from context.client.EventsCollector import EventsCollector
-from context.proto.context_pb2 import ContextId, Empty
+from common.proto.context_pb2 import ContextId, Empty
 from .Objects import (
     CONTEXT_ID, CONTEXTS, DEVICE_O1_UUID, DEVICE_R1_UUID, DEVICE_R3_UUID, DEVICES, LINKS, TOPOLOGIES, WIM_MAPPING,
     WIM_PASSWORD, WIM_USERNAME)