diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ce9c33962f403ab0d3ab4a2811c527f6701a5e99..40838a3e06d4a08b151fde136c9289e409db3196 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,16 +9,19 @@ stages:
   - funct_test
 
 # include the individual .gitlab-ci.yml of each micro-service
-include: 
+include:
   - local: '/manifests/.gitlab-ci.yml'
   - local: '/src/monitoring/.gitlab-ci.yml'
-  - local: '/src/centralizedattackdetector/.gitlab-ci.yml'
+  - local: '/src/compute/.gitlab-ci.yml'
   - local: '/src/context/.gitlab-ci.yml'
   - local: '/src/device/.gitlab-ci.yml'
   - local: '/src/service/.gitlab-ci.yml'
-  - local: '/src/webui/.gitlab-ci.yml'
-  - local: '/src/tester_integration/.gitlab-ci.yml'
-  - local: '/src/tester_functional/.gitlab-ci.yml'
+  # - local: '/src/dbscanserving/.gitlab-ci.yml'
+  # - local: '/src/opticalattackmitigator/.gitlab-ci.yml'
+  # - local: '/src/opticalcentralizedattackdetector/.gitlab-ci.yml'
+  # - local: '/src/tester_integration/.gitlab-ci.yml'
+  # - local: '/src/tester_functional/.gitlab-ci.yml'
   - local: '/src/automation/.gitlab-ci.yml'
-  - local: '/src/policy/.gitlab-ci.yml'
-  
+  # - local: '/src/l3_distributedattackdetector/.gitlab-ci.yml'
+  # - local: '/src/l3_centralizedattackdetector/.gitlab-ci.yml'
+  # - local: '/src/l3_attackmitigator/.gitlab-ci.yml'
diff --git a/deploy_in_kubernetes.sh b/deploy_in_kubernetes.sh
index 101473c9e14351ebc8dcd34daf7cbc29d5aff80a..58e35d249110688123bea1553bdff3c55bcee5f2 100755
--- a/deploy_in_kubernetes.sh
+++ b/deploy_in_kubernetes.sh
@@ -10,7 +10,7 @@ REGISTRY_IMAGE=""
 #REGISTRY_IMAGE="http://my-container-registry.local/"
 
 # Set the list of components you want to build images for, and deploy.
-COMPONENTS="context device automation policy service compute monitoring centralizedattackdetector webui"
+COMPONENTS="context device automation policy service compute monitoring dbscanserving opticalattackmitigator opticalcentralizedattackdetector webui"
 
 # Set the tag you want to use for your images.
 IMAGE_TAG="tf-dev"
@@ -38,6 +38,11 @@ kubectl delete namespace $K8S_NAMESPACE
 kubectl create namespace $K8S_NAMESPACE
 printf "\n"
 
+# Create the secrets for the InfluxDB deployment.
+# TODO: replace these default credentials before any production deployment.
+kubectl create secret generic influxdb-secrets --namespace=$K8S_NAMESPACE --from-literal=INFLUXDB_DB="monitoring" --from-literal=INFLUXDB_ADMIN_USER="teraflow" --from-literal=INFLUXDB_ADMIN_PASSWORD="teraflow" --from-literal=INFLUXDB_HTTP_AUTH_ENABLED="True"
+kubectl create secret generic monitoring-secrets --namespace=$K8S_NAMESPACE --from-literal=INFLUXDB_DATABASE="monitoring" --from-literal=INFLUXDB_USER="teraflow" --from-literal=INFLUXDB_PASSWORD="teraflow" --from-literal=INFLUXDB_HOSTNAME="localhost"
+
 for COMPONENT in $COMPONENTS; do
     echo "Processing '$COMPONENT' component..."
     IMAGE_NAME="$COMPONENT:$IMAGE_TAG"
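The two secrets created above are consumed by the new optical centralized attack detector manifest through `envFrom`/`secretRef` (see `manifests/opticalcentralizedattackdetectorservice.yaml` further down). A minimal sketch of how a Python component could pick up the injected variables; the variable names come from the `kubectl create secret` commands above, and plain `os.environ` access is assumed:

```python
import os

# Injected from the "monitoring-secrets" Secret via envFrom/secretRef.
influxdb_hostname = os.environ.get("INFLUXDB_HOSTNAME", "localhost")
influxdb_database = os.environ.get("INFLUXDB_DATABASE", "monitoring")
influxdb_user     = os.environ.get("INFLUXDB_USER", "teraflow")
influxdb_password = os.environ.get("INFLUXDB_PASSWORD", "")

# The monitoring InfluxDB instance listens on 8086 (see manifests/monitoringservice.yaml).
influxdb_url = "http://{}:8086".format(influxdb_hostname)
```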
diff --git a/install_development_dependencies.sh b/install_development_dependencies.sh
index 890cc162d65e1a94ed136a9e4010af9df4cd8cca..701e844d7563ffcff51a53fc189bb7df34e1d06a 100755
--- a/install_development_dependencies.sh
+++ b/install_development_dependencies.sh
@@ -7,7 +7,7 @@ pip install --upgrade pip setuptools wheel pip-tools pylint pytest pytest-benchm
 echo "" > requirements.in
 
 #TODO: include here your component
-COMPONENTS="compute context device monitoring centralizedattackdetector webui"
+COMPONENTS="compute context device service monitoring opticalcentralizedattackdetector opticalattackmitigator dbscanserving webui"
 
 # compiling dependencies from all components
 for component in $COMPONENTS
diff --git a/manifests/.gitignore b/manifests/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..74cb209abec0711bfa467772708b137606e98b9a
--- /dev/null
+++ b/manifests/.gitignore
@@ -0,0 +1,4 @@
+# Internal manifests used for local testing.
+
+# CTTC section:
+cttc-ols/
diff --git a/manifests/centralizedattackdetectorservice.yaml b/manifests/centralizedattackdetectorservice.yaml
deleted file mode 100644
index faf42a5550098c9e73d19e2587cc508dbd3cf7be..0000000000000000000000000000000000000000
--- a/manifests/centralizedattackdetectorservice.yaml
+++ /dev/null
@@ -1,70 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: centralizedattackdetectorervice
-spec:
-  selector:
-    matchLabels:
-      app: centralizedattackdetectorervice
-  template:
-    metadata:
-      labels:
-        app: centralizedattackdetectorervice
-    spec:
-      terminationGracePeriodSeconds: 5
-      containers:
-      - name: server
-        image: registry.gitlab.com/teraflow-h2020/controller/centralizedcybersecurity:latest
-        imagePullPolicy: Always
-        ports:
-        - containerPort: 10000
-        env:
-        - name: DB_ENGINE
-          value: "redis"
-        - name: REDIS_DATABASE_ID
-          value: "0"
-        - name: LOG_LEVEL
-          value: "DEBUG"
-        readinessProbe:
-          exec:
-            command: ["/bin/grpc_health_probe", "-addr=:10000"]
-        livenessProbe:
-          exec:
-            command: ["/bin/grpc_health_probe", "-addr=:10000"]
-        resources:
-          requests:
-            cpu: 250m
-            memory: 512Mi
-          limits:
-            cpu: 700m
-            memory: 1024Mi
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: centralizedattackdetectorervice
-spec:
-  type: ClusterIP
-  selector:
-    app: centralizedattackdetectorervice
-  ports:
-  - name: grpc
-    port: 10000
-    targetPort: 10000
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: centralizedattackdetectorervice-public
-  labels:
-    app: centralizedattackdetectorervice
-spec:
-  type: NodePort
-  selector:
-    app: centralizedattackdetectorervice
-  ports:
-  - name: grpc
-    protocol: TCP
-    port: 10000
-    targetPort: 10000
----
diff --git a/manifests/computeservice.yaml b/manifests/computeservice.yaml
index cdca52eb890d832cfb56457b89d44a045ee4af57..73380f75daeb7d60891c3d093ca7651ba4280e58 100644
--- a/manifests/computeservice.yaml
+++ b/manifests/computeservice.yaml
@@ -17,6 +17,7 @@ spec:
         image: registry.gitlab.com/teraflow-h2020/controller/compute:latest
         imagePullPolicy: Always
         ports:
+        - containerPort: 8080
         - containerPort: 9090
         env:
         - name: LOG_LEVEL
@@ -44,7 +45,12 @@ spec:
   selector:
     app: computeservice
   ports:
+  - name: http
+    protocol: TCP
+    port: 8080
+    targetPort: 8080
   - name: grpc
+    protocol: TCP
     port: 9090
     targetPort: 9090
 ---
@@ -59,6 +65,10 @@ spec:
   selector:
     app: computeservice
   ports:
+  - name: http
+    protocol: TCP
+    port: 8080
+    targetPort: 8080
   - name: grpc
     protocol: TCP
     port: 9090
diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml
index c0ea046e0ae1d9e4bb6b6a70de5a8b26844981fc..7ccf3e4f0dc9abb41f228d4fb7c4ec18fad93954 100644
--- a/manifests/contextservice.yaml
+++ b/manifests/contextservice.yaml
@@ -34,6 +34,8 @@ spec:
         env:
         - name: DB_BACKEND
           value: "redis"
+        - name: MB_BACKEND
+          value: "redis"
         - name: REDIS_DATABASE_ID
           value: "0"
         - name: LOG_LEVEL
@@ -64,9 +66,11 @@ spec:
     app: contextservice
   ports:
   - name: grpc
+    protocol: TCP
     port: 1010
     targetPort: 1010
   - name: http
+    protocol: TCP
     port: 8080
     targetPort: 8080
 ---
diff --git a/manifests/dbscanservingservice.yaml b/manifests/dbscanservingservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ca8982db25f0fe94588418dc67787f63babc9826
--- /dev/null
+++ b/manifests/dbscanservingservice.yaml
@@ -0,0 +1,66 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: dbscanservingservice
+spec:
+  selector:
+    matchLabels:
+      app: dbscanservingservice
+  template:
+    metadata:
+      labels:
+        app: dbscanservingservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+      - name: server
+        image: registry.gitlab.com/teraflow-h2020/controller/dbscanserving:latest
+        imagePullPolicy: Always
+        ports:
+        - containerPort: 10006
+        env:
+        - name: LOG_LEVEL
+          value: "DEBUG"
+        readinessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10006"]
+        livenessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10006"]
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: dbscanservingservice
+spec:
+  type: ClusterIP
+  selector:
+    app: dbscanservingservice
+  ports:
+  - name: grpc
+    port: 10006
+    targetPort: 10006
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: dbscanservingservice-public
+  labels:
+    app: dbscanservingservice
+spec:
+  type: NodePort
+  selector:
+    app: dbscanservingservice
+  ports:
+  - name: grpc
+    protocol: TCP
+    port: 10006
+    targetPort: 10006
+---
diff --git a/manifests/l3_attackmitigatorservice.yaml b/manifests/l3_attackmitigatorservice.yaml
index 7ac91b02fd2b3be51a9e810ae194fb0d7a9bf0d7..34f660324066c7a06b8debff276ed5444027fe4e 100644
--- a/manifests/l3_attackmitigatorservice.yaml
+++ b/manifests/l3_attackmitigatorservice.yaml
@@ -1,20 +1,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: attackmitigatorservice
+  name: l3-attackmitigatorservice
 spec:
   selector:
     matchLabels:
-      app: attackmitigatorservice
+      app: l3-attackmitigatorservice
   template:
     metadata:
       labels:
-        app: attackmitigatorservice
+        app: l3-attackmitigatorservice
     spec:
       terminationGracePeriodSeconds: 5
       containers:
       - name: server
-        image: registry.gitlab.com/teraflow-h2020/controller/attackmitigator:latest
+        image: registry.gitlab.com/teraflow-h2020/controller/l3_attackmitigator:latest
         imagePullPolicy: Always
         ports:
         - containerPort: 10002
@@ -38,11 +38,11 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: attackmitigatorservice
+  name: l3-attackmitigatorservice
 spec:
   type: ClusterIP
   selector:
-    app: attackmitigatorservice
+    app: l3-attackmitigatorservice
   ports:
   - name: grpc
     port: 10002
diff --git a/manifests/l3_centralizedattackdetectorservice.yaml b/manifests/l3_centralizedattackdetectorservice.yaml
index 4b4b629294dc72f0d158b923364bb65d559df624..0393d83c29591d2fa4cc1a2b52abbcdc760de23a 100644
--- a/manifests/l3_centralizedattackdetectorservice.yaml
+++ b/manifests/l3_centralizedattackdetectorservice.yaml
@@ -1,20 +1,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: centralizedattackdetectorservice
+  name: l3-centralizedattackdetectorservice
 spec:
   selector:
     matchLabels:
-      app: centralizedattackdetectorservice
+      app: l3-centralizedattackdetectorservice
   template:
     metadata:
       labels:
-        app: centralizedattackdetectorservice
+        app: l3-centralizedattackdetectorservice
     spec:
       terminationGracePeriodSeconds: 5
       containers:
       - name: server
-        image: registry.gitlab.com/teraflow-h2020/controller/centralizedattackdetector:latest
+        image: registry.gitlab.com/teraflow-h2020/controller/l3_centralizedattackdetector:latest
         imagePullPolicy: Always
         ports:
         - containerPort: 10001
@@ -38,11 +38,11 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: centralizedattackdetectorservice
+  name: l3-centralizedattackdetectorservice
 spec:
   type: ClusterIP
   selector:
-    app: centralizedattackdetectorservice
+    app: l3-centralizedattackdetectorservice
   ports:
   - name: grpc
     port: 10001
diff --git a/manifests/l3_distributedattackdetectorservice.yaml b/manifests/l3_distributedattackdetectorservice.yaml
index 22157d9a4e9faa3af6ea76c6212c7914a0868726..eff047b1b97733f808df01a23a11a930515eb3e5 100644
--- a/manifests/l3_distributedattackdetectorservice.yaml
+++ b/manifests/l3_distributedattackdetectorservice.yaml
@@ -1,20 +1,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: distributedattackdetectorservice
+  name: l3-distributedattackdetectorservice
 spec:
   selector:
     matchLabels:
-      app: distributedattackdetectorservice
+      app: l3-distributedattackdetectorservice
   template:
     metadata:
       labels:
-        app: distributedattackdetectorservice
+        app: l3-distributedattackdetectorservice
     spec:
       terminationGracePeriodSeconds: 5
       containers:
       - name: server
-        image: registry.gitlab.com/teraflow-h2020/controller/distributedattackdetector:latest
+        image: registry.gitlab.com/teraflow-h2020/controller/l3_distributedattackdetector:latest
         imagePullPolicy: Always
         ports:
         - containerPort: 10000
@@ -38,11 +38,11 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: distributedattackdetectorservice
+  name: l3-distributedattackdetectorservice
 spec:
   type: ClusterIP
   selector:
-    app: distributedattackdetectorservice
+    app: l3-distributedattackdetectorservice
   ports:
   - name: grpc
     port: 10000
diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml
index ca5f7108860570044fed9b149ec500c547010f9c..d1b4023fa1db0fcae1e8b6b83ceb665151c35822 100644
--- a/manifests/monitoringservice.yaml
+++ b/manifests/monitoringservice.yaml
@@ -74,4 +74,4 @@ spec:
     protocol: TCP
     port: 8086
     targetPort: 8086
----
\ No newline at end of file
+---
diff --git a/manifests/opticalattackmitigatorservice.yaml b/manifests/opticalattackmitigatorservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1cc03ba0575edcc66e1cf3f9b57fd161a763a696
--- /dev/null
+++ b/manifests/opticalattackmitigatorservice.yaml
@@ -0,0 +1,66 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: opticalattackmitigatorservice
+spec:
+  selector:
+    matchLabels:
+      app: opticalattackmitigatorservice
+  template:
+    metadata:
+      labels:
+        app: opticalattackmitigatorservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+      - name: server
+        image: registry.gitlab.com/teraflow-h2020/controller/opticalattackmitigator:latest
+        imagePullPolicy: Always
+        ports:
+        - containerPort: 10007
+        env:
+        - name: LOG_LEVEL
+          value: "DEBUG"
+        readinessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10007"]
+        livenessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10007"]
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: opticalattackmitigatorservice
+spec:
+  type: ClusterIP
+  selector:
+    app: opticalattackmitigatorservice
+  ports:
+  - name: grpc
+    port: 10007
+    targetPort: 10007
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: opticalattackmitigatorservice-public
+  labels:
+    app: opticalattackmitigatorservice
+spec:
+  type: NodePort
+  selector:
+    app: opticalattackmitigatorservice
+  ports:
+  - name: grpc
+    protocol: TCP
+    port: 10007
+    targetPort: 10007
+---
diff --git a/manifests/opticalcentralizedattackdetectorservice.yaml b/manifests/opticalcentralizedattackdetectorservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d3ceb3ea49ac1421e14faf00dbb81b1fdaff5537
--- /dev/null
+++ b/manifests/opticalcentralizedattackdetectorservice.yaml
@@ -0,0 +1,66 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: opticalcentralizedattackdetectorservice
+spec:
+  selector:
+    matchLabels:
+      app: opticalcentralizedattackdetectorservice
+  template:
+    metadata:
+      labels:
+        app: opticalcentralizedattackdetectorservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+      - name: server
+        image: registry.gitlab.com/teraflow-h2020/controller/opticalcentralizedattackdetector:latest
+        imagePullPolicy: Always
+        ports:
+        - containerPort: 10005
+        envFrom:
+          - secretRef:
+              name: monitoring-secrets
+        readinessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10005"]
+        livenessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:10005"]
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: opticalcentralizedattackdetectorservice
+spec:
+  type: ClusterIP
+  selector:
+    app: opticalcentralizedattackdetectorservice
+  ports:
+  - name: grpc
+    port: 10005
+    targetPort: 10005
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: opticalcentralizedattackdetectorservice-public
+  labels:
+    app: opticalcentralizedattackdetectorservice
+spec:
+  type: NodePort
+  selector:
+    app: opticalcentralizedattackdetectorservice
+  ports:
+  - name: grpc
+    protocol: TCP
+    port: 10005
+    targetPort: 10005
+---
diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml
index 72fd1c61564831f61bdf78aa494092829f0dd676..e9337807d9bbcec32fee2fe6f4ab04e4b4dba1aa 100644
--- a/manifests/serviceservice.yaml
+++ b/manifests/serviceservice.yaml
@@ -19,10 +19,6 @@ spec:
         ports:
         - containerPort: 3030
         env:
-        - name: DB_ENGINE
-          value: "redis"
-        - name: REDIS_DATABASE_ID
-          value: "0"
         - name: LOG_LEVEL
           value: "DEBUG"
         readinessProbe:
diff --git a/proto/automation.proto b/proto/automation.proto
index 6de1d51e2aa440651beaf3875d213a2deee79abb..95526d36d75047f0f351d77b2dfa760934883dfc 100644
--- a/proto/automation.proto
+++ b/proto/automation.proto
@@ -1,4 +1,3 @@
-//Example of topology
 syntax = "proto3";
 package automation;
 
@@ -40,7 +39,7 @@ message DeviceRoleState {
 }
 
 message DeviceDeletionResult {
-  repeated bool deleted = 1;
+  repeated string deleted = 1;
 }
 
 message Empty {}
@@ -51,5 +50,3 @@ enum ZtpDeviceState {
   ZTP_DEV_STATE_UPDATED  = 2;
   ZTP_DEV_STATE_DELETED  = 3;
 }
-
-
diff --git a/proto/compile.sh b/proto/compile.sh
index 9f0acdb050d3e4d05dd8a8e374c157e2040142c1..9a28ac860259c57a25cd304a9089451c2a08525d 100755
--- a/proto/compile.sh
+++ b/proto/compile.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-python3 -m grpc_tools.protoc -I=./ --python_out=src/ --grpc_python_out=src/ *.proto
+python3 -m grpc_tools.protoc -I=./ --python_out=../src/ --grpc_python_out=../src/ *.proto
 #requires installation of protoc-gen-uml
 export PATH=${HOME}/protoc-gen-uml/target/universal/stage/bin:$PATH
 protoc --uml_out=uml/ -I ./ *.proto
diff --git a/proto/context.proto b/proto/context.proto
index 07d6f4b71688c3c23ac3ce85d38164bc5bf6ee1f..2e2154b7d3336b1ce4ed36ed3323bf0325f467ee 100644
--- a/proto/context.proto
+++ b/proto/context.proto
@@ -1,43 +1,50 @@
 syntax = "proto3";
 package context;
 
-//import "kpi_sample_types.proto";
+import "kpi_sample_types.proto";
 
 service ContextService {
-  rpc ListContextIds   (Empty     ) returns (       ContextIdList ) {}
-  rpc ListContexts     (Empty     ) returns (       ContextList   ) {}
-  rpc GetContext       (ContextId ) returns (       Context       ) {}
-  rpc SetContext       (Context   ) returns (       ContextId     ) {}
-  rpc RemoveContext    (ContextId ) returns (       Empty         ) {}
-  rpc GetContextEvents (Empty     ) returns (stream ContextEvent  ) {}
-
-  rpc ListTopologyIds  (ContextId ) returns (       TopologyIdList) {}
-  rpc ListTopologies   (ContextId ) returns (       TopologyList  ) {}
-  rpc GetTopology      (TopologyId) returns (       Topology      ) {}
-  rpc SetTopology      (Topology  ) returns (       TopologyId    ) {}
-  rpc RemoveTopology   (TopologyId) returns (       Empty         ) {}
-  rpc GetTopologyEvents(Empty     ) returns (stream TopologyEvent ) {}
-
-  rpc ListDeviceIds    (Empty     ) returns (       DeviceIdList  ) {}
-  rpc ListDevices      (Empty     ) returns (       DeviceList    ) {}
-  rpc GetDevice        (DeviceId  ) returns (       Device        ) {}
-  rpc SetDevice        (Device    ) returns (       DeviceId      ) {}
-  rpc RemoveDevice     (DeviceId  ) returns (       Empty         ) {}
-  rpc GetDeviceEvents  (Empty     ) returns (stream DeviceEvent   ) {}
-
-  rpc ListLinkIds      (Empty     ) returns (       LinkIdList    ) {}
-  rpc ListLinks        (Empty     ) returns (       LinkList      ) {}
-  rpc GetLink          (LinkId    ) returns (       Link          ) {}
-  rpc SetLink          (Link      ) returns (       LinkId        ) {}
-  rpc RemoveLink       (LinkId    ) returns (       Empty         ) {}
-  rpc GetLinkEvents    (Empty     ) returns (stream LinkEvent     ) {}
-
-  rpc ListServiceIds   (ContextId ) returns (       ServiceIdList ) {}
-  rpc ListServices     (ContextId ) returns (       ServiceList   ) {}
-  rpc GetService       (ServiceId ) returns (       Service       ) {}
-  rpc SetService       (Service   ) returns (       ServiceId     ) {}
-  rpc RemoveService    (ServiceId ) returns (       Empty         ) {}
-  rpc GetServiceEvents (Empty     ) returns (stream ServiceEvent  ) {}
+  rpc ListContextIds     (Empty       ) returns (       ContextIdList   ) {}
+  rpc ListContexts       (Empty       ) returns (       ContextList     ) {}
+  rpc GetContext         (ContextId   ) returns (       Context         ) {}
+  rpc SetContext         (Context     ) returns (       ContextId       ) {}
+  rpc RemoveContext      (ContextId   ) returns (       Empty           ) {}
+  rpc GetContextEvents   (Empty       ) returns (stream ContextEvent    ) {}
+
+  rpc ListTopologyIds    (ContextId   ) returns (       TopologyIdList  ) {}
+  rpc ListTopologies     (ContextId   ) returns (       TopologyList    ) {}
+  rpc GetTopology        (TopologyId  ) returns (       Topology        ) {}
+  rpc SetTopology        (Topology    ) returns (       TopologyId      ) {}
+  rpc RemoveTopology     (TopologyId  ) returns (       Empty           ) {}
+  rpc GetTopologyEvents  (Empty       ) returns (stream TopologyEvent   ) {}
+
+  rpc ListDeviceIds      (Empty       ) returns (       DeviceIdList    ) {}
+  rpc ListDevices        (Empty       ) returns (       DeviceList      ) {}
+  rpc GetDevice          (DeviceId    ) returns (       Device          ) {}
+  rpc SetDevice          (Device      ) returns (       DeviceId        ) {}
+  rpc RemoveDevice       (DeviceId    ) returns (       Empty           ) {}
+  rpc GetDeviceEvents    (Empty       ) returns (stream DeviceEvent     ) {}
+
+  rpc ListLinkIds        (Empty       ) returns (       LinkIdList      ) {}
+  rpc ListLinks          (Empty       ) returns (       LinkList        ) {}
+  rpc GetLink            (LinkId      ) returns (       Link            ) {}
+  rpc SetLink            (Link        ) returns (       LinkId          ) {}
+  rpc RemoveLink         (LinkId      ) returns (       Empty           ) {}
+  rpc GetLinkEvents      (Empty       ) returns (stream LinkEvent       ) {}
+
+  rpc ListServiceIds     (ContextId   ) returns (       ServiceIdList   ) {}
+  rpc ListServices       (ContextId   ) returns (       ServiceList     ) {}
+  rpc GetService         (ServiceId   ) returns (       Service         ) {}
+  rpc SetService         (Service     ) returns (       ServiceId       ) {}
+  rpc RemoveService      (ServiceId   ) returns (       Empty           ) {}
+  rpc GetServiceEvents   (Empty       ) returns (stream ServiceEvent    ) {}
+
+  rpc ListConnectionIds  (ServiceId   ) returns (       ConnectionIdList) {}
+  rpc ListConnections    (ServiceId   ) returns (       ConnectionList  ) {}
+  rpc GetConnection      (ConnectionId) returns (       Connection      ) {}
+  rpc SetConnection      (Connection  ) returns (       ConnectionId    ) {}
+  rpc RemoveConnection   (ConnectionId) returns (       Empty           ) {}
+  rpc GetConnectionEvents(Empty       ) returns (stream ConnectionEvent ) {}
 }
 
 // ----- Generic -------------------------------------------------------------------------------------------------------
@@ -233,6 +240,32 @@ message ServiceEvent {
 }
 
 
+// ----- Connection ----------------------------------------------------------------------------------------------------
+message ConnectionId {
+  Uuid connection_uuid = 1;
+}
+
+message Connection {
+  ConnectionId connection_id = 1;
+  ServiceId service_id = 2;
+  repeated EndPointId path_hops_endpoint_ids = 3;
+  repeated ServiceId sub_service_ids = 4;
+}
+
+message ConnectionIdList {
+  repeated ConnectionId connection_ids = 1;
+}
+
+message ConnectionList {
+  repeated Connection connections = 1;
+}
+
+message ConnectionEvent {
+  Event event = 1;
+  ConnectionId connection_id = 2;
+}
+
+
 // ----- Endpoint ------------------------------------------------------------------------------------------------------
 message EndPointId {
   TopologyId topology_id = 1;
@@ -243,7 +276,7 @@ message EndPointId {
 message EndPoint {
   EndPointId endpoint_id = 1;
   string endpoint_type = 2;
-  //repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
+  repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
 }
 
 
@@ -268,26 +301,6 @@ message Constraint {
 }
 
 
-// ----- Connection ----------------------------------------------------------------------------------------------------
-message ConnectionId {
-  Uuid connection_uuid = 1;
-}
-
-message Connection {
-  ConnectionId connection_id = 1;
-  ServiceId related_service_id = 2;
-  repeated EndPointId path = 3;
-}
-
-message ConnectionIdList {
-  repeated ConnectionId connection_ids = 1;
-}
-
-message ConnectionList {
-  repeated Connection connections = 1;
-}
-
-
 // ----- Miscellaneous -------------------------------------------------------------------------------------------------
 message TeraFlowController {
   ContextId context_id = 1;
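The Connection-related RPCs added above can be exercised with the stubs produced by `proto/compile.sh`. A minimal sketch, assuming the generated `context_pb2`/`context_pb2_grpc` modules are on the path and using the in-cluster name and gRPC port 1010 declared in `manifests/contextservice.yaml`:

```python
import grpc
import context_pb2
import context_pb2_grpc

channel = grpc.insecure_channel("contextservice:1010")
stub = context_pb2_grpc.ContextServiceStub(channel)

# Stream connection add/update/remove events and fetch each affected connection.
for event in stub.GetConnectionEvents(context_pb2.Empty()):
    connection = stub.GetConnection(event.connection_id)
    print(connection.service_id, len(connection.path_hops_endpoint_ids))
```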
diff --git a/proto/dbscanserving.proto b/proto/dbscanserving.proto
new file mode 100644
index 0000000000000000000000000000000000000000..f2c63b15a00481290cfd46ab73940d2f399e01d5
--- /dev/null
+++ b/proto/dbscanserving.proto
@@ -0,0 +1,29 @@
+syntax = "proto3";
+
+package dbscanserving;
+
+enum Metric {
+    EUCLIDEAN = 0;
+}
+
+message Sample {
+    repeated float features = 1;
+}
+
+message DetectionRequest {
+    float eps = 1;
+    int32 min_samples = 2;
+    Metric metric = 3;
+    int32 num_samples = 4;
+    int32 num_features = 5;
+    repeated Sample samples = 6;
+    int32 identifier = 7;
+}
+
+message DetectionResponse {
+    repeated int32 cluster_indices = 1;
+}
+
+service Detector {
+    rpc Detect (DetectionRequest) returns (DetectionResponse);
+}
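A minimal client sketch for the new `Detector` service, assuming the generated `dbscanserving_pb2`/`dbscanserving_pb2_grpc` modules and the in-cluster address/port declared in `manifests/dbscanservingservice.yaml`; the sample values are illustrative only:

```python
import random
import grpc
import dbscanserving_pb2
import dbscanserving_pb2_grpc

channel = grpc.insecure_channel("dbscanservingservice:10006")
stub = dbscanserving_pb2_grpc.DetectorStub(channel)

# Build a request with 300 random samples of 20 features each.
request = dbscanserving_pb2.DetectionRequest(
    eps=100.5, min_samples=5, metric=dbscanserving_pb2.EUCLIDEAN,
    num_samples=300, num_features=20, identifier=0)
for _ in range(300):
    sample = request.samples.add()
    sample.features.extend([random.uniform(0.0, 10.0) for _ in range(20)])

response = stub.Detect(request)
print(response.cluster_indices)  # one cluster index per input sample
```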
diff --git a/proto/device.proto b/proto/device.proto
index b1804b486ea181140fac2bfc86354e2a9530d536..a7f3e44b131908a8931a1ef5257d0261b37537a4 100644
--- a/proto/device.proto
+++ b/proto/device.proto
@@ -2,19 +2,19 @@ syntax = "proto3";
 package device;
 
 import "context.proto";
-//import "monitoring.proto";
+import "monitoring.proto";
 
 service DeviceService {
   rpc AddDevice       (context.Device    ) returns (context.DeviceId    ) {}
   rpc ConfigureDevice (context.Device    ) returns (context.DeviceId    ) {}
   rpc DeleteDevice    (context.DeviceId  ) returns (context.Empty       ) {}
   rpc GetInitialConfig(context.DeviceId  ) returns (context.DeviceConfig) {}
-  //rpc MonitorDeviceKpi(MonitoringSettings) returns (context.Empty       ) {}
+  rpc MonitorDeviceKpi(MonitoringSettings) returns (context.Empty       ) {}
 }
 
-//message MonitoringSettings {
-//  monitoring.KpiId kpi_id = 1;
-//  monitoring.KpiDescriptor kpi_descriptor = 2;
-//  float sampling_duration_s = 3;
-//  float sampling_interval_s = 4;
-//}
+message MonitoringSettings {
+  monitoring.KpiId kpi_id = 1;
+  monitoring.KpiDescriptor kpi_descriptor = 2;
+  float sampling_duration_s = 3;
+  float sampling_interval_s = 4;
+}
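With `MonitorDeviceKpi` now enabled, a monitoring request can be built from the `MonitoringSettings` fields above and the `KpiDescriptor` fields defined in `monitoring.proto` below. A minimal sketch, assuming the generated `device_pb2`/`device_pb2_grpc`/`kpi_sample_types_pb2` modules and an already-open channel plus a `context.DeviceId` obtained elsewhere:

```python
import device_pb2
import device_pb2_grpc
import kpi_sample_types_pb2

def request_device_kpi_monitoring(channel, device_id):
    """Ask the device service to monitor a KPI on the given device."""
    stub = device_pb2_grpc.DeviceServiceStub(channel)
    settings = device_pb2.MonitoringSettings()
    settings.kpi_descriptor.kpi_description = "transmitted bytes on device"
    settings.kpi_descriptor.kpi_sample_type = kpi_sample_types_pb2.KPISAMPLETYPE_BYTES_TRANSMITTED
    settings.kpi_descriptor.device_id.CopyFrom(device_id)
    settings.sampling_duration_s = 3600.0   # monitor for one hour
    settings.sampling_interval_s = 30.0     # one sample every 30 seconds
    return stub.MonitorDeviceKpi(settings)
```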
diff --git a/proto/kpi_sample_types.proto b/proto/kpi_sample_types.proto
index c989407cdcc946c27efcf97e0b648530b20dfccd..e06527131007ab278aa92bb1e392aa9e49af9a96 100644
--- a/proto/kpi_sample_types.proto
+++ b/proto/kpi_sample_types.proto
@@ -1,10 +1,10 @@
 syntax = "proto3";
-//package kpi_sample_types;
+package kpi_sample_types;
 
-//enum KpiSampleType {
-//    UNKNOWN = 0;
-//    PACKETS_TRANSMITTED = 101;
-//    PACKETS_RECEIVED    = 102;
-//    BYTES_TRANSMITTED   = 201;
-//    BYTES_RECEIVED      = 202;
-//}
+enum KpiSampleType {
+    KPISAMPLETYPE_UNKNOWN = 0;
+    KPISAMPLETYPE_PACKETS_TRANSMITTED = 101;
+    KPISAMPLETYPE_PACKETS_RECEIVED    = 102;
+    KPISAMPLETYPE_BYTES_TRANSMITTED   = 201;
+    KPISAMPLETYPE_BYTES_RECEIVED      = 202;
+}
diff --git a/proto/l3_attackmitigator.proto b/proto/l3_attackmitigator.proto
index 345ac2ad116e84d1ecc46d8fe89c45e9acfa4e65..7d76408f892eb960cc61b54596c39833dbeed173 100644
--- a/proto/l3_attackmitigator.proto
+++ b/proto/l3_attackmitigator.proto
@@ -1,19 +1,16 @@
 syntax = "proto3";
 
+import "context.proto";
+
 service L3Attackmitigator{
   // Sends a greeting
-  rpc SendOutput (Output) returns (EmptyMitigator) {}
+  rpc SendOutput (L3AttackmitigatorOutput) returns (context.Empty) {}
   // Sends another greeting
-  rpc GetMitigation (EmptyMitigator) returns (EmptyMitigator) {}
-}
-
-
-message EmptyMitigator {
-	optional string message = 1;
+  rpc GetMitigation (context.Empty) returns (context.Empty) {}
 }
 
 
-message Output {
+message L3AttackmitigatorOutput {
 	float confidence = 1;
 	string timestamp = 2;
 	string ip_o = 3;	
@@ -22,7 +19,7 @@ message Output {
 	string flow_id = 6;
 	string protocol = 7;
 	string port_d = 8;
-	optional string ml_id = 9;
-	optional float time_start = 10;
-	optional float time_end = 11;
+	string ml_id = 9;
+	float time_start = 10;
+	float time_end = 11;
 }
diff --git a/proto/l3_centralizedattackdetector.proto b/proto/l3_centralizedattackdetector.proto
index 6d675f4b8bc72149d49e07b093560cf5d5195528..6a7dff8939a7c47cfde59038a21706a433c40614 100644
--- a/proto/l3_centralizedattackdetector.proto
+++ b/proto/l3_centralizedattackdetector.proto
@@ -2,12 +2,12 @@ syntax = "proto3";
 
 service L3Centralizedattackdetector {
   // Sends a greeting
-  rpc SendInput (ModelInput) returns (Empty) {}
+  rpc SendInput (L3CentralizedattackdetectorMetrics) returns (Empty) {}
   // Sends another greeting
-  rpc GetOutput (Empty) returns (ModelOutput) {}
+  rpc GetOutput (Empty) returns (L3CentralizedattackdetectorModelOutput) {}
 }
 
-message ModelInput {
+message L3CentralizedattackdetectorMetrics {
 	/*
 	Model input sent to the Inferencer by the client
 	There are currently 9 values and 
@@ -30,15 +30,15 @@ message ModelInput {
 	string port_d = 12;
 	string flow_id = 13;
 	string protocol = 14;
-	optional float time_start = 15;
-	optional float time_end = 16;
+	float time_start = 15;
+	float time_end = 16;
 }
 
 message Empty {
-	optional string message = 1;
+	string message = 1;
 }
 
-message ModelOutput {
+message L3CentralizedattackdetectorModelOutput {
 	float confidence = 1;
 	string timestamp = 2;
 	string ip_o = 3;	
@@ -47,9 +47,9 @@ message ModelOutput {
 	string flow_id = 6;
 	string protocol = 7;
 	string port_d = 8;
-	optional string ml_id = 9;
-	optional float time_start = 10;
-	optional float time_end = 11;
+	string ml_id = 9;
+	float time_start = 10;
+	float time_end = 11;
 }
 
 // Collections or streams?
diff --git a/proto/monitoring.proto b/proto/monitoring.proto
index c012b330ff65381fecaf04ae9d2f5964480aebf3..4b1b930438045d9199f31de2e408e2c56e91d848 100644
--- a/proto/monitoring.proto
+++ b/proto/monitoring.proto
@@ -2,68 +2,30 @@ syntax = "proto3";
 package monitoring;
 
 import "context.proto";
-//import "kpi_sample_types.proto";
+import "kpi_sample_types.proto";
 
 service MonitoringService {
-  // Old RPCs:
-  rpc CreateKpi (CreateKpiRequest) returns (KpiId) {}
-  rpc IncludeKpi (IncludeKpiRequest) returns (context.Empty) {}
-  rpc MonitorKpi (MonitorKpiRequest) returns (context.Empty) {}
-  rpc MonitorDeviceKpi (MonitorDeviceKpiRequest) returns (context.Empty) {}
-  rpc GetStreamKpi ( KpiId ) returns (stream Kpi) {}
-  rpc GetInstantKpi ( KpiId ) returns (Kpi) {}
-
-  // New RPCs:
-  //rpc CreateKpi       (KpiDescriptor    ) returns (KpiId        ) {}
-  //rpc GetKpiDescriptor(KpiId            ) returns (KpiDescriptor) {}
-  //rpc IncludeKpi      (Kpi              ) returns (context.Empty) {}
-  //rpc MonitorKpi      (MonitorKpiRequest) returns (context.Empty) {}
-  //rpc GetStreamKpi    (KpiId            ) returns (stream Kpi   ) {}
-  //rpc GetInstantKpi   (KpiId            ) returns (Kpi          ) {}
+  rpc CreateKpi       (KpiDescriptor    ) returns (KpiId        ) {}
+  rpc GetKpiDescriptor(KpiId            ) returns (KpiDescriptor) {}
+  rpc IncludeKpi      (Kpi              ) returns (context.Empty) {}
+  rpc MonitorKpi      (MonitorKpiRequest) returns (context.Empty) {}
+  rpc GetStreamKpi    (KpiId            ) returns (stream Kpi   ) {}
+  rpc GetInstantKpi   (KpiId            ) returns (Kpi          ) {}
 }
 
-message CreateKpiRequest /*New name: KpiDescriptor*/ {
-  // Old fields:
-  string kpiDescription = 1;
-  context.DeviceId device_id = 2;
-  KpiSampleType kpi_sample_type = 3;
-  //  context.EndpointId endpoint_id = 4;  // others might be added
-  //  context.ServiceId  service_id  = 5;  // for monitoring other
-  //  context.SliceId    slice_id    = 6;  // entities
-
-  // New fields:
-  //string kpi_description = 1;
-  //kpi_sample_types.KpiSampleType kpi_sample_type = 2;
-  //context.DeviceId device_id = 3;
-  //context.EndPointId endpoint_id = 4;
-  //context.ServiceId  service_id  = 5;
-  ////context.SliceId    slice_id    = 6; // to be used in future features
+message KpiDescriptor {
+  string kpi_description = 1;
+  kpi_sample_types.KpiSampleType kpi_sample_type = 2;
+  context.DeviceId device_id = 3;
+  context.EndPointId endpoint_id = 4;
+  context.ServiceId  service_id  = 5;
+//  context.SliceId    slice_id    = 6;
 }
 
 message MonitorKpiRequest{
   KpiId kpi_id = 1;
-
-  // Old fields:
-  uint32 connexion_time_s = 2;
-  uint32 sample_rate_ms = 3;
-
-  // New fields:
-  //float sampling_duration_s = 2;
-  //float sampling_interval_s = 3;
-}
-
-// Message to be removed:
-message MonitorDeviceKpiRequest{
-  Kpi kpi = 1;
-  uint32 connexion_time_s = 2;
-  uint32 sample_rate_ms = 3;
-}
-
-// Message to be removed:
-message IncludeKpiRequest{
-  KpiId kpi_id = 1;
-  string time_stamp = 2;
-  KpiValue kpi_value= 3;
+  float sampling_duration_s = 2;
+  float sampling_interval_s = 3;
 }
 
 message KpiId {
@@ -73,32 +35,18 @@ message KpiId {
 message Kpi {
   KpiId kpi_id = 1;
   string timestamp = 2;
-  string kpiDescription = 3;          // field to be removed
-  KpiValue kpi_value = 4;             // field to be renumbered to 3
-  KpiSampleType kpi_sample_type = 5;  // field to be removed
-  context.DeviceId device_id = 6;     // field to be removed
-  //  context.EndpointId endpoint_id = 7;  // others might be added // field to be removed
-  //  context.ServiceId  service_id  = 8;  // for monitoring other  // field to be removed
-  //  context.SliceId    slice_id    = 9;  // entities              // field to be removed
+  KpiValue kpi_value = 4;
 }
 
 message KpiValue {
   oneof value {
-    uint32 intVal = 1;      // field to be renamed to int_val
-    float floatVal = 2;     // field to be renamed to float_val
-    string stringVal = 3;   // field to be renamed to str_val
-    bool boolVal = 4;       // field to be renamed to bool_val
+    uint32 intVal = 1;
+    float floatVal = 2;
+    string stringVal = 3;
+    bool boolVal = 4;
   }
 }
 
 message KpiList {
-  repeated Kpi kpiList = 1; // to be renamed to kpi_list
-}
-
-enum KpiSampleType {  // to be moved to file "kpi_sample_types.proto"
-  UNKNOWN = 0;
-  PACKETS_TRANSMITTED = 101;  // others might be added for
-  PACKETS_RECEIVED    = 102;  // packet, optical, radio,...
-  BYTES_TRANSMITTED   = 201;
-  BYTES_RECEIVED      = 202;
+  repeated Kpi kpi_list = 1;
 }
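A minimal sketch of the reworked monitoring flow (register a KPI, then push a sample), assuming the generated `monitoring_pb2`/`monitoring_pb2_grpc`/`kpi_sample_types_pb2` modules and pre-existing `channel`, `device_id` and `endpoint_id` objects; the timestamp and value are illustrative:

```python
import monitoring_pb2
import monitoring_pb2_grpc
import kpi_sample_types_pb2

def push_sample(channel, device_id, endpoint_id, value):
    """Register a KPI descriptor and include one sample for it."""
    stub = monitoring_pb2_grpc.MonitoringServiceStub(channel)

    descriptor = monitoring_pb2.KpiDescriptor(
        kpi_description="received packets",
        kpi_sample_type=kpi_sample_types_pb2.KPISAMPLETYPE_PACKETS_RECEIVED)
    descriptor.device_id.CopyFrom(device_id)
    descriptor.endpoint_id.CopyFrom(endpoint_id)
    kpi_id = stub.CreateKpi(descriptor)

    kpi = monitoring_pb2.Kpi(timestamp="2021-01-01T00:00:00Z")
    kpi.kpi_id.CopyFrom(kpi_id)
    kpi.kpi_value.intVal = value
    return stub.IncludeKpi(kpi)
```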
diff --git a/proto/optical_attack_mitigator.proto b/proto/optical_attack_mitigator.proto
new file mode 100644
index 0000000000000000000000000000000000000000..d75a845dd10f605d6894a610f3f82bbef27b8297
--- /dev/null
+++ b/proto/optical_attack_mitigator.proto
@@ -0,0 +1,24 @@
+// protocol buffers documentation: https://developers.google.com/protocol-buffers/docs/proto3
+syntax = "proto3";
+package optical_attack_mitigator;
+
+import "context.proto";
+
+service AttackMitigator {
+  rpc NotifyAttack (AttackDescription) returns (AttackResponse) {}
+}
+
+message AttackDescription {
+  context.Uuid cs_id = 1;
+  int32 attack_id = 2;
+  float confidence = 3;
+  string attack_description = 4;
+}
+
+message AttackResponse {
+  context.Uuid cs_id = 1;
+  int32 attack_id = 2;
+  string attack_description = 3;
+  int32 response_strategy_id = 4;
+  string response_strategy_description = 5;
+}
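A minimal caller sketch for `NotifyAttack`, assuming the generated `optical_attack_mitigator_pb2`/`optical_attack_mitigator_pb2_grpc` modules and the in-cluster name/port declared in `manifests/opticalattackmitigatorservice.yaml`; the attack values are illustrative:

```python
import grpc
import optical_attack_mitigator_pb2
import optical_attack_mitigator_pb2_grpc

channel = grpc.insecure_channel("opticalattackmitigatorservice:10007")
stub = optical_attack_mitigator_pb2_grpc.AttackMitigatorStub(channel)

description = optical_attack_mitigator_pb2.AttackDescription(
    attack_id=1, confidence=0.92,
    attack_description="power anomaly detected on optical channel")
# cs_id (a context.Uuid identifying the affected connectivity service) would be
# copied from the service under analysis, e.g. description.cs_id.CopyFrom(...).

response = stub.NotifyAttack(description)
print(response.response_strategy_id, response.response_strategy_description)
```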
diff --git a/proto/optical_centralized_attack_detector.proto b/proto/optical_centralized_attack_detector.proto
new file mode 100644
index 0000000000000000000000000000000000000000..99cc1ce6fabc662de078f58985eae2d4eeb3d24c
--- /dev/null
+++ b/proto/optical_centralized_attack_detector.proto
@@ -0,0 +1,18 @@
+// protocol buffers documentation: https://developers.google.com/protocol-buffers/docs/proto3
+syntax = "proto3";
+package centralized_attack_detector;
+
+import "context.proto";
+import "monitoring.proto";
+
+service OpticalCentralizedAttackDetectorService {
+  rpc NotifyServiceUpdate (context.Service   ) returns (context.Empty) {}
+  
+  // rpc that triggers the attack detection loop
+  rpc DetectAttack        (context.Empty     ) returns (context.Empty) {}
+
+  // rpc called by the distributed component to report KPIs
+  rpc ReportSummarizedKpi (monitoring.KpiList) returns (context.Empty) {}
+
+  rpc ReportKpi           (monitoring.KpiList) returns (context.Empty) {}
+}
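A minimal sketch of the distributed-to-centralized reporting path (`ReportSummarizedKpi` with a `monitoring.KpiList`), assuming the generated stub modules and the in-cluster name/port declared in `manifests/opticalcentralizedattackdetectorservice.yaml`; the KPI value is illustrative:

```python
import grpc
import monitoring_pb2
import optical_centralized_attack_detector_pb2_grpc

channel = grpc.insecure_channel("opticalcentralizedattackdetectorservice:10005")
stub = optical_centralized_attack_detector_pb2_grpc.OpticalCentralizedAttackDetectorServiceStub(channel)

kpi_list = monitoring_pb2.KpiList()
kpi = kpi_list.kpi_list.add()
kpi.timestamp = "2021-01-01T00:00:00Z"
kpi.kpi_value.floatVal = -21.3  # e.g. a summarized optical power reading

stub.ReportSummarizedKpi(kpi_list)
```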
diff --git a/proto/te.proto b/proto/te.proto
new file mode 100644
index 0000000000000000000000000000000000000000..a0233fab09a024e217a19b46681562db0c0541e1
--- /dev/null
+++ b/proto/te.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+package te;
+
+import "context.proto";
+import "service.proto";
+
+service TEService {
+  rpc RequestLSP(service.Service  ) returns (service.ServiceState) {}
+  rpc UpdateLSP (service.ServiceId) returns (service.ServiceState) {}
+  rpc DeleteLSP (service.ServiceId) returns (context.Empty       ) {}
+}
diff --git a/proto/uml/Empty.png b/proto/uml/Empty.png
new file mode 100644
index 0000000000000000000000000000000000000000..eecbaabac484da77bf84ba5f1fec96c5f00ed544
Binary files /dev/null and b/proto/uml/Empty.png differ
diff --git a/proto/uml/L3AttackmitigatorOutput.png b/proto/uml/L3AttackmitigatorOutput.png
new file mode 100644
index 0000000000000000000000000000000000000000..fe262fb4d87828ab5b5906f666589641d5f34bdd
Binary files /dev/null and b/proto/uml/L3AttackmitigatorOutput.png differ
diff --git a/proto/uml/L3CentralizedattackdetectorMetrics.png b/proto/uml/L3CentralizedattackdetectorMetrics.png
new file mode 100644
index 0000000000000000000000000000000000000000..5b35d521382b7050d1f09310689567d2f2a7910e
Binary files /dev/null and b/proto/uml/L3CentralizedattackdetectorMetrics.png differ
diff --git a/proto/uml/L3CentralizedattackdetectorModelOutput.png b/proto/uml/L3CentralizedattackdetectorModelOutput.png
new file mode 100644
index 0000000000000000000000000000000000000000..1d29df1fc90aaad9363f8231ccc6a8194e920cc4
Binary files /dev/null and b/proto/uml/L3CentralizedattackdetectorModelOutput.png differ
diff --git a/proto/uml/automation.png b/proto/uml/automation.png
index 22230e16556c92bd5829d4988a611fdeb8ec2688..97e733d388760d6bc32f294558b1ee7b9df8b764 100644
Binary files a/proto/uml/automation.png and b/proto/uml/automation.png differ
diff --git a/proto/uml/context.png b/proto/uml/context.png
index c71d2e63e77f68084b1ac112c7c8bbc7842dc92c..57a2493cf8596d1f4e70f2e3056d1a9d71432aee 100644
Binary files a/proto/uml/context.png and b/proto/uml/context.png differ
diff --git a/proto/uml/dbscanserving.png b/proto/uml/dbscanserving.png
new file mode 100644
index 0000000000000000000000000000000000000000..d00792952b0b9eced9291b7b5595aec5e8268220
Binary files /dev/null and b/proto/uml/dbscanserving.png differ
diff --git a/proto/uml/device.png b/proto/uml/device.png
new file mode 100644
index 0000000000000000000000000000000000000000..a30ec19aecdb00fbbe6db16c3c45272e4da045e1
Binary files /dev/null and b/proto/uml/device.png differ
diff --git a/proto/uml/kpi_sample_types.png b/proto/uml/kpi_sample_types.png
new file mode 100644
index 0000000000000000000000000000000000000000..dfd0720034fa71fc4cbc44a903e32f4d40712e19
Binary files /dev/null and b/proto/uml/kpi_sample_types.png differ
diff --git a/proto/uml/monitoring.png b/proto/uml/monitoring.png
index d0ffd5120ea91b1b1ba19cdd8299ed1501ebe8ae..676389fc6971457b3d0c4194484bb78b4be52404 100644
Binary files a/proto/uml/monitoring.png and b/proto/uml/monitoring.png differ
diff --git a/proto/uml/optical_attack_mitigator.png b/proto/uml/optical_attack_mitigator.png
new file mode 100644
index 0000000000000000000000000000000000000000..e790794872c76de73ebecf006c612841c33ea3b5
Binary files /dev/null and b/proto/uml/optical_attack_mitigator.png differ
diff --git a/proto/uml/policy.png b/proto/uml/policy.png
index 76ae4611f5e41252eb09817b919326d2d16d3f57..e4f5c52848b75643c3a8272fb0223147bced5b90 100644
Binary files a/proto/uml/policy.png and b/proto/uml/policy.png differ
diff --git a/proto/uml/service.png b/proto/uml/service.png
deleted file mode 100644
index b2b12f0bbc12415f53912ce4658311eb805672c8..0000000000000000000000000000000000000000
Binary files a/proto/uml/service.png and /dev/null differ
diff --git a/proto/uml/slice.png b/proto/uml/slice.png
index 7a9335c46a2807528928b1d3df22a2b5ea5ac951..01b62425b28f6bb5319a96ddd7fc5b62ff620620 100644
Binary files a/proto/uml/slice.png and b/proto/uml/slice.png differ
diff --git a/report_coverage_centralized_attack_detector.sh b/report_coverage_centralized_attack_detector.sh
deleted file mode 100755
index ea34a11315cfdfbc8c05427c8d4f6e78dad4b42c..0000000000000000000000000000000000000000
--- a/report_coverage_centralized_attack_detector.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./report_coverage_all.sh | grep --color -E -i "^centralizedattackdetector/.*$|$"
diff --git a/report_coverage_l3_attackmitigator.sh b/report_coverage_l3_attackmitigator.sh
old mode 100644
new mode 100755
diff --git a/report_coverage_l3_centralizedattackdetector.sh b/report_coverage_l3_centralizedattackdetector.sh
old mode 100644
new mode 100755
diff --git a/report_coverage_l3_distributedattackdetector.sh b/report_coverage_l3_distributedattackdetector.sh
old mode 100644
new mode 100755
diff --git a/run_tests_locally.sh b/run_tests_locally.sh
index 163bf3330da8a88540df90bf334e571957ac4798..11add2a82b7a4cfeddfd974d986b000e631bac23 100755
--- a/run_tests_locally.sh
+++ b/run_tests_locally.sh
@@ -30,14 +30,10 @@ coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     common/message_broker/tests/test_unitary.py \
     common/rpc_method_wrapper/tests/test_unitary.py
 
-coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
-    centralizedattackdetector/tests/test_unitary.py
-
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     context/tests/test_unitary.py
 
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
-    device/tests/test_unitary_driverapi.py \
     device/tests/test_unitary.py
 
 coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \
@@ -49,6 +45,15 @@ coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \
 coverage run --rcfile=$RCFILE --append -m pytest -s --log-level=INFO --verbose \
     l3_attackmitigator/tests/test_unitary.py
 
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    opticalcentralizedattackdetector/tests/test_unitary.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    dbscanserving/tests/test_unitary.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    opticalattackmitigator/tests/test_unitary.py
+
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     service/tests/test_unitary.py
 
diff --git a/src/automation/.env.example b/src/automation/.env.example
index 35c4e0b97f715c3c0a6ff961f209070eae547590..98c75623cb55ecb40cfe1c77c01ddb86d433a932 100644
--- a/src/automation/.env.example
+++ b/src/automation/.env.example
@@ -1,5 +1,5 @@
 # Define the host for the Context Service
-quarkus.kubernetes.env.vars.context-service-host=context
+quarkus.kubernetes.env.vars.context-service-host=ContextService
 
 # Define the host for the Device Service
-quarkus.kubernetes.env.vars.device-service-host=device
\ No newline at end of file
+quarkus.kubernetes.env.vars.device-service-host=DeviceService
\ No newline at end of file
diff --git a/src/automation/.gitlab-ci.yml b/src/automation/.gitlab-ci.yml
index 612a6a107075652c8ee624da661a646d0b3f3d1e..0e87fa4b3a726ddf9ae79c717487ae7a7fedd0e6 100644
--- a/src/automation/.gitlab-ci.yml
+++ b/src/automation/.gitlab-ci.yml
@@ -2,21 +2,24 @@
 build automation:
   variables:
     IMAGE_NAME: 'automation' # name of the microservice
-    IMAGE_NAME_TEST: 'automation-test' # name of the microservice
     IMAGE_TAG: '0.0.1' # tag of the container image (production, development, etc)
   stage: build
   script:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/src/main/docker/Dockerfile.multistage.jvm ./src/$IMAGE_NAME/ --target builder
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
         - src/$IMAGE_NAME/**
+        - manifests/${IMAGE_NAME}service.yaml
         - .gitlab-ci.yml
 
 # Run tests, build & push the image
 unit_test automation:
   variables:
     IMAGE_NAME: 'automation' # name of the microservice
-    IMAGE_NAME_TEST: 'automation-test' # name of the microservice
     IMAGE_TAG: '0.0.1' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -28,9 +31,14 @@ unit_test automation:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/src/main/docker/Dockerfile.multistage.jvm ./src/$IMAGE_NAME/ --target release
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker rm -f $IMAGE_NAME
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
         - src/$IMAGE_NAME/**
+        - manifests/${IMAGE_NAME}service.yaml
         - .gitlab-ci.yml
 
 # Deployment of automation service in Kubernetes Cluster
@@ -45,3 +53,9 @@ deploy automation:
     - kubectl apply -f "manifests/automationservice.yaml"
     - kubectl delete pods --selector app=automationservice
     - kubectl get all
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
+
diff --git a/src/automation/README.md b/src/automation/README.md
index dc8aef6b11581ed43d0b6f8d2889428d23aefbc1..099980bcc4172bf9e5c2d59459f40ae4331696cf 100644
--- a/src/automation/README.md
+++ b/src/automation/README.md
@@ -1,19 +1,39 @@
-# How to run locally the automation service (tested in Ubuntu 20.04)
+# Automation TeraFlow OS service
 
+The Automation service, also known as Zero-Touch Provisioning (ZTP), is tested on Ubuntu 20.04. Follow the instructions below to build, test, and run this service in your local environment.
 
-## Compile code
+## Automation TeraFlow OS service architecture
 
-`
-./mvnw compile
-`
-## Execute unit tests
+The Automation TeraFlow OS service architecture consists of six (6) interfaces:
+
+1. The `AutomationGateway` interface, which implements all the RPC functions described in the `automation.proto` file.
+2. The `ContextGateway` interface, which communicates with a `Context` Service gRPC client and implements all the RPC functions described in the `context.proto` file.
+3. The `DeviceGateway` interface, which communicates with a `Device` Service gRPC client and implements all the RPC functions described in the `device.proto` file.
+4. The `AutomationService` interface, which implements the `addDevice()` method by communicating with a `Context` gRPC client and a `Device` gRPC client through the `ContextService` and `DeviceService` interfaces, respectively.
+5. The `ContextService` interface, which implements the `getDevice()` & `getDeviceEvents()` methods by communicating with a `Context` gRPC client through the `ContextGateway` interface.
+6. The `DeviceService` interface, which implements the `getInitialConfiguration()` & `configureDevice()` methods by communicating with a `Device` gRPC client through the `DeviceGateway` interface.
 
-`
-./mvnw test
-`
-## Run service
 
-`
-./mvnw quarkus:dev
-`
+## Run with dev profile
 
+```bash
+./mvnw clean quarkus:dev
+```
+
+## Running tests
+
+Run unit and functional tests with `./mvnw clean test`.
+
+## Deploying on a Kubernetes cluster
+
+To generate the K8s manifest file under `target/kubernetes/kubernetes.yml`, run
+
+```bash
+./mvnw clean package -DskipUTs -DskipITs
+```
+
+To deploy the application in a K8s cluster, run
+
+```bash
+kubectl apply -f "manifests/automationservice.yaml"
+```
diff --git a/src/automation/pom.xml b/src/automation/pom.xml
index 0bf37ea67c0953cc63da34046da93ab281adf76c..042185673a6fd069252fcbe655d33a83ca7b9ff5 100644
--- a/src/automation/pom.xml
+++ b/src/automation/pom.xml
@@ -83,6 +83,13 @@
   </dependencyManagement>
 
   <dependencies>
+
+    <dependency>
+      <groupId>io.github.project-openubl</groupId>
+      <artifactId>quarkus-bouncycastle</artifactId>
+      <version>1.2.2.Final</version>
+    </dependency>
+
     <dependency>
       <groupId>io.quarkus</groupId>
       <artifactId>quarkus-grpc</artifactId>
diff --git a/src/automation/src/main/java/eu/teraflow/automation/ContextSubscriber.java b/src/automation/src/main/java/eu/teraflow/automation/ContextSubscriber.java
index cedbce546d400a10f6cd3334d3fc443b430db3b4..195634d00bb17debb623e164d975288488a9a240 100644
--- a/src/automation/src/main/java/eu/teraflow/automation/ContextSubscriber.java
+++ b/src/automation/src/main/java/eu/teraflow/automation/ContextSubscriber.java
@@ -4,6 +4,7 @@ import eu.teraflow.automation.context.ContextService;
 import eu.teraflow.automation.device.model.DeviceEvent;
 import io.quarkus.runtime.StartupEvent;
 import io.smallrye.mutiny.Multi;
+import java.time.Duration;
 import javax.enterprise.context.ApplicationScoped;
 import javax.enterprise.event.Observes;
 import javax.inject.Inject;
@@ -29,7 +30,15 @@ public class ContextSubscriber {
     }
 
     public void listenForDeviceEvents() {
-        Multi<DeviceEvent> deviceEventsMulti = contextService.getDeviceEvents();
+
+        Multi<DeviceEvent> deviceEventsMulti =
+                contextService
+                        .getDeviceEvents()
+                        .onFailure()
+                        .retry()
+                        .withBackOff(Duration.ofSeconds(1))
+                        .withJitter(0.2)
+                        .atMost(10);
 
         deviceEventsMulti
                 .onItem()
diff --git a/src/automation/src/main/java/eu/teraflow/automation/SimpleLivenessCheck.java b/src/automation/src/main/java/eu/teraflow/automation/SimpleLivenessCheck.java
index ee4686b63ff9e90f0be8cda9cee4e363bf6d7507..28c896fbd97a63df67d5986a6d1a2fc53b1c62c6 100644
--- a/src/automation/src/main/java/eu/teraflow/automation/SimpleLivenessCheck.java
+++ b/src/automation/src/main/java/eu/teraflow/automation/SimpleLivenessCheck.java
@@ -11,6 +11,6 @@ public class SimpleLivenessCheck implements HealthCheck {
 
     @Override
     public HealthCheckResponse call() {
-        return HealthCheckResponse.up("Automation Service");
+        return HealthCheckResponse.up("Automation Service is live");
     }
 }
diff --git a/src/automation/src/main/java/eu/teraflow/automation/SimpleReadinessCheck.java b/src/automation/src/main/java/eu/teraflow/automation/SimpleReadinessCheck.java
new file mode 100644
index 0000000000000000000000000000000000000000..e542a3582855ae4a81273520f4075f9038f61d43
--- /dev/null
+++ b/src/automation/src/main/java/eu/teraflow/automation/SimpleReadinessCheck.java
@@ -0,0 +1,17 @@
+package eu.teraflow.automation;
+
+import javax.enterprise.context.ApplicationScoped;
+import org.eclipse.microprofile.health.HealthCheck;
+import org.eclipse.microprofile.health.HealthCheckResponse;
+import org.eclipse.microprofile.health.Readiness;
+
+@Readiness
+@ApplicationScoped
+public class SimpleReadinessCheck implements HealthCheck {
+
+    @Override
+    public HealthCheckResponse call() {
+
+        return HealthCheckResponse.up("Automation Service is ready");
+    }
+}
diff --git a/src/automation/src/main/proto/kpi_sample_types.proto b/src/automation/src/main/proto/kpi_sample_types.proto
new file mode 120000
index 0000000000000000000000000000000000000000..98e748bbf4fbadbc04c3657f458d733f1bc7bdb8
--- /dev/null
+++ b/src/automation/src/main/proto/kpi_sample_types.proto
@@ -0,0 +1 @@
+../../../../../proto/kpi_sample_types.proto
\ No newline at end of file
diff --git a/src/automation/src/main/proto/monitoring.proto b/src/automation/src/main/proto/monitoring.proto
new file mode 120000
index 0000000000000000000000000000000000000000..aceaa7328099fe736163be048ee1ad21a61d79a2
--- /dev/null
+++ b/src/automation/src/main/proto/monitoring.proto
@@ -0,0 +1 @@
+../../../../../proto/monitoring.proto
\ No newline at end of file
diff --git a/src/automation/src/main/resources/application.yaml b/src/automation/src/main/resources/application.yaml
index 2ea1ef12a279b6e94ab7aa2eff1dacb7020c7ac2..8d5ae6ed513f7d20227b217fd803c809c4f5b2c4 100644
--- a/src/automation/src/main/resources/application.yaml
+++ b/src/automation/src/main/resources/application.yaml
@@ -38,5 +38,5 @@ quarkus:
         container-port: 9999
     env:
       vars:
-        context-service-host: context
-        device-service-host: device
\ No newline at end of file
+        context-service-host: ContextService
+        device-service-host: DeviceService
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/automation/Automation.java b/src/automation/target/generated-sources/grpc/automation/Automation.java
index c4eb90a45b7ac0d615d72df7e1f0b314a764036a..a44bc42294078fdba325d9dc9f149eaf1bd2bcbc 100644
--- a/src/automation/target/generated-sources/grpc/automation/Automation.java
+++ b/src/automation/target/generated-sources/grpc/automation/Automation.java
@@ -3344,21 +3344,29 @@ public final class Automation {
       com.google.protobuf.MessageOrBuilder {
 
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @return A list containing the deleted.
      */
-    java.util.List<java.lang.Boolean> getDeletedList();
+    java.util.List<java.lang.String>
+        getDeletedList();
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @return The count of deleted.
      */
     int getDeletedCount();
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @param index The index of the element to return.
      * @return The deleted at the given index.
      */
-    boolean getDeleted(int index);
+    java.lang.String getDeleted(int index);
+    /**
+     * <code>repeated string deleted = 1;</code>
+     * @param index The index of the value to return.
+     * @return The bytes of the deleted at the given index.
+     */
+    com.google.protobuf.ByteString
+        getDeletedBytes(int index);
   }
   /**
    * Protobuf type {@code automation.DeviceDeletionResult}
@@ -3373,7 +3381,7 @@ public final class Automation {
       super(builder);
     }
     private DeviceDeletionResult() {
-      deleted_ = emptyBooleanList();
+      deleted_ = com.google.protobuf.LazyStringArrayList.EMPTY;
     }
 
     @java.lang.Override
@@ -3407,25 +3415,13 @@ public final class Automation {
             case 0:
               done = true;
               break;
-            case 8: {
-              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
-                deleted_ = newBooleanList();
-                mutable_bitField0_ |= 0x00000001;
-              }
-              deleted_.addBoolean(input.readBool());
-              break;
-            }
             case 10: {
-              int length = input.readRawVarint32();
-              int limit = input.pushLimit(length);
-              if (!((mutable_bitField0_ & 0x00000001) != 0) && input.getBytesUntilLimit() > 0) {
-                deleted_ = newBooleanList();
+              java.lang.String s = input.readStringRequireUtf8();
+              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+                deleted_ = new com.google.protobuf.LazyStringArrayList();
                 mutable_bitField0_ |= 0x00000001;
               }
-              while (input.getBytesUntilLimit() > 0) {
-                deleted_.addBoolean(input.readBool());
-              }
-              input.popLimit(limit);
+              deleted_.add(s);
               break;
             }
             default: {
@@ -3444,7 +3440,7 @@ public final class Automation {
             e).setUnfinishedMessage(this);
       } finally {
         if (((mutable_bitField0_ & 0x00000001) != 0)) {
-          deleted_.makeImmutable(); // C
+          deleted_ = deleted_.getUnmodifiableView();
         }
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -3464,32 +3460,39 @@ public final class Automation {
     }
 
     public static final int DELETED_FIELD_NUMBER = 1;
-    private com.google.protobuf.Internal.BooleanList deleted_;
+    private com.google.protobuf.LazyStringList deleted_;
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @return A list containing the deleted.
      */
-    @java.lang.Override
-    public java.util.List<java.lang.Boolean>
+    public com.google.protobuf.ProtocolStringList
         getDeletedList() {
       return deleted_;
     }
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @return The count of deleted.
      */
     public int getDeletedCount() {
       return deleted_.size();
     }
     /**
-     * <code>repeated bool deleted = 1;</code>
+     * <code>repeated string deleted = 1;</code>
      * @param index The index of the element to return.
      * @return The deleted at the given index.
      */
-    public boolean getDeleted(int index) {
-      return deleted_.getBoolean(index);
+    public java.lang.String getDeleted(int index) {
+      return deleted_.get(index);
+    }
+    /**
+     * <code>repeated string deleted = 1;</code>
+     * @param index The index of the value to return.
+     * @return The bytes of the deleted at the given index.
+     */
+    public com.google.protobuf.ByteString
+        getDeletedBytes(int index) {
+      return deleted_.getByteString(index);
     }
-    private int deletedMemoizedSerializedSize = -1;
 
     private byte memoizedIsInitialized = -1;
     @java.lang.Override
@@ -3505,13 +3508,8 @@ public final class Automation {
     @java.lang.Override
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
-      if (getDeletedList().size() > 0) {
-        output.writeUInt32NoTag(10);
-        output.writeUInt32NoTag(deletedMemoizedSerializedSize);
-      }
       for (int i = 0; i < deleted_.size(); i++) {
-        output.writeBoolNoTag(deleted_.getBoolean(i));
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, deleted_.getRaw(i));
       }
       unknownFields.writeTo(output);
     }
@@ -3524,14 +3522,11 @@ public final class Automation {
       size = 0;
       {
         int dataSize = 0;
-        dataSize = 1 * getDeletedList().size();
-        size += dataSize;
-        if (!getDeletedList().isEmpty()) {
-          size += 1;
-          size += com.google.protobuf.CodedOutputStream
-              .computeInt32SizeNoTag(dataSize);
+        for (int i = 0; i < deleted_.size(); i++) {
+          dataSize += computeStringSizeNoTag(deleted_.getRaw(i));
         }
-        deletedMemoizedSerializedSize = dataSize;
+        size += dataSize;
+        size += 1 * getDeletedList().size();
       }
       size += unknownFields.getSerializedSize();
       memoizedSize = size;
@@ -3698,7 +3693,7 @@ public final class Automation {
       @java.lang.Override
       public Builder clear() {
         super.clear();
-        deleted_ = emptyBooleanList();
+        deleted_ = com.google.protobuf.LazyStringArrayList.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
@@ -3728,7 +3723,7 @@ public final class Automation {
         automation.Automation.DeviceDeletionResult result = new automation.Automation.DeviceDeletionResult(this);
         int from_bitField0_ = bitField0_;
         if (((bitField0_ & 0x00000001) != 0)) {
-          deleted_.makeImmutable();
+          deleted_ = deleted_.getUnmodifiableView();
           bitField0_ = (bitField0_ & ~0x00000001);
         }
         result.deleted_ = deleted_;
@@ -3820,68 +3815,83 @@ public final class Automation {
       }
       private int bitField0_;
 
-      private com.google.protobuf.Internal.BooleanList deleted_ = emptyBooleanList();
+      private com.google.protobuf.LazyStringList deleted_ = com.google.protobuf.LazyStringArrayList.EMPTY;
       private void ensureDeletedIsMutable() {
         if (!((bitField0_ & 0x00000001) != 0)) {
-          deleted_ = mutableCopy(deleted_);
+          deleted_ = new com.google.protobuf.LazyStringArrayList(deleted_);
           bitField0_ |= 0x00000001;
          }
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @return A list containing the deleted.
        */
-      public java.util.List<java.lang.Boolean>
+      public com.google.protobuf.ProtocolStringList
           getDeletedList() {
-        return ((bitField0_ & 0x00000001) != 0) ?
-                 java.util.Collections.unmodifiableList(deleted_) : deleted_;
+        return deleted_.getUnmodifiableView();
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @return The count of deleted.
        */
       public int getDeletedCount() {
         return deleted_.size();
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @param index The index of the element to return.
        * @return The deleted at the given index.
        */
-      public boolean getDeleted(int index) {
-        return deleted_.getBoolean(index);
+      public java.lang.String getDeleted(int index) {
+        return deleted_.get(index);
+      }
+      /**
+       * <code>repeated string deleted = 1;</code>
+       * @param index The index of the value to return.
+       * @return The bytes of the deleted at the given index.
+       */
+      public com.google.protobuf.ByteString
+          getDeletedBytes(int index) {
+        return deleted_.getByteString(index);
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @param index The index to set the value at.
        * @param value The deleted to set.
        * @return This builder for chaining.
        */
       public Builder setDeleted(
-          int index, boolean value) {
-        ensureDeletedIsMutable();
-        deleted_.setBoolean(index, value);
+          int index, java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  ensureDeletedIsMutable();
+        deleted_.set(index, value);
         onChanged();
         return this;
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @param value The deleted to add.
        * @return This builder for chaining.
        */
-      public Builder addDeleted(boolean value) {
-        ensureDeletedIsMutable();
-        deleted_.addBoolean(value);
+      public Builder addDeleted(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  ensureDeletedIsMutable();
+        deleted_.add(value);
         onChanged();
         return this;
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @param values The deleted to add.
        * @return This builder for chaining.
        */
       public Builder addAllDeleted(
-          java.lang.Iterable<? extends java.lang.Boolean> values) {
+          java.lang.Iterable<java.lang.String> values) {
         ensureDeletedIsMutable();
         com.google.protobuf.AbstractMessageLite.Builder.addAll(
             values, deleted_);
@@ -3889,15 +3899,31 @@ public final class Automation {
         return this;
       }
       /**
-       * <code>repeated bool deleted = 1;</code>
+       * <code>repeated string deleted = 1;</code>
        * @return This builder for chaining.
        */
       public Builder clearDeleted() {
-        deleted_ = emptyBooleanList();
+        deleted_ = com.google.protobuf.LazyStringArrayList.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000001);
         onChanged();
         return this;
       }
+      /**
+       * <code>repeated string deleted = 1;</code>
+       * @param value The bytes of the deleted to add.
+       * @return This builder for chaining.
+       */
+      public Builder addDeletedBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  checkByteStringIsUtf8(value);
+        ensureDeletedIsMutable();
+        deleted_.add(value);
+        onChanged();
+        return this;
+      }
       @java.lang.Override
       public final Builder setUnknownFields(
           final com.google.protobuf.UnknownFieldSet unknownFields) {
@@ -4419,7 +4445,7 @@ public final class Automation {
       "evRoleId\030\001 \001(\0132\030.automation.DeviceRoleId" +
       "\0220\n\014devRoleState\030\002 \001(\0162\032.automation.ZtpD" +
       "eviceState\"\'\n\024DeviceDeletionResult\022\017\n\007de" +
-      "leted\030\001 \003(\010\"\007\n\005Empty*H\n\016DeviceRoleType\022\010" +
+      "leted\030\001 \003(\t\"\007\n\005Empty*H\n\016DeviceRoleType\022\010" +
       "\n\004NONE\020\000\022\013\n\007DEV_OPS\020\001\022\014\n\010DEV_CONF\020\002\022\021\n\rP" +
       "IPELINE_CONF\020\003*~\n\016ZtpDeviceState\022\033\n\027ZTP_" +
       "DEV_STATE_UNDEFINED\020\000\022\031\n\025ZTP_DEV_STATE_C" +
diff --git a/src/automation/target/generated-sources/grpc/context/ContextOuterClass.java b/src/automation/target/generated-sources/grpc/context/ContextOuterClass.java
index 21d845ecd69f3ec21bdc185dfac5e2cbd171904d..3decce929e0eb9ac2e50c23f8aa3dba36b4321bf 100644
--- a/src/automation/target/generated-sources/grpc/context/ContextOuterClass.java
+++ b/src/automation/target/generated-sources/grpc/context/ContextOuterClass.java
@@ -28708,24 +28708,45 @@ public final class ContextOuterClass {
     context.ContextOuterClass.EndPointIdOrBuilder getEndpointIdOrBuilder();
 
     /**
-     * <pre>
-     *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-     * </pre>
-     *
      * <code>string endpoint_type = 2;</code>
      * @return The endpointType.
      */
     java.lang.String getEndpointType();
     /**
-     * <pre>
-     *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-     * </pre>
-     *
      * <code>string endpoint_type = 2;</code>
      * @return The bytes for endpointType.
      */
     com.google.protobuf.ByteString
         getEndpointTypeBytes();
+
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return A list containing the kpiSampleTypes.
+     */
+    java.util.List<kpi_sample_types.KpiSampleTypes.KpiSampleType> getKpiSampleTypesList();
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return The count of kpiSampleTypes.
+     */
+    int getKpiSampleTypesCount();
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @param index The index of the element to return.
+     * @return The kpiSampleTypes at the given index.
+     */
+    kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleTypes(int index);
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return A list containing the enum numeric values on the wire for kpiSampleTypes.
+     */
+    java.util.List<java.lang.Integer>
+    getKpiSampleTypesValueList();
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @param index The index of the value to return.
+     * @return The enum numeric value on the wire of kpiSampleTypes at the given index.
+     */
+    int getKpiSampleTypesValue(int index);
   }
   /**
    * Protobuf type {@code context.EndPoint}
@@ -28741,6 +28762,7 @@ public final class ContextOuterClass {
     }
     private EndPoint() {
       endpointType_ = "";
+      kpiSampleTypes_ = java.util.Collections.emptyList();
     }
 
     @java.lang.Override
@@ -28763,6 +28785,7 @@ public final class ContextOuterClass {
       if (extensionRegistry == null) {
         throw new java.lang.NullPointerException();
       }
+      int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
       try {
@@ -28792,6 +28815,29 @@ public final class ContextOuterClass {
               endpointType_ = s;
               break;
             }
+            case 24: {
+              int rawValue = input.readEnum();
+              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+                kpiSampleTypes_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              kpiSampleTypes_.add(rawValue);
+              break;
+            }
+            case 26: {
+              int length = input.readRawVarint32();
+              int oldLimit = input.pushLimit(length);
+              while(input.getBytesUntilLimit() > 0) {
+                int rawValue = input.readEnum();
+                if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+                  kpiSampleTypes_ = new java.util.ArrayList<java.lang.Integer>();
+                  mutable_bitField0_ |= 0x00000001;
+                }
+                kpiSampleTypes_.add(rawValue);
+              }
+              input.popLimit(oldLimit);
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -28807,6 +28853,9 @@ public final class ContextOuterClass {
         throw new com.google.protobuf.InvalidProtocolBufferException(
             e).setUnfinishedMessage(this);
       } finally {
+        if (((mutable_bitField0_ & 0x00000001) != 0)) {
+          kpiSampleTypes_ = java.util.Collections.unmodifiableList(kpiSampleTypes_);
+        }
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
       }
@@ -28853,10 +28902,6 @@ public final class ContextOuterClass {
     public static final int ENDPOINT_TYPE_FIELD_NUMBER = 2;
     private volatile java.lang.Object endpointType_;
     /**
-     * <pre>
-     *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-     * </pre>
-     *
      * <code>string endpoint_type = 2;</code>
      * @return The endpointType.
      */
@@ -28874,10 +28919,6 @@ public final class ContextOuterClass {
       }
     }
     /**
-     * <pre>
-     *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-     * </pre>
-     *
      * <code>string endpoint_type = 2;</code>
      * @return The bytes for endpointType.
      */
@@ -28896,6 +28937,64 @@ public final class ContextOuterClass {
       }
     }
 
+    public static final int KPI_SAMPLE_TYPES_FIELD_NUMBER = 3;
+    private java.util.List<java.lang.Integer> kpiSampleTypes_;
+    private static final com.google.protobuf.Internal.ListAdapter.Converter<
+        java.lang.Integer, kpi_sample_types.KpiSampleTypes.KpiSampleType> kpiSampleTypes_converter_ =
+            new com.google.protobuf.Internal.ListAdapter.Converter<
+                java.lang.Integer, kpi_sample_types.KpiSampleTypes.KpiSampleType>() {
+              public kpi_sample_types.KpiSampleTypes.KpiSampleType convert(java.lang.Integer from) {
+                @SuppressWarnings("deprecation")
+                kpi_sample_types.KpiSampleTypes.KpiSampleType result = kpi_sample_types.KpiSampleTypes.KpiSampleType.valueOf(from);
+                return result == null ? kpi_sample_types.KpiSampleTypes.KpiSampleType.UNRECOGNIZED : result;
+              }
+            };
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return A list containing the kpiSampleTypes.
+     */
+    @java.lang.Override
+    public java.util.List<kpi_sample_types.KpiSampleTypes.KpiSampleType> getKpiSampleTypesList() {
+      return new com.google.protobuf.Internal.ListAdapter<
+          java.lang.Integer, kpi_sample_types.KpiSampleTypes.KpiSampleType>(kpiSampleTypes_, kpiSampleTypes_converter_);
+    }
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return The count of kpiSampleTypes.
+     */
+    @java.lang.Override
+    public int getKpiSampleTypesCount() {
+      return kpiSampleTypes_.size();
+    }
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @param index The index of the element to return.
+     * @return The kpiSampleTypes at the given index.
+     */
+    @java.lang.Override
+    public kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleTypes(int index) {
+      return kpiSampleTypes_converter_.convert(kpiSampleTypes_.get(index));
+    }
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @return A list containing the enum numeric values on the wire for kpiSampleTypes.
+     */
+    @java.lang.Override
+    public java.util.List<java.lang.Integer>
+    getKpiSampleTypesValueList() {
+      return kpiSampleTypes_;
+    }
+    /**
+     * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+     * @param index The index of the value to return.
+     * @return The enum numeric value on the wire of kpiSampleTypes at the given index.
+     */
+    @java.lang.Override
+    public int getKpiSampleTypesValue(int index) {
+      return kpiSampleTypes_.get(index);
+    }
+    private int kpiSampleTypesMemoizedSerializedSize;
+
     private byte memoizedIsInitialized = -1;
     @java.lang.Override
     public final boolean isInitialized() {
@@ -28910,12 +29009,20 @@ public final class ContextOuterClass {
     @java.lang.Override
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
+      getSerializedSize();
       if (endpointId_ != null) {
         output.writeMessage(1, getEndpointId());
       }
       if (!getEndpointTypeBytes().isEmpty()) {
         com.google.protobuf.GeneratedMessageV3.writeString(output, 2, endpointType_);
       }
+      if (getKpiSampleTypesList().size() > 0) {
+        output.writeUInt32NoTag(26);
+        output.writeUInt32NoTag(kpiSampleTypesMemoizedSerializedSize);
+      }
+      for (int i = 0; i < kpiSampleTypes_.size(); i++) {
+        output.writeEnumNoTag(kpiSampleTypes_.get(i));
+      }
       unknownFields.writeTo(output);
     }
 
@@ -28932,6 +29039,18 @@ public final class ContextOuterClass {
       if (!getEndpointTypeBytes().isEmpty()) {
         size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, endpointType_);
       }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < kpiSampleTypes_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeEnumSizeNoTag(kpiSampleTypes_.get(i));
+        }
+        size += dataSize;
+        if (!getKpiSampleTypesList().isEmpty()) {  size += 1;
+          size += com.google.protobuf.CodedOutputStream
+            .computeUInt32SizeNoTag(dataSize);
+        }kpiSampleTypesMemoizedSerializedSize = dataSize;
+      }
       size += unknownFields.getSerializedSize();
       memoizedSize = size;
       return size;
@@ -28954,6 +29073,7 @@ public final class ContextOuterClass {
       }
       if (!getEndpointType()
           .equals(other.getEndpointType())) return false;
+      if (!kpiSampleTypes_.equals(other.kpiSampleTypes_)) return false;
       if (!unknownFields.equals(other.unknownFields)) return false;
       return true;
     }
@@ -28971,6 +29091,10 @@ public final class ContextOuterClass {
       }
       hash = (37 * hash) + ENDPOINT_TYPE_FIELD_NUMBER;
       hash = (53 * hash) + getEndpointType().hashCode();
+      if (getKpiSampleTypesCount() > 0) {
+        hash = (37 * hash) + KPI_SAMPLE_TYPES_FIELD_NUMBER;
+        hash = (53 * hash) + kpiSampleTypes_.hashCode();
+      }
       hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -29112,6 +29236,8 @@ public final class ContextOuterClass {
         }
         endpointType_ = "";
 
+        kpiSampleTypes_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
 
@@ -29138,12 +29264,18 @@ public final class ContextOuterClass {
       @java.lang.Override
       public context.ContextOuterClass.EndPoint buildPartial() {
         context.ContextOuterClass.EndPoint result = new context.ContextOuterClass.EndPoint(this);
+        int from_bitField0_ = bitField0_;
         if (endpointIdBuilder_ == null) {
           result.endpointId_ = endpointId_;
         } else {
           result.endpointId_ = endpointIdBuilder_.build();
         }
         result.endpointType_ = endpointType_;
+        if (((bitField0_ & 0x00000001) != 0)) {
+          kpiSampleTypes_ = java.util.Collections.unmodifiableList(kpiSampleTypes_);
+          bitField0_ = (bitField0_ & ~0x00000001);
+        }
+        result.kpiSampleTypes_ = kpiSampleTypes_;
         onBuilt();
         return result;
       }
@@ -29199,6 +29331,16 @@ public final class ContextOuterClass {
           endpointType_ = other.endpointType_;
           onChanged();
         }
+        if (!other.kpiSampleTypes_.isEmpty()) {
+          if (kpiSampleTypes_.isEmpty()) {
+            kpiSampleTypes_ = other.kpiSampleTypes_;
+            bitField0_ = (bitField0_ & ~0x00000001);
+          } else {
+            ensureKpiSampleTypesIsMutable();
+            kpiSampleTypes_.addAll(other.kpiSampleTypes_);
+          }
+          onChanged();
+        }
         this.mergeUnknownFields(other.unknownFields);
         onChanged();
         return this;
@@ -29227,6 +29369,7 @@ public final class ContextOuterClass {
         }
         return this;
       }
+      private int bitField0_;
 
       private context.ContextOuterClass.EndPointId endpointId_;
       private com.google.protobuf.SingleFieldBuilderV3<
@@ -29349,10 +29492,6 @@ public final class ContextOuterClass {
 
       private java.lang.Object endpointType_ = "";
       /**
-       * <pre>
-       *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-       * </pre>
-       *
        * <code>string endpoint_type = 2;</code>
        * @return The endpointType.
        */
@@ -29369,10 +29508,6 @@ public final class ContextOuterClass {
         }
       }
       /**
-       * <pre>
-       *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-       * </pre>
-       *
        * <code>string endpoint_type = 2;</code>
        * @return The bytes for endpointType.
        */
@@ -29390,10 +29525,6 @@ public final class ContextOuterClass {
         }
       }
       /**
-       * <pre>
-       *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-       * </pre>
-       *
        * <code>string endpoint_type = 2;</code>
        * @param value The endpointType to set.
        * @return This builder for chaining.
@@ -29409,10 +29540,6 @@ public final class ContextOuterClass {
         return this;
       }
       /**
-       * <pre>
-       *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-       * </pre>
-       *
        * <code>string endpoint_type = 2;</code>
        * @return This builder for chaining.
        */
@@ -29423,10 +29550,6 @@ public final class ContextOuterClass {
         return this;
       }
       /**
-       * <pre>
-       *repeated kpi_sample_types.KpiSampleType kpi_sample_types = 3;
-       * </pre>
-       *
        * <code>string endpoint_type = 2;</code>
        * @param value The bytes for endpointType to set.
        * @return This builder for chaining.
@@ -29442,6 +29565,146 @@ public final class ContextOuterClass {
         onChanged();
         return this;
       }
+
+      private java.util.List<java.lang.Integer> kpiSampleTypes_ =
+        java.util.Collections.emptyList();
+      private void ensureKpiSampleTypesIsMutable() {
+        if (!((bitField0_ & 0x00000001) != 0)) {
+          kpiSampleTypes_ = new java.util.ArrayList<java.lang.Integer>(kpiSampleTypes_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @return A list containing the kpiSampleTypes.
+       */
+      public java.util.List<kpi_sample_types.KpiSampleTypes.KpiSampleType> getKpiSampleTypesList() {
+        return new com.google.protobuf.Internal.ListAdapter<
+            java.lang.Integer, kpi_sample_types.KpiSampleTypes.KpiSampleType>(kpiSampleTypes_, kpiSampleTypes_converter_);
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @return The count of kpiSampleTypes.
+       */
+      public int getKpiSampleTypesCount() {
+        return kpiSampleTypes_.size();
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param index The index of the element to return.
+       * @return The kpiSampleTypes at the given index.
+       */
+      public kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleTypes(int index) {
+        return kpiSampleTypes_converter_.convert(kpiSampleTypes_.get(index));
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param index The index to set the value at.
+       * @param value The kpiSampleTypes to set.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiSampleTypes(
+          int index, kpi_sample_types.KpiSampleTypes.KpiSampleType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureKpiSampleTypesIsMutable();
+        kpiSampleTypes_.set(index, value.getNumber());
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param value The kpiSampleTypes to add.
+       * @return This builder for chaining.
+       */
+      public Builder addKpiSampleTypes(kpi_sample_types.KpiSampleTypes.KpiSampleType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureKpiSampleTypesIsMutable();
+        kpiSampleTypes_.add(value.getNumber());
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param values The kpiSampleTypes to add.
+       * @return This builder for chaining.
+       */
+      public Builder addAllKpiSampleTypes(
+          java.lang.Iterable<? extends kpi_sample_types.KpiSampleTypes.KpiSampleType> values) {
+        ensureKpiSampleTypesIsMutable();
+        for (kpi_sample_types.KpiSampleTypes.KpiSampleType value : values) {
+          kpiSampleTypes_.add(value.getNumber());
+        }
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearKpiSampleTypes() {
+        kpiSampleTypes_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00000001);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @return A list containing the enum numeric values on the wire for kpiSampleTypes.
+       */
+      public java.util.List<java.lang.Integer>
+      getKpiSampleTypesValueList() {
+        return java.util.Collections.unmodifiableList(kpiSampleTypes_);
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param index The index of the value to return.
+       * @return The enum numeric value on the wire of kpiSampleTypes at the given index.
+       */
+      public int getKpiSampleTypesValue(int index) {
+        return kpiSampleTypes_.get(index);
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param index The index of the value to return.
+       * @return The enum numeric value on the wire of kpiSampleTypes at the given index.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiSampleTypesValue(
+          int index, int value) {
+        ensureKpiSampleTypesIsMutable();
+        kpiSampleTypes_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param value The enum numeric value on the wire for kpiSampleTypes to add.
+       * @return This builder for chaining.
+       */
+      public Builder addKpiSampleTypesValue(int value) {
+        ensureKpiSampleTypesIsMutable();
+        kpiSampleTypes_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .kpi_sample_types.KpiSampleType kpi_sample_types = 3;</code>
+       * @param values The enum numeric values on the wire for kpiSampleTypes to add.
+       * @return This builder for chaining.
+       */
+      public Builder addAllKpiSampleTypesValue(
+          java.lang.Iterable<java.lang.Integer> values) {
+        ensureKpiSampleTypesIsMutable();
+        for (int value : values) {
+          kpiSampleTypes_.add(value);
+        }
+        onChanged();
+        return this;
+      }
       @java.lang.Override
       public final Builder setUnknownFields(
           final com.google.protobuf.UnknownFieldSet unknownFields) {
@@ -36215,160 +36478,164 @@ public final class ContextOuterClass {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\rcontext.proto\022\007context\"\007\n\005Empty\"\024\n\004Uui" +
-      "d\022\014\n\004uuid\030\001 \001(\t\"F\n\005Event\022\021\n\ttimestamp\030\001 " +
-      "\001(\001\022*\n\nevent_type\030\002 \001(\0162\026.context.EventT" +
-      "ypeEnum\"0\n\tContextId\022#\n\014context_uuid\030\001 \001" +
-      "(\0132\r.context.Uuid\"\266\001\n\007Context\022&\n\ncontext" +
-      "_id\030\001 \001(\0132\022.context.ContextId\022)\n\014topolog" +
-      "y_ids\030\002 \003(\0132\023.context.TopologyId\022\'\n\013serv" +
-      "ice_ids\030\003 \003(\0132\022.context.ServiceId\022/\n\ncon" +
-      "troller\030\004 \001(\0132\033.context.TeraFlowControll" +
-      "er\"8\n\rContextIdList\022\'\n\013context_ids\030\001 \003(\013" +
-      "2\022.context.ContextId\"1\n\013ContextList\022\"\n\010c" +
-      "ontexts\030\001 \003(\0132\020.context.Context\"U\n\014Conte" +
-      "xtEvent\022\035\n\005event\030\001 \001(\0132\016.context.Event\022&" +
-      "\n\ncontext_id\030\002 \001(\0132\022.context.ContextId\"Z" +
-      "\n\nTopologyId\022&\n\ncontext_id\030\001 \001(\0132\022.conte" +
-      "xt.ContextId\022$\n\rtopology_uuid\030\002 \001(\0132\r.co" +
-      "ntext.Uuid\"~\n\010Topology\022(\n\013topology_id\030\001 " +
-      "\001(\0132\023.context.TopologyId\022%\n\ndevice_ids\030\002" +
-      " \003(\0132\021.context.DeviceId\022!\n\010link_ids\030\003 \003(" +
-      "\0132\017.context.LinkId\";\n\016TopologyIdList\022)\n\014" +
-      "topology_ids\030\001 \003(\0132\023.context.TopologyId\"" +
-      "5\n\014TopologyList\022%\n\ntopologies\030\001 \003(\0132\021.co" +
-      "ntext.Topology\"X\n\rTopologyEvent\022\035\n\005event" +
-      "\030\001 \001(\0132\016.context.Event\022(\n\013topology_id\030\002 " +
-      "\001(\0132\023.context.TopologyId\".\n\010DeviceId\022\"\n\013" +
-      "device_uuid\030\001 \001(\0132\r.context.Uuid\"\232\002\n\006Dev" +
-      "ice\022$\n\tdevice_id\030\001 \001(\0132\021.context.DeviceI" +
-      "d\022\023\n\013device_type\030\002 \001(\t\022,\n\rdevice_config\030" +
-      "\003 \001(\0132\025.context.DeviceConfig\022G\n\031device_o" +
-      "perational_status\030\004 \001(\0162$.context.Device" +
-      "OperationalStatusEnum\0221\n\016device_drivers\030" +
-      "\005 \003(\0162\031.context.DeviceDriverEnum\022+\n\020devi" +
-      "ce_endpoints\030\006 \003(\0132\021.context.EndPoint\"9\n" +
-      "\014DeviceConfig\022)\n\014config_rules\030\001 \003(\0132\023.co" +
-      "ntext.ConfigRule\"5\n\014DeviceIdList\022%\n\ndevi" +
-      "ce_ids\030\001 \003(\0132\021.context.DeviceId\".\n\nDevic" +
-      "eList\022 \n\007devices\030\001 \003(\0132\017.context.Device\"" +
-      "R\n\013DeviceEvent\022\035\n\005event\030\001 \001(\0132\016.context." +
-      "Event\022$\n\tdevice_id\030\002 \001(\0132\021.context.Devic" +
-      "eId\"*\n\006LinkId\022 \n\tlink_uuid\030\001 \001(\0132\r.conte" +
-      "xt.Uuid\"X\n\004Link\022 \n\007link_id\030\001 \001(\0132\017.conte" +
-      "xt.LinkId\022.\n\021link_endpoint_ids\030\002 \003(\0132\023.c" +
-      "ontext.EndPointId\"/\n\nLinkIdList\022!\n\010link_" +
-      "ids\030\001 \003(\0132\017.context.LinkId\"(\n\010LinkList\022\034" +
-      "\n\005links\030\001 \003(\0132\r.context.Link\"L\n\tLinkEven" +
-      "t\022\035\n\005event\030\001 \001(\0132\016.context.Event\022 \n\007link" +
-      "_id\030\002 \001(\0132\017.context.LinkId\"X\n\tServiceId\022" +
-      "&\n\ncontext_id\030\001 \001(\0132\022.context.ContextId\022" +
-      "#\n\014service_uuid\030\002 \001(\0132\r.context.Uuid\"\246\002\n" +
-      "\007Service\022&\n\nservice_id\030\001 \001(\0132\022.context.S" +
-      "erviceId\022.\n\014service_type\030\002 \001(\0162\030.context" +
-      ".ServiceTypeEnum\0221\n\024service_endpoint_ids" +
-      "\030\003 \003(\0132\023.context.EndPointId\0220\n\023service_c" +
-      "onstraints\030\004 \003(\0132\023.context.Constraint\022.\n" +
-      "\016service_status\030\005 \001(\0132\026.context.ServiceS" +
-      "tatus\022.\n\016service_config\030\006 \001(\0132\026.context." +
-      "ServiceConfig\"C\n\rServiceStatus\0222\n\016servic" +
-      "e_status\030\001 \001(\0162\032.context.ServiceStatusEn" +
-      "um\":\n\rServiceConfig\022)\n\014config_rules\030\001 \003(" +
-      "\0132\023.context.ConfigRule\"8\n\rServiceIdList\022" +
-      "\'\n\013service_ids\030\001 \003(\0132\022.context.ServiceId" +
-      "\"1\n\013ServiceList\022\"\n\010services\030\001 \003(\0132\020.cont" +
-      "ext.Service\"U\n\014ServiceEvent\022\035\n\005event\030\001 \001" +
-      "(\0132\016.context.Event\022&\n\nservice_id\030\002 \001(\0132\022" +
-      ".context.ServiceId\"\202\001\n\nEndPointId\022(\n\013top" +
-      "ology_id\030\001 \001(\0132\023.context.TopologyId\022$\n\td" +
-      "evice_id\030\002 \001(\0132\021.context.DeviceId\022$\n\rend" +
-      "point_uuid\030\003 \001(\0132\r.context.Uuid\"K\n\010EndPo" +
-      "int\022(\n\013endpoint_id\030\001 \001(\0132\023.context.EndPo" +
-      "intId\022\025\n\rendpoint_type\030\002 \001(\t\"e\n\nConfigRu" +
-      "le\022)\n\006action\030\001 \001(\0162\031.context.ConfigActio" +
-      "nEnum\022\024\n\014resource_key\030\002 \001(\t\022\026\n\016resource_" +
-      "value\030\003 \001(\t\"?\n\nConstraint\022\027\n\017constraint_" +
-      "type\030\001 \001(\t\022\030\n\020constraint_value\030\002 \001(\t\"6\n\014" +
-      "ConnectionId\022&\n\017connection_uuid\030\001 \001(\0132\r." +
-      "context.Uuid\"\215\001\n\nConnection\022,\n\rconnectio" +
-      "n_id\030\001 \001(\0132\025.context.ConnectionId\022.\n\022rel" +
-      "ated_service_id\030\002 \001(\0132\022.context.ServiceI" +
-      "d\022!\n\004path\030\003 \003(\0132\023.context.EndPointId\"A\n\020" +
-      "ConnectionIdList\022-\n\016connection_ids\030\001 \003(\013" +
-      "2\025.context.ConnectionId\":\n\016ConnectionLis" +
-      "t\022(\n\013connections\030\001 \003(\0132\023.context.Connect" +
-      "ion\"^\n\022TeraFlowController\022&\n\ncontext_id\030" +
-      "\001 \001(\0132\022.context.ContextId\022\022\n\nip_address\030" +
-      "\002 \001(\t\022\014\n\004port\030\003 \001(\r\"U\n\024AuthenticationRes" +
-      "ult\022&\n\ncontext_id\030\001 \001(\0132\022.context.Contex" +
-      "tId\022\025\n\rauthenticated\030\002 \001(\010*j\n\rEventTypeE" +
-      "num\022\027\n\023EVENTTYPE_UNDEFINED\020\000\022\024\n\020EVENTTYP" +
-      "E_CREATE\020\001\022\024\n\020EVENTTYPE_UPDATE\020\002\022\024\n\020EVEN" +
-      "TTYPE_REMOVE\020\003*\305\001\n\020DeviceDriverEnum\022\032\n\026D" +
-      "EVICEDRIVER_UNDEFINED\020\000\022\033\n\027DEVICEDRIVER_" +
-      "OPENCONFIG\020\001\022\036\n\032DEVICEDRIVER_TRANSPORT_A" +
-      "PI\020\002\022\023\n\017DEVICEDRIVER_P4\020\003\022&\n\"DEVICEDRIVE" +
-      "R_IETF_NETWORK_TOPOLOGY\020\004\022\033\n\027DEVICEDRIVE" +
-      "R_ONF_TR_352\020\005*\217\001\n\033DeviceOperationalStat" +
-      "usEnum\022%\n!DEVICEOPERATIONALSTATUS_UNDEFI" +
-      "NED\020\000\022$\n DEVICEOPERATIONALSTATUS_DISABLE" +
-      "D\020\001\022#\n\037DEVICEOPERATIONALSTATUS_ENABLED\020\002" +
-      "*\201\001\n\017ServiceTypeEnum\022\027\n\023SERVICETYPE_UNKN" +
-      "OWN\020\000\022\024\n\020SERVICETYPE_L3NM\020\001\022\024\n\020SERVICETY" +
-      "PE_L2NM\020\002\022)\n%SERVICETYPE_TAPI_CONNECTIVI" +
-      "TY_SERVICE\020\003*\210\001\n\021ServiceStatusEnum\022\033\n\027SE" +
-      "RVICESTATUS_UNDEFINED\020\000\022\031\n\025SERVICESTATUS" +
-      "_PLANNED\020\001\022\030\n\024SERVICESTATUS_ACTIVE\020\002\022!\n\035" +
-      "SERVICESTATUS_PENDING_REMOVAL\020\003*]\n\020Confi" +
-      "gActionEnum\022\032\n\026CONFIGACTION_UNDEFINED\020\000\022" +
-      "\024\n\020CONFIGACTION_SET\020\001\022\027\n\023CONFIGACTION_DE" +
-      "LETE\020\0022\245\r\n\016ContextService\022:\n\016ListContext" +
-      "Ids\022\016.context.Empty\032\026.context.ContextIdL" +
-      "ist\"\000\0226\n\014ListContexts\022\016.context.Empty\032\024." +
-      "context.ContextList\"\000\0224\n\nGetContext\022\022.co" +
-      "ntext.ContextId\032\020.context.Context\"\000\0224\n\nS" +
-      "etContext\022\020.context.Context\032\022.context.Co" +
-      "ntextId\"\000\0225\n\rRemoveContext\022\022.context.Con" +
-      "textId\032\016.context.Empty\"\000\022=\n\020GetContextEv" +
-      "ents\022\016.context.Empty\032\025.context.ContextEv" +
-      "ent\"\0000\001\022@\n\017ListTopologyIds\022\022.context.Con" +
-      "textId\032\027.context.TopologyIdList\"\000\022=\n\016Lis" +
-      "tTopologies\022\022.context.ContextId\032\025.contex" +
-      "t.TopologyList\"\000\0227\n\013GetTopology\022\023.contex" +
-      "t.TopologyId\032\021.context.Topology\"\000\0227\n\013Set" +
-      "Topology\022\021.context.Topology\032\023.context.To" +
-      "pologyId\"\000\0227\n\016RemoveTopology\022\023.context.T" +
-      "opologyId\032\016.context.Empty\"\000\022?\n\021GetTopolo" +
-      "gyEvents\022\016.context.Empty\032\026.context.Topol" +
-      "ogyEvent\"\0000\001\0228\n\rListDeviceIds\022\016.context." +
-      "Empty\032\025.context.DeviceIdList\"\000\0224\n\013ListDe" +
-      "vices\022\016.context.Empty\032\023.context.DeviceLi" +
-      "st\"\000\0221\n\tGetDevice\022\021.context.DeviceId\032\017.c" +
-      "ontext.Device\"\000\0221\n\tSetDevice\022\017.context.D" +
-      "evice\032\021.context.DeviceId\"\000\0223\n\014RemoveDevi" +
-      "ce\022\021.context.DeviceId\032\016.context.Empty\"\000\022" +
-      ";\n\017GetDeviceEvents\022\016.context.Empty\032\024.con" +
-      "text.DeviceEvent\"\0000\001\0224\n\013ListLinkIds\022\016.co" +
-      "ntext.Empty\032\023.context.LinkIdList\"\000\0220\n\tLi" +
-      "stLinks\022\016.context.Empty\032\021.context.LinkLi" +
-      "st\"\000\022+\n\007GetLink\022\017.context.LinkId\032\r.conte" +
-      "xt.Link\"\000\022+\n\007SetLink\022\r.context.Link\032\017.co" +
-      "ntext.LinkId\"\000\022/\n\nRemoveLink\022\017.context.L" +
-      "inkId\032\016.context.Empty\"\000\0227\n\rGetLinkEvents" +
-      "\022\016.context.Empty\032\022.context.LinkEvent\"\0000\001" +
-      "\022>\n\016ListServiceIds\022\022.context.ContextId\032\026" +
-      ".context.ServiceIdList\"\000\022:\n\014ListServices" +
-      "\022\022.context.ContextId\032\024.context.ServiceLi" +
-      "st\"\000\0224\n\nGetService\022\022.context.ServiceId\032\020" +
-      ".context.Service\"\000\0224\n\nSetService\022\020.conte" +
-      "xt.Service\032\022.context.ServiceId\"\000\0225\n\rRemo" +
-      "veService\022\022.context.ServiceId\032\016.context." +
-      "Empty\"\000\022=\n\020GetServiceEvents\022\016.context.Em" +
-      "pty\032\025.context.ServiceEvent\"\0000\001b\006proto3"
+      "\n\rcontext.proto\022\007context\032\026kpi_sample_typ" +
+      "es.proto\"\007\n\005Empty\"\024\n\004Uuid\022\014\n\004uuid\030\001 \001(\t\"" +
+      "F\n\005Event\022\021\n\ttimestamp\030\001 \001(\001\022*\n\nevent_typ" +
+      "e\030\002 \001(\0162\026.context.EventTypeEnum\"0\n\tConte" +
+      "xtId\022#\n\014context_uuid\030\001 \001(\0132\r.context.Uui" +
+      "d\"\266\001\n\007Context\022&\n\ncontext_id\030\001 \001(\0132\022.cont" +
+      "ext.ContextId\022)\n\014topology_ids\030\002 \003(\0132\023.co" +
+      "ntext.TopologyId\022\'\n\013service_ids\030\003 \003(\0132\022." +
+      "context.ServiceId\022/\n\ncontroller\030\004 \001(\0132\033." +
+      "context.TeraFlowController\"8\n\rContextIdL" +
+      "ist\022\'\n\013context_ids\030\001 \003(\0132\022.context.Conte" +
+      "xtId\"1\n\013ContextList\022\"\n\010contexts\030\001 \003(\0132\020." +
+      "context.Context\"U\n\014ContextEvent\022\035\n\005event" +
+      "\030\001 \001(\0132\016.context.Event\022&\n\ncontext_id\030\002 \001" +
+      "(\0132\022.context.ContextId\"Z\n\nTopologyId\022&\n\n" +
+      "context_id\030\001 \001(\0132\022.context.ContextId\022$\n\r" +
+      "topology_uuid\030\002 \001(\0132\r.context.Uuid\"~\n\010To" +
+      "pology\022(\n\013topology_id\030\001 \001(\0132\023.context.To" +
+      "pologyId\022%\n\ndevice_ids\030\002 \003(\0132\021.context.D" +
+      "eviceId\022!\n\010link_ids\030\003 \003(\0132\017.context.Link" +
+      "Id\";\n\016TopologyIdList\022)\n\014topology_ids\030\001 \003" +
+      "(\0132\023.context.TopologyId\"5\n\014TopologyList\022" +
+      "%\n\ntopologies\030\001 \003(\0132\021.context.Topology\"X" +
+      "\n\rTopologyEvent\022\035\n\005event\030\001 \001(\0132\016.context" +
+      ".Event\022(\n\013topology_id\030\002 \001(\0132\023.context.To" +
+      "pologyId\".\n\010DeviceId\022\"\n\013device_uuid\030\001 \001(" +
+      "\0132\r.context.Uuid\"\232\002\n\006Device\022$\n\tdevice_id" +
+      "\030\001 \001(\0132\021.context.DeviceId\022\023\n\013device_type" +
+      "\030\002 \001(\t\022,\n\rdevice_config\030\003 \001(\0132\025.context." +
+      "DeviceConfig\022G\n\031device_operational_statu" +
+      "s\030\004 \001(\0162$.context.DeviceOperationalStatu" +
+      "sEnum\0221\n\016device_drivers\030\005 \003(\0162\031.context." +
+      "DeviceDriverEnum\022+\n\020device_endpoints\030\006 \003" +
+      "(\0132\021.context.EndPoint\"9\n\014DeviceConfig\022)\n" +
+      "\014config_rules\030\001 \003(\0132\023.context.ConfigRule" +
+      "\"5\n\014DeviceIdList\022%\n\ndevice_ids\030\001 \003(\0132\021.c" +
+      "ontext.DeviceId\".\n\nDeviceList\022 \n\007devices" +
+      "\030\001 \003(\0132\017.context.Device\"R\n\013DeviceEvent\022\035" +
+      "\n\005event\030\001 \001(\0132\016.context.Event\022$\n\tdevice_" +
+      "id\030\002 \001(\0132\021.context.DeviceId\"*\n\006LinkId\022 \n" +
+      "\tlink_uuid\030\001 \001(\0132\r.context.Uuid\"X\n\004Link\022" +
+      " \n\007link_id\030\001 \001(\0132\017.context.LinkId\022.\n\021lin" +
+      "k_endpoint_ids\030\002 \003(\0132\023.context.EndPointI" +
+      "d\"/\n\nLinkIdList\022!\n\010link_ids\030\001 \003(\0132\017.cont" +
+      "ext.LinkId\"(\n\010LinkList\022\034\n\005links\030\001 \003(\0132\r." +
+      "context.Link\"L\n\tLinkEvent\022\035\n\005event\030\001 \001(\013" +
+      "2\016.context.Event\022 \n\007link_id\030\002 \001(\0132\017.cont" +
+      "ext.LinkId\"X\n\tServiceId\022&\n\ncontext_id\030\001 " +
+      "\001(\0132\022.context.ContextId\022#\n\014service_uuid\030" +
+      "\002 \001(\0132\r.context.Uuid\"\246\002\n\007Service\022&\n\nserv" +
+      "ice_id\030\001 \001(\0132\022.context.ServiceId\022.\n\014serv" +
+      "ice_type\030\002 \001(\0162\030.context.ServiceTypeEnum" +
+      "\0221\n\024service_endpoint_ids\030\003 \003(\0132\023.context" +
+      ".EndPointId\0220\n\023service_constraints\030\004 \003(\013" +
+      "2\023.context.Constraint\022.\n\016service_status\030" +
+      "\005 \001(\0132\026.context.ServiceStatus\022.\n\016service" +
+      "_config\030\006 \001(\0132\026.context.ServiceConfig\"C\n" +
+      "\rServiceStatus\0222\n\016service_status\030\001 \001(\0162\032" +
+      ".context.ServiceStatusEnum\":\n\rServiceCon" +
+      "fig\022)\n\014config_rules\030\001 \003(\0132\023.context.Conf" +
+      "igRule\"8\n\rServiceIdList\022\'\n\013service_ids\030\001" +
+      " \003(\0132\022.context.ServiceId\"1\n\013ServiceList\022" +
+      "\"\n\010services\030\001 \003(\0132\020.context.Service\"U\n\014S" +
+      "erviceEvent\022\035\n\005event\030\001 \001(\0132\016.context.Eve" +
+      "nt\022&\n\nservice_id\030\002 \001(\0132\022.context.Service" +
+      "Id\"\202\001\n\nEndPointId\022(\n\013topology_id\030\001 \001(\0132\023" +
+      ".context.TopologyId\022$\n\tdevice_id\030\002 \001(\0132\021" +
+      ".context.DeviceId\022$\n\rendpoint_uuid\030\003 \001(\013" +
+      "2\r.context.Uuid\"\206\001\n\010EndPoint\022(\n\013endpoint" +
+      "_id\030\001 \001(\0132\023.context.EndPointId\022\025\n\rendpoi" +
+      "nt_type\030\002 \001(\t\0229\n\020kpi_sample_types\030\003 \003(\0162" +
+      "\037.kpi_sample_types.KpiSampleType\"e\n\nConf" +
+      "igRule\022)\n\006action\030\001 \001(\0162\031.context.ConfigA" +
+      "ctionEnum\022\024\n\014resource_key\030\002 \001(\t\022\026\n\016resou" +
+      "rce_value\030\003 \001(\t\"?\n\nConstraint\022\027\n\017constra" +
+      "int_type\030\001 \001(\t\022\030\n\020constraint_value\030\002 \001(\t" +
+      "\"6\n\014ConnectionId\022&\n\017connection_uuid\030\001 \001(" +
+      "\0132\r.context.Uuid\"\215\001\n\nConnection\022,\n\rconne" +
+      "ction_id\030\001 \001(\0132\025.context.ConnectionId\022.\n" +
+      "\022related_service_id\030\002 \001(\0132\022.context.Serv" +
+      "iceId\022!\n\004path\030\003 \003(\0132\023.context.EndPointId" +
+      "\"A\n\020ConnectionIdList\022-\n\016connection_ids\030\001" +
+      " \003(\0132\025.context.ConnectionId\":\n\016Connectio" +
+      "nList\022(\n\013connections\030\001 \003(\0132\023.context.Con" +
+      "nection\"^\n\022TeraFlowController\022&\n\ncontext" +
+      "_id\030\001 \001(\0132\022.context.ContextId\022\022\n\nip_addr" +
+      "ess\030\002 \001(\t\022\014\n\004port\030\003 \001(\r\"U\n\024Authenticatio" +
+      "nResult\022&\n\ncontext_id\030\001 \001(\0132\022.context.Co" +
+      "ntextId\022\025\n\rauthenticated\030\002 \001(\010*j\n\rEventT" +
+      "ypeEnum\022\027\n\023EVENTTYPE_UNDEFINED\020\000\022\024\n\020EVEN" +
+      "TTYPE_CREATE\020\001\022\024\n\020EVENTTYPE_UPDATE\020\002\022\024\n\020" +
+      "EVENTTYPE_REMOVE\020\003*\305\001\n\020DeviceDriverEnum\022" +
+      "\032\n\026DEVICEDRIVER_UNDEFINED\020\000\022\033\n\027DEVICEDRI" +
+      "VER_OPENCONFIG\020\001\022\036\n\032DEVICEDRIVER_TRANSPO" +
+      "RT_API\020\002\022\023\n\017DEVICEDRIVER_P4\020\003\022&\n\"DEVICED" +
+      "RIVER_IETF_NETWORK_TOPOLOGY\020\004\022\033\n\027DEVICED" +
+      "RIVER_ONF_TR_352\020\005*\217\001\n\033DeviceOperational" +
+      "StatusEnum\022%\n!DEVICEOPERATIONALSTATUS_UN" +
+      "DEFINED\020\000\022$\n DEVICEOPERATIONALSTATUS_DIS" +
+      "ABLED\020\001\022#\n\037DEVICEOPERATIONALSTATUS_ENABL" +
+      "ED\020\002*\201\001\n\017ServiceTypeEnum\022\027\n\023SERVICETYPE_" +
+      "UNKNOWN\020\000\022\024\n\020SERVICETYPE_L3NM\020\001\022\024\n\020SERVI" +
+      "CETYPE_L2NM\020\002\022)\n%SERVICETYPE_TAPI_CONNEC" +
+      "TIVITY_SERVICE\020\003*\210\001\n\021ServiceStatusEnum\022\033" +
+      "\n\027SERVICESTATUS_UNDEFINED\020\000\022\031\n\025SERVICEST" +
+      "ATUS_PLANNED\020\001\022\030\n\024SERVICESTATUS_ACTIVE\020\002" +
+      "\022!\n\035SERVICESTATUS_PENDING_REMOVAL\020\003*]\n\020C" +
+      "onfigActionEnum\022\032\n\026CONFIGACTION_UNDEFINE" +
+      "D\020\000\022\024\n\020CONFIGACTION_SET\020\001\022\027\n\023CONFIGACTIO" +
+      "N_DELETE\020\0022\245\r\n\016ContextService\022:\n\016ListCon" +
+      "textIds\022\016.context.Empty\032\026.context.Contex" +
+      "tIdList\"\000\0226\n\014ListContexts\022\016.context.Empt" +
+      "y\032\024.context.ContextList\"\000\0224\n\nGetContext\022" +
+      "\022.context.ContextId\032\020.context.Context\"\000\022" +
+      "4\n\nSetContext\022\020.context.Context\032\022.contex" +
+      "t.ContextId\"\000\0225\n\rRemoveContext\022\022.context" +
+      ".ContextId\032\016.context.Empty\"\000\022=\n\020GetConte" +
+      "xtEvents\022\016.context.Empty\032\025.context.Conte" +
+      "xtEvent\"\0000\001\022@\n\017ListTopologyIds\022\022.context" +
+      ".ContextId\032\027.context.TopologyIdList\"\000\022=\n" +
+      "\016ListTopologies\022\022.context.ContextId\032\025.co" +
+      "ntext.TopologyList\"\000\0227\n\013GetTopology\022\023.co" +
+      "ntext.TopologyId\032\021.context.Topology\"\000\0227\n" +
+      "\013SetTopology\022\021.context.Topology\032\023.contex" +
+      "t.TopologyId\"\000\0227\n\016RemoveTopology\022\023.conte" +
+      "xt.TopologyId\032\016.context.Empty\"\000\022?\n\021GetTo" +
+      "pologyEvents\022\016.context.Empty\032\026.context.T" +
+      "opologyEvent\"\0000\001\0228\n\rListDeviceIds\022\016.cont" +
+      "ext.Empty\032\025.context.DeviceIdList\"\000\0224\n\013Li" +
+      "stDevices\022\016.context.Empty\032\023.context.Devi" +
+      "ceList\"\000\0221\n\tGetDevice\022\021.context.DeviceId" +
+      "\032\017.context.Device\"\000\0221\n\tSetDevice\022\017.conte" +
+      "xt.Device\032\021.context.DeviceId\"\000\0223\n\014Remove" +
+      "Device\022\021.context.DeviceId\032\016.context.Empt" +
+      "y\"\000\022;\n\017GetDeviceEvents\022\016.context.Empty\032\024" +
+      ".context.DeviceEvent\"\0000\001\0224\n\013ListLinkIds\022" +
+      "\016.context.Empty\032\023.context.LinkIdList\"\000\0220" +
+      "\n\tListLinks\022\016.context.Empty\032\021.context.Li" +
+      "nkList\"\000\022+\n\007GetLink\022\017.context.LinkId\032\r.c" +
+      "ontext.Link\"\000\022+\n\007SetLink\022\r.context.Link\032" +
+      "\017.context.LinkId\"\000\022/\n\nRemoveLink\022\017.conte" +
+      "xt.LinkId\032\016.context.Empty\"\000\0227\n\rGetLinkEv" +
+      "ents\022\016.context.Empty\032\022.context.LinkEvent" +
+      "\"\0000\001\022>\n\016ListServiceIds\022\022.context.Context" +
+      "Id\032\026.context.ServiceIdList\"\000\022:\n\014ListServ" +
+      "ices\022\022.context.ContextId\032\024.context.Servi" +
+      "ceList\"\000\0224\n\nGetService\022\022.context.Service" +
+      "Id\032\020.context.Service\"\000\0224\n\nSetService\022\020.c" +
+      "ontext.Service\032\022.context.ServiceId\"\000\0225\n\r" +
+      "RemoveService\022\022.context.ServiceId\032\016.cont" +
+      "ext.Empty\"\000\022=\n\020GetServiceEvents\022\016.contex" +
+      "t.Empty\032\025.context.ServiceEvent\"\0000\001b\006prot" +
+      "o3"
     };
     descriptor = com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
+          kpi_sample_types.KpiSampleTypes.getDescriptor(),
         });
     internal_static_context_Empty_descriptor =
       getDescriptor().getMessageTypes().get(0);
@@ -36567,7 +36834,7 @@ public final class ContextOuterClass {
     internal_static_context_EndPoint_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_context_EndPoint_descriptor,
-        new java.lang.String[] { "EndpointId", "EndpointType", });
+        new java.lang.String[] { "EndpointId", "EndpointType", "KpiSampleTypes", });
     internal_static_context_ConfigRule_descriptor =
       getDescriptor().getMessageTypes().get(33);
     internal_static_context_ConfigRule_fieldAccessorTable = new
@@ -36616,6 +36883,7 @@ public final class ContextOuterClass {
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_context_AuthenticationResult_descriptor,
         new java.lang.String[] { "ContextId", "Authenticated", });
+    kpi_sample_types.KpiSampleTypes.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)
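
Note: the regenerated EndPoint message now exposes the repeated kpi_sample_types field that previously existed only as a comment on endpoint_type. The sketch below populates and reads it through the *Value accessors shown above, so no assumption is made about the concrete enum constant names generated from kpi_sample_types.proto; the wire numbers and the endpoint type are placeholders.

package eu.teraflow.automation;

import context.ContextOuterClass.EndPoint;
import kpi_sample_types.KpiSampleTypes.KpiSampleType;

// Sketch for the new repeated kpi_sample_types field on context.EndPoint.
// Values are added by their numeric wire values, so the concrete enum constant
// names from kpi_sample_types.proto are not assumed; the numbers are placeholders.
public final class EndPointExample {

    private EndPointExample() {}

    public static EndPoint build() {
        return EndPoint.newBuilder()
            .setEndpointType("optical") // placeholder endpoint type
            .addKpiSampleTypesValue(1)  // placeholder wire value
            .addKpiSampleTypesValue(2)  // placeholder wire value
            .build();
    }

    public static void print(EndPoint endpoint) {
        for (KpiSampleType type : endpoint.getKpiSampleTypesList()) {
            System.out.println("kpi_sample_type: " + type);
        }
    }
}
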
diff --git a/src/automation/target/generated-sources/grpc/device/Device.java b/src/automation/target/generated-sources/grpc/device/Device.java
index 460f29a6188215daff9a55c1039ba84c5abf3ecc..bc57d19cae53bf0540a402e9771bc87c1ecf49c5 100644
--- a/src/automation/target/generated-sources/grpc/device/Device.java
+++ b/src/automation/target/generated-sources/grpc/device/Device.java
@@ -14,6 +14,979 @@ public final class Device {
     registerAllExtensions(
         (com.google.protobuf.ExtensionRegistryLite) registry);
   }
+  public interface MonitoringSettingsOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:device.MonitoringSettings)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    boolean hasKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    monitoring.Monitoring.KpiId getKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder();
+
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     * @return Whether the kpiDescriptor field is set.
+     */
+    boolean hasKpiDescriptor();
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     * @return The kpiDescriptor.
+     */
+    monitoring.Monitoring.KpiDescriptor getKpiDescriptor();
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     */
+    monitoring.Monitoring.KpiDescriptorOrBuilder getKpiDescriptorOrBuilder();
+
+    /**
+     * <code>float sampling_duration_s = 3;</code>
+     * @return The samplingDurationS.
+     */
+    float getSamplingDurationS();
+
+    /**
+     * <code>float sampling_interval_s = 4;</code>
+     * @return The samplingIntervalS.
+     */
+    float getSamplingIntervalS();
+  }
+  /**
+   * Protobuf type {@code device.MonitoringSettings}
+   */
+  public static final class MonitoringSettings extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:device.MonitoringSettings)
+      MonitoringSettingsOrBuilder {
+  private static final long serialVersionUID = 0L;
+    // Use MonitoringSettings.newBuilder() to construct.
+    private MonitoringSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private MonitoringSettings() {
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new MonitoringSettings();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private MonitoringSettings(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              monitoring.Monitoring.KpiId.Builder subBuilder = null;
+              if (kpiId_ != null) {
+                subBuilder = kpiId_.toBuilder();
+              }
+              kpiId_ = input.readMessage(monitoring.Monitoring.KpiId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiId_);
+                kpiId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 18: {
+              monitoring.Monitoring.KpiDescriptor.Builder subBuilder = null;
+              if (kpiDescriptor_ != null) {
+                subBuilder = kpiDescriptor_.toBuilder();
+              }
+              kpiDescriptor_ = input.readMessage(monitoring.Monitoring.KpiDescriptor.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiDescriptor_);
+                kpiDescriptor_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 29: {
+
+              samplingDurationS_ = input.readFloat();
+              break;
+            }
+            case 37: {
+
+              samplingIntervalS_ = input.readFloat();
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return device.Device.internal_static_device_MonitoringSettings_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return device.Device.internal_static_device_MonitoringSettings_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              device.Device.MonitoringSettings.class, device.Device.MonitoringSettings.Builder.class);
+    }
+
+    public static final int KPI_ID_FIELD_NUMBER = 1;
+    private monitoring.Monitoring.KpiId kpiId_;
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiId() {
+      return kpiId_ != null;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiId getKpiId() {
+      return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+      return getKpiId();
+    }
+
+    public static final int KPI_DESCRIPTOR_FIELD_NUMBER = 2;
+    private monitoring.Monitoring.KpiDescriptor kpiDescriptor_;
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     * @return Whether the kpiDescriptor field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiDescriptor() {
+      return kpiDescriptor_ != null;
+    }
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     * @return The kpiDescriptor.
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiDescriptor getKpiDescriptor() {
+      return kpiDescriptor_ == null ? monitoring.Monitoring.KpiDescriptor.getDefaultInstance() : kpiDescriptor_;
+    }
+    /**
+     * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiDescriptorOrBuilder getKpiDescriptorOrBuilder() {
+      return getKpiDescriptor();
+    }
+
+    public static final int SAMPLING_DURATION_S_FIELD_NUMBER = 3;
+    private float samplingDurationS_;
+    /**
+     * <code>float sampling_duration_s = 3;</code>
+     * @return The samplingDurationS.
+     */
+    @java.lang.Override
+    public float getSamplingDurationS() {
+      return samplingDurationS_;
+    }
+
+    public static final int SAMPLING_INTERVAL_S_FIELD_NUMBER = 4;
+    private float samplingIntervalS_;
+    /**
+     * <code>float sampling_interval_s = 4;</code>
+     * @return The samplingIntervalS.
+     */
+    @java.lang.Override
+    public float getSamplingIntervalS() {
+      return samplingIntervalS_;
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (kpiId_ != null) {
+        output.writeMessage(1, getKpiId());
+      }
+      if (kpiDescriptor_ != null) {
+        output.writeMessage(2, getKpiDescriptor());
+      }
+      if (samplingDurationS_ != 0F) {
+        output.writeFloat(3, samplingDurationS_);
+      }
+      if (samplingIntervalS_ != 0F) {
+        output.writeFloat(4, samplingIntervalS_);
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (kpiId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, getKpiId());
+      }
+      if (kpiDescriptor_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(2, getKpiDescriptor());
+      }
+      if (samplingDurationS_ != 0F) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(3, samplingDurationS_);
+      }
+      if (samplingIntervalS_ != 0F) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(4, samplingIntervalS_);
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof device.Device.MonitoringSettings)) {
+        return super.equals(obj);
+      }
+      device.Device.MonitoringSettings other = (device.Device.MonitoringSettings) obj;
+
+      if (hasKpiId() != other.hasKpiId()) return false;
+      if (hasKpiId()) {
+        if (!getKpiId()
+            .equals(other.getKpiId())) return false;
+      }
+      if (hasKpiDescriptor() != other.hasKpiDescriptor()) return false;
+      if (hasKpiDescriptor()) {
+        if (!getKpiDescriptor()
+            .equals(other.getKpiDescriptor())) return false;
+      }
+      if (java.lang.Float.floatToIntBits(getSamplingDurationS())
+          != java.lang.Float.floatToIntBits(
+              other.getSamplingDurationS())) return false;
+      if (java.lang.Float.floatToIntBits(getSamplingIntervalS())
+          != java.lang.Float.floatToIntBits(
+              other.getSamplingIntervalS())) return false;
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasKpiId()) {
+        hash = (37 * hash) + KPI_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiId().hashCode();
+      }
+      if (hasKpiDescriptor()) {
+        hash = (37 * hash) + KPI_DESCRIPTOR_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiDescriptor().hashCode();
+      }
+      hash = (37 * hash) + SAMPLING_DURATION_S_FIELD_NUMBER;
+      hash = (53 * hash) + java.lang.Float.floatToIntBits(
+          getSamplingDurationS());
+      hash = (37 * hash) + SAMPLING_INTERVAL_S_FIELD_NUMBER;
+      hash = (53 * hash) + java.lang.Float.floatToIntBits(
+          getSamplingIntervalS());
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static device.Device.MonitoringSettings parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static device.Device.MonitoringSettings parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static device.Device.MonitoringSettings parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static device.Device.MonitoringSettings parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static device.Device.MonitoringSettings parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static device.Device.MonitoringSettings parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(device.Device.MonitoringSettings prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code device.MonitoringSettings}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:device.MonitoringSettings)
+        device.Device.MonitoringSettingsOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return device.Device.internal_static_device_MonitoringSettings_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return device.Device.internal_static_device_MonitoringSettings_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                device.Device.MonitoringSettings.class, device.Device.MonitoringSettings.Builder.class);
+      }
+
+      // Construct using device.Device.MonitoringSettings.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+        if (kpiDescriptorBuilder_ == null) {
+          kpiDescriptor_ = null;
+        } else {
+          kpiDescriptor_ = null;
+          kpiDescriptorBuilder_ = null;
+        }
+        samplingDurationS_ = 0F;
+
+        samplingIntervalS_ = 0F;
+
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return device.Device.internal_static_device_MonitoringSettings_descriptor;
+      }
+
+      @java.lang.Override
+      public device.Device.MonitoringSettings getDefaultInstanceForType() {
+        return device.Device.MonitoringSettings.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public device.Device.MonitoringSettings build() {
+        device.Device.MonitoringSettings result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public device.Device.MonitoringSettings buildPartial() {
+        device.Device.MonitoringSettings result = new device.Device.MonitoringSettings(this);
+        if (kpiIdBuilder_ == null) {
+          result.kpiId_ = kpiId_;
+        } else {
+          result.kpiId_ = kpiIdBuilder_.build();
+        }
+        if (kpiDescriptorBuilder_ == null) {
+          result.kpiDescriptor_ = kpiDescriptor_;
+        } else {
+          result.kpiDescriptor_ = kpiDescriptorBuilder_.build();
+        }
+        result.samplingDurationS_ = samplingDurationS_;
+        result.samplingIntervalS_ = samplingIntervalS_;
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof device.Device.MonitoringSettings) {
+          return mergeFrom((device.Device.MonitoringSettings)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(device.Device.MonitoringSettings other) {
+        if (other == device.Device.MonitoringSettings.getDefaultInstance()) return this;
+        if (other.hasKpiId()) {
+          mergeKpiId(other.getKpiId());
+        }
+        if (other.hasKpiDescriptor()) {
+          mergeKpiDescriptor(other.getKpiDescriptor());
+        }
+        if (other.getSamplingDurationS() != 0F) {
+          setSamplingDurationS(other.getSamplingDurationS());
+        }
+        if (other.getSamplingIntervalS() != 0F) {
+          setSamplingIntervalS(other.getSamplingIntervalS());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        device.Device.MonitoringSettings parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (device.Device.MonitoringSettings) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      private monitoring.Monitoring.KpiId kpiId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> kpiIdBuilder_;
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return Whether the kpiId field is set.
+       */
+      public boolean hasKpiId() {
+        return kpiIdBuilder_ != null || kpiId_ != null;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return The kpiId.
+       */
+      public monitoring.Monitoring.KpiId getKpiId() {
+        if (kpiIdBuilder_ == null) {
+          return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        } else {
+          return kpiIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiId_ = value;
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(
+          monitoring.Monitoring.KpiId.Builder builderForValue) {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder mergeKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (kpiId_ != null) {
+            kpiId_ =
+              monitoring.Monitoring.KpiId.newBuilder(kpiId_).mergeFrom(value).buildPartial();
+          } else {
+            kpiId_ = value;
+          }
+          onChanged();
+        } else {
+          kpiIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder clearKpiId() {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+          onChanged();
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiId.Builder getKpiIdBuilder() {
+        
+        onChanged();
+        return getKpiIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+        if (kpiIdBuilder_ != null) {
+          return kpiIdBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiId_ == null ?
+              monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> 
+          getKpiIdFieldBuilder() {
+        if (kpiIdBuilder_ == null) {
+          kpiIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder>(
+                  getKpiId(),
+                  getParentForChildren(),
+                  isClean());
+          kpiId_ = null;
+        }
+        return kpiIdBuilder_;
+      }
+
+      private monitoring.Monitoring.KpiDescriptor kpiDescriptor_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiDescriptor, monitoring.Monitoring.KpiDescriptor.Builder, monitoring.Monitoring.KpiDescriptorOrBuilder> kpiDescriptorBuilder_;
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       * @return Whether the kpiDescriptor field is set.
+       */
+      public boolean hasKpiDescriptor() {
+        return kpiDescriptorBuilder_ != null || kpiDescriptor_ != null;
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       * @return The kpiDescriptor.
+       */
+      public monitoring.Monitoring.KpiDescriptor getKpiDescriptor() {
+        if (kpiDescriptorBuilder_ == null) {
+          return kpiDescriptor_ == null ? monitoring.Monitoring.KpiDescriptor.getDefaultInstance() : kpiDescriptor_;
+        } else {
+          return kpiDescriptorBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public Builder setKpiDescriptor(monitoring.Monitoring.KpiDescriptor value) {
+        if (kpiDescriptorBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiDescriptor_ = value;
+          onChanged();
+        } else {
+          kpiDescriptorBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public Builder setKpiDescriptor(
+          monitoring.Monitoring.KpiDescriptor.Builder builderForValue) {
+        if (kpiDescriptorBuilder_ == null) {
+          kpiDescriptor_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiDescriptorBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public Builder mergeKpiDescriptor(monitoring.Monitoring.KpiDescriptor value) {
+        if (kpiDescriptorBuilder_ == null) {
+          if (kpiDescriptor_ != null) {
+            kpiDescriptor_ =
+              monitoring.Monitoring.KpiDescriptor.newBuilder(kpiDescriptor_).mergeFrom(value).buildPartial();
+          } else {
+            kpiDescriptor_ = value;
+          }
+          onChanged();
+        } else {
+          kpiDescriptorBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public Builder clearKpiDescriptor() {
+        if (kpiDescriptorBuilder_ == null) {
+          kpiDescriptor_ = null;
+          onChanged();
+        } else {
+          kpiDescriptor_ = null;
+          kpiDescriptorBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public monitoring.Monitoring.KpiDescriptor.Builder getKpiDescriptorBuilder() {
+        
+        onChanged();
+        return getKpiDescriptorFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      public monitoring.Monitoring.KpiDescriptorOrBuilder getKpiDescriptorOrBuilder() {
+        if (kpiDescriptorBuilder_ != null) {
+          return kpiDescriptorBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiDescriptor_ == null ?
+              monitoring.Monitoring.KpiDescriptor.getDefaultInstance() : kpiDescriptor_;
+        }
+      }
+      /**
+       * <code>.monitoring.KpiDescriptor kpi_descriptor = 2;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiDescriptor, monitoring.Monitoring.KpiDescriptor.Builder, monitoring.Monitoring.KpiDescriptorOrBuilder> 
+          getKpiDescriptorFieldBuilder() {
+        if (kpiDescriptorBuilder_ == null) {
+          kpiDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              monitoring.Monitoring.KpiDescriptor, monitoring.Monitoring.KpiDescriptor.Builder, monitoring.Monitoring.KpiDescriptorOrBuilder>(
+                  getKpiDescriptor(),
+                  getParentForChildren(),
+                  isClean());
+          kpiDescriptor_ = null;
+        }
+        return kpiDescriptorBuilder_;
+      }
+
+      private float samplingDurationS_ ;
+      /**
+       * <code>float sampling_duration_s = 3;</code>
+       * @return The samplingDurationS.
+       */
+      @java.lang.Override
+      public float getSamplingDurationS() {
+        return samplingDurationS_;
+      }
+      /**
+       * <code>float sampling_duration_s = 3;</code>
+       * @param value The samplingDurationS to set.
+       * @return This builder for chaining.
+       */
+      public Builder setSamplingDurationS(float value) {
+        
+        samplingDurationS_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>float sampling_duration_s = 3;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearSamplingDurationS() {
+        
+        samplingDurationS_ = 0F;
+        onChanged();
+        return this;
+      }
+
+      private float samplingIntervalS_ ;
+      /**
+       * <code>float sampling_interval_s = 4;</code>
+       * @return The samplingIntervalS.
+       */
+      @java.lang.Override
+      public float getSamplingIntervalS() {
+        return samplingIntervalS_;
+      }
+      /**
+       * <code>float sampling_interval_s = 4;</code>
+       * @param value The samplingIntervalS to set.
+       * @return This builder for chaining.
+       */
+      public Builder setSamplingIntervalS(float value) {
+        
+        samplingIntervalS_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>float sampling_interval_s = 4;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearSamplingIntervalS() {
+        
+        samplingIntervalS_ = 0F;
+        onChanged();
+        return this;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:device.MonitoringSettings)
+    }
+
+    // @@protoc_insertion_point(class_scope:device.MonitoringSettings)
+    private static final device.Device.MonitoringSettings DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new device.Device.MonitoringSettings();
+    }
+
+    public static device.Device.MonitoringSettings getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<MonitoringSettings>
+        PARSER = new com.google.protobuf.AbstractParser<MonitoringSettings>() {
+      @java.lang.Override
+      public MonitoringSettings parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new MonitoringSettings(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<MonitoringSettings> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<MonitoringSettings> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public device.Device.MonitoringSettings getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_device_MonitoringSettings_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_device_MonitoringSettings_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -23,21 +996,35 @@ public final class Device {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\014device.proto\022\006device\032\rcontext.proto2\360\001" +
-      "\n\rDeviceService\0221\n\tAddDevice\022\017.context.D" +
-      "evice\032\021.context.DeviceId\"\000\0227\n\017ConfigureD" +
-      "evice\022\017.context.Device\032\021.context.DeviceI" +
-      "d\"\000\0223\n\014DeleteDevice\022\021.context.DeviceId\032\016" +
-      ".context.Empty\"\000\022>\n\020GetInitialConfig\022\021.c" +
-      "ontext.DeviceId\032\025.context.DeviceConfig\"\000" +
-      "b\006proto3"
+      "\n\014device.proto\022\006device\032\rcontext.proto\032\020m" +
+      "onitoring.proto\"\244\001\n\022MonitoringSettings\022!" +
+      "\n\006kpi_id\030\001 \001(\0132\021.monitoring.KpiId\0221\n\016kpi" +
+      "_descriptor\030\002 \001(\0132\031.monitoring.KpiDescri" +
+      "ptor\022\033\n\023sampling_duration_s\030\003 \001(\002\022\033\n\023sam" +
+      "pling_interval_s\030\004 \001(\0022\262\002\n\rDeviceService" +
+      "\0221\n\tAddDevice\022\017.context.Device\032\021.context" +
+      ".DeviceId\"\000\0227\n\017ConfigureDevice\022\017.context" +
+      ".Device\032\021.context.DeviceId\"\000\0223\n\014DeleteDe" +
+      "vice\022\021.context.DeviceId\032\016.context.Empty\"" +
+      "\000\022>\n\020GetInitialConfig\022\021.context.DeviceId" +
+      "\032\025.context.DeviceConfig\"\000\022@\n\020MonitorDevi" +
+      "ceKpi\022\032.device.MonitoringSettings\032\016.cont" +
+      "ext.Empty\"\000b\006proto3"
     };
     descriptor = com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
           context.ContextOuterClass.getDescriptor(),
+          monitoring.Monitoring.getDescriptor(),
         });
+    internal_static_device_MonitoringSettings_descriptor =
+      getDescriptor().getMessageTypes().get(0);
+    internal_static_device_MonitoringSettings_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_device_MonitoringSettings_descriptor,
+        new java.lang.String[] { "KpiId", "KpiDescriptor", "SamplingDurationS", "SamplingIntervalS", });
     context.ContextOuterClass.getDescriptor();
+    monitoring.Monitoring.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)
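The hunk above adds the generated `device.MonitoringSettings` message (kpi_id, kpi_descriptor, sampling_duration_s, sampling_interval_s) to Device.java. As a reference point for reviewers, here is a minimal sketch of how a caller might assemble that message; the `KpiDescriptor.newBuilder()` factory and its setters are the conventional protoc-generated builders assumed to exist in monitoring.Monitoring (they are outside this hunk), and all field values are illustrative placeholders:

    // Sketch only: assembles the device.MonitoringSettings message defined above.
    // KpiDescriptor.newBuilder() and its setters are assumed standard protoc output,
    // not taken from this hunk; values are placeholders.
    static device.Device.MonitoringSettings buildMonitoringSettings() {
        monitoring.Monitoring.KpiDescriptor descriptor = monitoring.Monitoring.KpiDescriptor.newBuilder()
            .setKpiDescription("bytes received on endpoint")                 // assumed setter
            .setKpiSampleType(kpi_sample_types.KpiSampleTypes.KpiSampleType
                .KPISAMPLETYPE_BYTES_RECEIVED)                               // assumed setter
            .build();
        return device.Device.MonitoringSettings.newBuilder()
            .setKpiDescriptor(descriptor)      // field 2: .monitoring.KpiDescriptor
            .setSamplingDurationS(3600.0f)     // field 3: sampling_duration_s
            .setSamplingIntervalS(5.0f)        // field 4: sampling_interval_s
            .build();
    }
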
diff --git a/src/automation/target/generated-sources/grpc/device/DeviceService.java b/src/automation/target/generated-sources/grpc/device/DeviceService.java
index c2d9be46eb44a8e05efab74866f7ff8337934ed3..1768f9911b9d05a7c61e70d8f75e397f9a3341a4 100644
--- a/src/automation/target/generated-sources/grpc/device/DeviceService.java
+++ b/src/automation/target/generated-sources/grpc/device/DeviceService.java
@@ -16,6 +16,8 @@ public interface DeviceService extends MutinyService {
     
     io.smallrye.mutiny.Uni<context.ContextOuterClass.DeviceConfig> getInitialConfig(context.ContextOuterClass.DeviceId request);
     
+    io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorDeviceKpi(device.Device.MonitoringSettings request);
+    
     
     
 
diff --git a/src/automation/target/generated-sources/grpc/device/DeviceServiceBean.java b/src/automation/target/generated-sources/grpc/device/DeviceServiceBean.java
index 203e2f2b3e1d42e7a848cdcc19f251e484def93f..c7e767237abc22ff273cc454f1433e5f811382fc 100644
--- a/src/automation/target/generated-sources/grpc/device/DeviceServiceBean.java
+++ b/src/automation/target/generated-sources/grpc/device/DeviceServiceBean.java
@@ -47,5 +47,13 @@ public class DeviceServiceBean extends MutinyDeviceServiceGrpc.DeviceServiceImpl
           throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
        }
     }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorDeviceKpi(device.Device.MonitoringSettings request) {
+       try {
+         return delegate.monitorDeviceKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
 
 }
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/device/DeviceServiceClient.java b/src/automation/target/generated-sources/grpc/device/DeviceServiceClient.java
index e987e6333549200193cf9b6a77f8589b15086718..2445752a6392c3f6f9df0b0ef439d789e6a8d925 100644
--- a/src/automation/target/generated-sources/grpc/device/DeviceServiceClient.java
+++ b/src/automation/target/generated-sources/grpc/device/DeviceServiceClient.java
@@ -36,5 +36,9 @@ public class DeviceServiceClient implements DeviceService, MutinyClient<MutinyDe
     public io.smallrye.mutiny.Uni<context.ContextOuterClass.DeviceConfig> getInitialConfig(context.ContextOuterClass.DeviceId request) {
        return stub.getInitialConfig(request);
     }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorDeviceKpi(device.Device.MonitoringSettings request) {
+       return stub.monitorDeviceKpi(request);
+    }
 
 }
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/device/DeviceServiceGrpc.java b/src/automation/target/generated-sources/grpc/device/DeviceServiceGrpc.java
index c19e45888168c69c1454dfde99af59cf64195851..9c2e379d311a58ca51c9208feba359120c086c0e 100644
--- a/src/automation/target/generated-sources/grpc/device/DeviceServiceGrpc.java
+++ b/src/automation/target/generated-sources/grpc/device/DeviceServiceGrpc.java
@@ -138,6 +138,37 @@ public final class DeviceServiceGrpc {
     return getGetInitialConfigMethod;
   }
 
+  private static volatile io.grpc.MethodDescriptor<device.Device.MonitoringSettings,
+      context.ContextOuterClass.Empty> getMonitorDeviceKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "MonitorDeviceKpi",
+      requestType = device.Device.MonitoringSettings.class,
+      responseType = context.ContextOuterClass.Empty.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<device.Device.MonitoringSettings,
+      context.ContextOuterClass.Empty> getMonitorDeviceKpiMethod() {
+    io.grpc.MethodDescriptor<device.Device.MonitoringSettings, context.ContextOuterClass.Empty> getMonitorDeviceKpiMethod;
+    if ((getMonitorDeviceKpiMethod = DeviceServiceGrpc.getMonitorDeviceKpiMethod) == null) {
+      synchronized (DeviceServiceGrpc.class) {
+        if ((getMonitorDeviceKpiMethod = DeviceServiceGrpc.getMonitorDeviceKpiMethod) == null) {
+          DeviceServiceGrpc.getMonitorDeviceKpiMethod = getMonitorDeviceKpiMethod =
+              io.grpc.MethodDescriptor.<device.Device.MonitoringSettings, context.ContextOuterClass.Empty>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "MonitorDeviceKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  device.Device.MonitoringSettings.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  context.ContextOuterClass.Empty.getDefaultInstance()))
+              .setSchemaDescriptor(new DeviceServiceMethodDescriptorSupplier("MonitorDeviceKpi"))
+              .build();
+        }
+      }
+    }
+    return getMonitorDeviceKpiMethod;
+  }
+
   /**
    * Creates a new async stub that supports all call types for the service
    */
@@ -214,6 +245,13 @@ public final class DeviceServiceGrpc {
       io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetInitialConfigMethod(), responseObserver);
     }
 
+    /**
+     */
+    public void monitorDeviceKpi(device.Device.MonitoringSettings request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMonitorDeviceKpiMethod(), responseObserver);
+    }
+
     @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
       return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
           .addMethod(
@@ -244,6 +282,13 @@ public final class DeviceServiceGrpc {
                 context.ContextOuterClass.DeviceId,
                 context.ContextOuterClass.DeviceConfig>(
                   this, METHODID_GET_INITIAL_CONFIG)))
+          .addMethod(
+            getMonitorDeviceKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                device.Device.MonitoringSettings,
+                context.ContextOuterClass.Empty>(
+                  this, METHODID_MONITOR_DEVICE_KPI)))
           .build();
     }
   }
@@ -293,6 +338,14 @@ public final class DeviceServiceGrpc {
       io.grpc.stub.ClientCalls.asyncUnaryCall(
           getChannel().newCall(getGetInitialConfigMethod(), getCallOptions()), request, responseObserver);
     }
+
+    /**
+     */
+    public void monitorDeviceKpi(device.Device.MonitoringSettings request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getMonitorDeviceKpiMethod(), getCallOptions()), request, responseObserver);
+    }
   }
 
   /**
@@ -336,6 +389,13 @@ public final class DeviceServiceGrpc {
       return io.grpc.stub.ClientCalls.blockingUnaryCall(
           getChannel(), getGetInitialConfigMethod(), getCallOptions(), request);
     }
+
+    /**
+     */
+    public context.ContextOuterClass.Empty monitorDeviceKpi(device.Device.MonitoringSettings request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getMonitorDeviceKpiMethod(), getCallOptions(), request);
+    }
   }
 
   /**
@@ -383,12 +443,21 @@ public final class DeviceServiceGrpc {
       return io.grpc.stub.ClientCalls.futureUnaryCall(
           getChannel().newCall(getGetInitialConfigMethod(), getCallOptions()), request);
     }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<context.ContextOuterClass.Empty> monitorDeviceKpi(
+        device.Device.MonitoringSettings request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getMonitorDeviceKpiMethod(), getCallOptions()), request);
+    }
   }
 
   private static final int METHODID_ADD_DEVICE = 0;
   private static final int METHODID_CONFIGURE_DEVICE = 1;
   private static final int METHODID_DELETE_DEVICE = 2;
   private static final int METHODID_GET_INITIAL_CONFIG = 3;
+  private static final int METHODID_MONITOR_DEVICE_KPI = 4;
 
   private static final class MethodHandlers<Req, Resp> implements
       io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
@@ -423,6 +492,10 @@ public final class DeviceServiceGrpc {
           serviceImpl.getInitialConfig((context.ContextOuterClass.DeviceId) request,
               (io.grpc.stub.StreamObserver<context.ContextOuterClass.DeviceConfig>) responseObserver);
           break;
+        case METHODID_MONITOR_DEVICE_KPI:
+          serviceImpl.monitorDeviceKpi((device.Device.MonitoringSettings) request,
+              (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver);
+          break;
         default:
           throw new AssertionError();
       }
@@ -488,6 +561,7 @@ public final class DeviceServiceGrpc {
               .addMethod(getConfigureDeviceMethod())
               .addMethod(getDeleteDeviceMethod())
               .addMethod(getGetInitialConfigMethod())
+              .addMethod(getMonitorDeviceKpiMethod())
               .build();
         }
       }
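The DeviceServiceGrpc.java changes above register the new unary MonitorDeviceKpi RPC on the service descriptor and on the async, blocking and future stubs. A sketch of invoking it through the blocking stub follows; `newBlockingStub` and the `DeviceServiceBlockingStub` class name are the conventional protoc-gen-grpc-java output assumed here (not visible in this hunk), and the host/port are placeholders:

    // Sketch only: calls the new MonitorDeviceKpi RPC via the blocking stub.
    // Channel setup and the stub factory are assumed standard grpc-java plumbing.
    io.grpc.ManagedChannel channel = io.grpc.ManagedChannelBuilder
        .forAddress("deviceservice", 2020)   // hypothetical endpoint
        .usePlaintext()
        .build();
    device.DeviceServiceGrpc.DeviceServiceBlockingStub stub =
        device.DeviceServiceGrpc.newBlockingStub(channel);
    context.ContextOuterClass.Empty reply =
        stub.monitorDeviceKpi(buildMonitoringSettings());  // request built as sketched earlier
    channel.shutdown();
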
diff --git a/src/automation/target/generated-sources/grpc/device/MutinyDeviceServiceGrpc.java b/src/automation/target/generated-sources/grpc/device/MutinyDeviceServiceGrpc.java
index ee52d131578ede0769997d3f76a02a302568c21f..096784b5a39fa66afbe2ff5402b8beec6c294730 100644
--- a/src/automation/target/generated-sources/grpc/device/MutinyDeviceServiceGrpc.java
+++ b/src/automation/target/generated-sources/grpc/device/MutinyDeviceServiceGrpc.java
@@ -55,6 +55,11 @@ public final class MutinyDeviceServiceGrpc implements io.quarkus.grpc.runtime.Mu
             return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::getInitialConfig);
         }
 
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorDeviceKpi(device.Device.MonitoringSettings request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::monitorDeviceKpi);
+        }
+
     }
 
     
@@ -92,6 +97,11 @@ public final class MutinyDeviceServiceGrpc implements io.quarkus.grpc.runtime.Mu
             throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
         }
 
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorDeviceKpi(device.Device.MonitoringSettings request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
         @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
             return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
                     .addMethod(
@@ -122,6 +132,13 @@ public final class MutinyDeviceServiceGrpc implements io.quarkus.grpc.runtime.Mu
                                             context.ContextOuterClass.DeviceId,
                                             context.ContextOuterClass.DeviceConfig>(
                                             this, METHODID_GET_INITIAL_CONFIG, compression)))
+                    .addMethod(
+                            device.DeviceServiceGrpc.getMonitorDeviceKpiMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            device.Device.MonitoringSettings,
+                                            context.ContextOuterClass.Empty>(
+                                            this, METHODID_MONITOR_DEVICE_KPI, compression)))
                     .build();
         }
     }
@@ -130,6 +147,7 @@ public final class MutinyDeviceServiceGrpc implements io.quarkus.grpc.runtime.Mu
     private static final int METHODID_CONFIGURE_DEVICE = 1;
     private static final int METHODID_DELETE_DEVICE = 2;
     private static final int METHODID_GET_INITIAL_CONFIG = 3;
+    private static final int METHODID_MONITOR_DEVICE_KPI = 4;
 
     private static final class MethodHandlers<Req, Resp> implements
             io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
@@ -174,6 +192,12 @@ public final class MutinyDeviceServiceGrpc implements io.quarkus.grpc.runtime.Mu
                             compression,
                             serviceImpl::getInitialConfig);
                     break;
+                case METHODID_MONITOR_DEVICE_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((device.Device.MonitoringSettings) request,
+                            (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver,
+                            compression,
+                            serviceImpl::monitorDeviceKpi);
+                    break;
                 default:
                     throw new java.lang.AssertionError();
             }
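On the Mutiny side, the changes above expose monitorDeviceKpi as a `Uni<context.ContextOuterClass.Empty>` on the DeviceService interface and its client/bean implementations. A sketch of consuming it reactively is shown below; how the `DeviceService` instance is obtained (for example via Quarkus gRPC client injection) is outside this diff and left as an assumption:

    // Sketch: reactive call through the Mutiny-style DeviceService interface added above.
    // Acquisition/injection of the DeviceService instance is assumed, not shown in this diff.
    static void requestKpiMonitoring(device.DeviceService deviceService,
                                     device.Device.MonitoringSettings settings) {
        deviceService.monitorDeviceKpi(settings)
            .subscribe().with(
                empty   -> System.out.println("MonitorDeviceKpi accepted"),
                failure -> System.err.println("MonitorDeviceKpi failed: " + failure));
    }
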
diff --git a/src/automation/target/generated-sources/grpc/kpi_sample_types/KpiSampleTypes.java b/src/automation/target/generated-sources/grpc/kpi_sample_types/KpiSampleTypes.java
new file mode 100644
index 0000000000000000000000000000000000000000..67e1ec736f9d83cbf95b419e9e61e92e82e73b88
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/kpi_sample_types/KpiSampleTypes.java
@@ -0,0 +1,176 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: kpi_sample_types.proto
+
+package kpi_sample_types;
+
+public final class KpiSampleTypes {
+  private KpiSampleTypes() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  /**
+   * Protobuf enum {@code kpi_sample_types.KpiSampleType}
+   */
+  public enum KpiSampleType
+      implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * <code>KPISAMPLETYPE_UNKNOWN = 0;</code>
+     */
+    KPISAMPLETYPE_UNKNOWN(0),
+    /**
+     * <code>KPISAMPLETYPE_PACKETS_TRANSMITTED = 101;</code>
+     */
+    KPISAMPLETYPE_PACKETS_TRANSMITTED(101),
+    /**
+     * <code>KPISAMPLETYPE_PACKETS_RECEIVED = 102;</code>
+     */
+    KPISAMPLETYPE_PACKETS_RECEIVED(102),
+    /**
+     * <code>KPISAMPLETYPE_BYTES_TRANSMITTED = 201;</code>
+     */
+    KPISAMPLETYPE_BYTES_TRANSMITTED(201),
+    /**
+     * <code>KPISAMPLETYPE_BYTES_RECEIVED = 202;</code>
+     */
+    KPISAMPLETYPE_BYTES_RECEIVED(202),
+    UNRECOGNIZED(-1),
+    ;
+
+    /**
+     * <code>KPISAMPLETYPE_UNKNOWN = 0;</code>
+     */
+    public static final int KPISAMPLETYPE_UNKNOWN_VALUE = 0;
+    /**
+     * <code>KPISAMPLETYPE_PACKETS_TRANSMITTED = 101;</code>
+     */
+    public static final int KPISAMPLETYPE_PACKETS_TRANSMITTED_VALUE = 101;
+    /**
+     * <code>KPISAMPLETYPE_PACKETS_RECEIVED = 102;</code>
+     */
+    public static final int KPISAMPLETYPE_PACKETS_RECEIVED_VALUE = 102;
+    /**
+     * <code>KPISAMPLETYPE_BYTES_TRANSMITTED = 201;</code>
+     */
+    public static final int KPISAMPLETYPE_BYTES_TRANSMITTED_VALUE = 201;
+    /**
+     * <code>KPISAMPLETYPE_BYTES_RECEIVED = 202;</code>
+     */
+    public static final int KPISAMPLETYPE_BYTES_RECEIVED_VALUE = 202;
+
+
+    public final int getNumber() {
+      if (this == UNRECOGNIZED) {
+        throw new java.lang.IllegalArgumentException(
+            "Can't get the number of an unknown enum value.");
+      }
+      return value;
+    }
+
+    /**
+     * @param value The numeric wire value of the corresponding enum entry.
+     * @return The enum associated with the given numeric wire value.
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
+    public static KpiSampleType valueOf(int value) {
+      return forNumber(value);
+    }
+
+    /**
+     * @param value The numeric wire value of the corresponding enum entry.
+     * @return The enum associated with the given numeric wire value.
+     */
+    public static KpiSampleType forNumber(int value) {
+      switch (value) {
+        case 0: return KPISAMPLETYPE_UNKNOWN;
+        case 101: return KPISAMPLETYPE_PACKETS_TRANSMITTED;
+        case 102: return KPISAMPLETYPE_PACKETS_RECEIVED;
+        case 201: return KPISAMPLETYPE_BYTES_TRANSMITTED;
+        case 202: return KPISAMPLETYPE_BYTES_RECEIVED;
+        default: return null;
+      }
+    }
+
+    public static com.google.protobuf.Internal.EnumLiteMap<KpiSampleType>
+        internalGetValueMap() {
+      return internalValueMap;
+    }
+    private static final com.google.protobuf.Internal.EnumLiteMap<
+        KpiSampleType> internalValueMap =
+          new com.google.protobuf.Internal.EnumLiteMap<KpiSampleType>() {
+            public KpiSampleType findValueByNumber(int number) {
+              return KpiSampleType.forNumber(number);
+            }
+          };
+
+    public final com.google.protobuf.Descriptors.EnumValueDescriptor
+        getValueDescriptor() {
+      if (this == UNRECOGNIZED) {
+        throw new java.lang.IllegalStateException(
+            "Can't get the descriptor of an unrecognized enum value.");
+      }
+      return getDescriptor().getValues().get(ordinal());
+    }
+    public final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    public static final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptor() {
+      return kpi_sample_types.KpiSampleTypes.getDescriptor().getEnumTypes().get(0);
+    }
+
+    private static final KpiSampleType[] VALUES = values();
+
+    public static KpiSampleType valueOf(
+        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+      if (desc.getType() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "EnumValueDescriptor is not for this type.");
+      }
+      if (desc.getIndex() == -1) {
+        return UNRECOGNIZED;
+      }
+      return VALUES[desc.getIndex()];
+    }
+
+    private final int value;
+
+    private KpiSampleType(int value) {
+      this.value = value;
+    }
+
+    // @@protoc_insertion_point(enum_scope:kpi_sample_types.KpiSampleType)
+  }
+
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static  com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\026kpi_sample_types.proto\022\020kpi_sample_typ" +
+      "es*\276\001\n\rKpiSampleType\022\031\n\025KPISAMPLETYPE_UN" +
+      "KNOWN\020\000\022%\n!KPISAMPLETYPE_PACKETS_TRANSMI" +
+      "TTED\020e\022\"\n\036KPISAMPLETYPE_PACKETS_RECEIVED" +
+      "\020f\022$\n\037KPISAMPLETYPE_BYTES_TRANSMITTED\020\311\001" +
+      "\022!\n\034KPISAMPLETYPE_BYTES_RECEIVED\020\312\001b\006pro" +
+      "to3"
+    };
+    descriptor = com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        });
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
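The new kpi_sample_types.KpiSampleTypes file above defines the KpiSampleType enum together with its wire-value helpers. A small sketch of round-tripping between numeric wire values and enum constants, using only the helpers defined in that generated file:

    // Sketch: converting between wire values and KpiSampleType constants.
    static void kpiSampleTypeRoundTrip() {
        kpi_sample_types.KpiSampleTypes.KpiSampleType received =
            kpi_sample_types.KpiSampleTypes.KpiSampleType.forNumber(202);   // KPISAMPLETYPE_BYTES_RECEIVED
        int wire = kpi_sample_types.KpiSampleTypes.KpiSampleType
            .KPISAMPLETYPE_PACKETS_RECEIVED.getNumber();                    // 102
        // forNumber returns null for numbers not defined in the enum, and
        // getNumber() throws IllegalArgumentException for UNRECOGNIZED, per the code above.
        assert received != null && wire == 102;
    }
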
diff --git a/src/automation/target/generated-sources/grpc/monitoring/Monitoring.java b/src/automation/target/generated-sources/grpc/monitoring/Monitoring.java
new file mode 100644
index 0000000000000000000000000000000000000000..1ef1f0c029d1d5916eaf5ffc4f185c048a40b5a3
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/Monitoring.java
@@ -0,0 +1,5629 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: monitoring.proto
+
+package monitoring;
+
+public final class Monitoring {
+  private Monitoring() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (com.google.protobuf.ExtensionRegistryLite) registry);
+  }
+  public interface KpiDescriptorOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.KpiDescriptor)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>string kpi_description = 1;</code>
+     * @return The kpiDescription.
+     */
+    java.lang.String getKpiDescription();
+    /**
+     * <code>string kpi_description = 1;</code>
+     * @return The bytes for kpiDescription.
+     */
+    com.google.protobuf.ByteString
+        getKpiDescriptionBytes();
+
+    /**
+     * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+     * @return The enum numeric value on the wire for kpiSampleType.
+     */
+    int getKpiSampleTypeValue();
+    /**
+     * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+     * @return The kpiSampleType.
+     */
+    kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleType();
+
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     * @return Whether the deviceId field is set.
+     */
+    boolean hasDeviceId();
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     * @return The deviceId.
+     */
+    context.ContextOuterClass.DeviceId getDeviceId();
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     */
+    context.ContextOuterClass.DeviceIdOrBuilder getDeviceIdOrBuilder();
+
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     * @return Whether the endpointId field is set.
+     */
+    boolean hasEndpointId();
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     * @return The endpointId.
+     */
+    context.ContextOuterClass.EndPointId getEndpointId();
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     */
+    context.ContextOuterClass.EndPointIdOrBuilder getEndpointIdOrBuilder();
+
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     * @return Whether the serviceId field is set.
+     */
+    boolean hasServiceId();
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     * @return The serviceId.
+     */
+    context.ContextOuterClass.ServiceId getServiceId();
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     */
+    context.ContextOuterClass.ServiceIdOrBuilder getServiceIdOrBuilder();
+  }
+  /**
+   * Protobuf type {@code monitoring.KpiDescriptor}
+   */
+  public static final class KpiDescriptor extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.KpiDescriptor)
+      KpiDescriptorOrBuilder {
+  private static final long serialVersionUID = 0L;
+    // Use KpiDescriptor.newBuilder() to construct.
+    private KpiDescriptor(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private KpiDescriptor() {
+      kpiDescription_ = "";
+      kpiSampleType_ = 0;
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new KpiDescriptor();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private KpiDescriptor(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              kpiDescription_ = s;
+              break;
+            }
+            case 16: {
+              int rawValue = input.readEnum();
+
+              kpiSampleType_ = rawValue;
+              break;
+            }
+            case 26: {
+              context.ContextOuterClass.DeviceId.Builder subBuilder = null;
+              if (deviceId_ != null) {
+                subBuilder = deviceId_.toBuilder();
+              }
+              deviceId_ = input.readMessage(context.ContextOuterClass.DeviceId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(deviceId_);
+                deviceId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 34: {
+              context.ContextOuterClass.EndPointId.Builder subBuilder = null;
+              if (endpointId_ != null) {
+                subBuilder = endpointId_.toBuilder();
+              }
+              endpointId_ = input.readMessage(context.ContextOuterClass.EndPointId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(endpointId_);
+                endpointId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 42: {
+              context.ContextOuterClass.ServiceId.Builder subBuilder = null;
+              if (serviceId_ != null) {
+                subBuilder = serviceId_.toBuilder();
+              }
+              serviceId_ = input.readMessage(context.ContextOuterClass.ServiceId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(serviceId_);
+                serviceId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiDescriptor_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiDescriptor_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.KpiDescriptor.class, monitoring.Monitoring.KpiDescriptor.Builder.class);
+    }
+
+    public static final int KPI_DESCRIPTION_FIELD_NUMBER = 1;
+    private volatile java.lang.Object kpiDescription_;
+    /**
+     * <code>string kpi_description = 1;</code>
+     * @return The kpiDescription.
+     */
+    @java.lang.Override
+    public java.lang.String getKpiDescription() {
+      java.lang.Object ref = kpiDescription_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        kpiDescription_ = s;
+        return s;
+      }
+    }
+    /**
+     * <code>string kpi_description = 1;</code>
+     * @return The bytes for kpiDescription.
+     */
+    @java.lang.Override
+    public com.google.protobuf.ByteString
+        getKpiDescriptionBytes() {
+      java.lang.Object ref = kpiDescription_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        kpiDescription_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int KPI_SAMPLE_TYPE_FIELD_NUMBER = 2;
+    private int kpiSampleType_;
+    /**
+     * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+     * @return The enum numeric value on the wire for kpiSampleType.
+     */
+    @java.lang.Override public int getKpiSampleTypeValue() {
+      return kpiSampleType_;
+    }
+    /**
+     * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+     * @return The kpiSampleType.
+     */
+    @java.lang.Override public kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleType() {
+      @SuppressWarnings("deprecation")
+      kpi_sample_types.KpiSampleTypes.KpiSampleType result = kpi_sample_types.KpiSampleTypes.KpiSampleType.valueOf(kpiSampleType_);
+      return result == null ? kpi_sample_types.KpiSampleTypes.KpiSampleType.UNRECOGNIZED : result;
+    }
+
+    public static final int DEVICE_ID_FIELD_NUMBER = 3;
+    private context.ContextOuterClass.DeviceId deviceId_;
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     * @return Whether the deviceId field is set.
+     */
+    @java.lang.Override
+    public boolean hasDeviceId() {
+      return deviceId_ != null;
+    }
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     * @return The deviceId.
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.DeviceId getDeviceId() {
+      return deviceId_ == null ? context.ContextOuterClass.DeviceId.getDefaultInstance() : deviceId_;
+    }
+    /**
+     * <code>.context.DeviceId device_id = 3;</code>
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.DeviceIdOrBuilder getDeviceIdOrBuilder() {
+      return getDeviceId();
+    }
+
+    public static final int ENDPOINT_ID_FIELD_NUMBER = 4;
+    private context.ContextOuterClass.EndPointId endpointId_;
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     * @return Whether the endpointId field is set.
+     */
+    @java.lang.Override
+    public boolean hasEndpointId() {
+      return endpointId_ != null;
+    }
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     * @return The endpointId.
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.EndPointId getEndpointId() {
+      return endpointId_ == null ? context.ContextOuterClass.EndPointId.getDefaultInstance() : endpointId_;
+    }
+    /**
+     * <code>.context.EndPointId endpoint_id = 4;</code>
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.EndPointIdOrBuilder getEndpointIdOrBuilder() {
+      return getEndpointId();
+    }
+
+    public static final int SERVICE_ID_FIELD_NUMBER = 5;
+    private context.ContextOuterClass.ServiceId serviceId_;
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     * @return Whether the serviceId field is set.
+     */
+    @java.lang.Override
+    public boolean hasServiceId() {
+      return serviceId_ != null;
+    }
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     * @return The serviceId.
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.ServiceId getServiceId() {
+      return serviceId_ == null ? context.ContextOuterClass.ServiceId.getDefaultInstance() : serviceId_;
+    }
+    /**
+     * <pre>
+     *  context.SliceId    slice_id    = 6;
+     * </pre>
+     *
+     * <code>.context.ServiceId service_id = 5;</code>
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.ServiceIdOrBuilder getServiceIdOrBuilder() {
+      return getServiceId();
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (!getKpiDescriptionBytes().isEmpty()) {
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kpiDescription_);
+      }
+      if (kpiSampleType_ != kpi_sample_types.KpiSampleTypes.KpiSampleType.KPISAMPLETYPE_UNKNOWN.getNumber()) {
+        output.writeEnum(2, kpiSampleType_);
+      }
+      if (deviceId_ != null) {
+        output.writeMessage(3, getDeviceId());
+      }
+      if (endpointId_ != null) {
+        output.writeMessage(4, getEndpointId());
+      }
+      if (serviceId_ != null) {
+        output.writeMessage(5, getServiceId());
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (!getKpiDescriptionBytes().isEmpty()) {
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kpiDescription_);
+      }
+      if (kpiSampleType_ != kpi_sample_types.KpiSampleTypes.KpiSampleType.KPISAMPLETYPE_UNKNOWN.getNumber()) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(2, kpiSampleType_);
+      }
+      if (deviceId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(3, getDeviceId());
+      }
+      if (endpointId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, getEndpointId());
+      }
+      if (serviceId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(5, getServiceId());
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.KpiDescriptor)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.KpiDescriptor other = (monitoring.Monitoring.KpiDescriptor) obj;
+
+      if (!getKpiDescription()
+          .equals(other.getKpiDescription())) return false;
+      if (kpiSampleType_ != other.kpiSampleType_) return false;
+      if (hasDeviceId() != other.hasDeviceId()) return false;
+      if (hasDeviceId()) {
+        if (!getDeviceId()
+            .equals(other.getDeviceId())) return false;
+      }
+      if (hasEndpointId() != other.hasEndpointId()) return false;
+      if (hasEndpointId()) {
+        if (!getEndpointId()
+            .equals(other.getEndpointId())) return false;
+      }
+      if (hasServiceId() != other.hasServiceId()) return false;
+      if (hasServiceId()) {
+        if (!getServiceId()
+            .equals(other.getServiceId())) return false;
+      }
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      hash = (37 * hash) + KPI_DESCRIPTION_FIELD_NUMBER;
+      hash = (53 * hash) + getKpiDescription().hashCode();
+      hash = (37 * hash) + KPI_SAMPLE_TYPE_FIELD_NUMBER;
+      hash = (53 * hash) + kpiSampleType_;
+      if (hasDeviceId()) {
+        hash = (37 * hash) + DEVICE_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getDeviceId().hashCode();
+      }
+      if (hasEndpointId()) {
+        hash = (37 * hash) + ENDPOINT_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getEndpointId().hashCode();
+      }
+      if (hasServiceId()) {
+        hash = (37 * hash) + SERVICE_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getServiceId().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiDescriptor parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.KpiDescriptor prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.KpiDescriptor}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.KpiDescriptor)
+        monitoring.Monitoring.KpiDescriptorOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiDescriptor_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiDescriptor_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.KpiDescriptor.class, monitoring.Monitoring.KpiDescriptor.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.KpiDescriptor.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        kpiDescription_ = "";
+
+        kpiSampleType_ = 0;
+
+        if (deviceIdBuilder_ == null) {
+          deviceId_ = null;
+        } else {
+          deviceId_ = null;
+          deviceIdBuilder_ = null;
+        }
+        if (endpointIdBuilder_ == null) {
+          endpointId_ = null;
+        } else {
+          endpointId_ = null;
+          endpointIdBuilder_ = null;
+        }
+        if (serviceIdBuilder_ == null) {
+          serviceId_ = null;
+        } else {
+          serviceId_ = null;
+          serviceIdBuilder_ = null;
+        }
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiDescriptor_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiDescriptor getDefaultInstanceForType() {
+        return monitoring.Monitoring.KpiDescriptor.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiDescriptor build() {
+        monitoring.Monitoring.KpiDescriptor result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiDescriptor buildPartial() {
+        monitoring.Monitoring.KpiDescriptor result = new monitoring.Monitoring.KpiDescriptor(this);
+        result.kpiDescription_ = kpiDescription_;
+        result.kpiSampleType_ = kpiSampleType_;
+        if (deviceIdBuilder_ == null) {
+          result.deviceId_ = deviceId_;
+        } else {
+          result.deviceId_ = deviceIdBuilder_.build();
+        }
+        if (endpointIdBuilder_ == null) {
+          result.endpointId_ = endpointId_;
+        } else {
+          result.endpointId_ = endpointIdBuilder_.build();
+        }
+        if (serviceIdBuilder_ == null) {
+          result.serviceId_ = serviceId_;
+        } else {
+          result.serviceId_ = serviceIdBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.KpiDescriptor) {
+          return mergeFrom((monitoring.Monitoring.KpiDescriptor)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.KpiDescriptor other) {
+        if (other == monitoring.Monitoring.KpiDescriptor.getDefaultInstance()) return this;
+        if (!other.getKpiDescription().isEmpty()) {
+          kpiDescription_ = other.kpiDescription_;
+          onChanged();
+        }
+        if (other.kpiSampleType_ != 0) {
+          setKpiSampleTypeValue(other.getKpiSampleTypeValue());
+        }
+        if (other.hasDeviceId()) {
+          mergeDeviceId(other.getDeviceId());
+        }
+        if (other.hasEndpointId()) {
+          mergeEndpointId(other.getEndpointId());
+        }
+        if (other.hasServiceId()) {
+          mergeServiceId(other.getServiceId());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.KpiDescriptor parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.KpiDescriptor) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      private java.lang.Object kpiDescription_ = "";
+      /**
+       * <code>string kpi_description = 1;</code>
+       * @return The kpiDescription.
+       */
+      public java.lang.String getKpiDescription() {
+        java.lang.Object ref = kpiDescription_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          kpiDescription_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>string kpi_description = 1;</code>
+       * @return The bytes for kpiDescription.
+       */
+      public com.google.protobuf.ByteString
+          getKpiDescriptionBytes() {
+        java.lang.Object ref = kpiDescription_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          kpiDescription_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>string kpi_description = 1;</code>
+       * @param value The kpiDescription to set.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiDescription(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+
+        kpiDescription_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>string kpi_description = 1;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearKpiDescription() {
+        
+        kpiDescription_ = getDefaultInstance().getKpiDescription();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>string kpi_description = 1;</code>
+       * @param value The bytes for kpiDescription to set.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiDescriptionBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+
+        kpiDescription_ = value;
+        onChanged();
+        return this;
+      }
+
+      private int kpiSampleType_ = 0;
+      /**
+       * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+       * @return The enum numeric value on the wire for kpiSampleType.
+       */
+      @java.lang.Override public int getKpiSampleTypeValue() {
+        return kpiSampleType_;
+      }
+      /**
+       * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+       * @param value The enum numeric value on the wire for kpiSampleType to set.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiSampleTypeValue(int value) {
+        
+        kpiSampleType_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+       * @return The kpiSampleType.
+       */
+      @java.lang.Override
+      public kpi_sample_types.KpiSampleTypes.KpiSampleType getKpiSampleType() {
+        @SuppressWarnings("deprecation")
+        kpi_sample_types.KpiSampleTypes.KpiSampleType result = kpi_sample_types.KpiSampleTypes.KpiSampleType.valueOf(kpiSampleType_);
+        return result == null ? kpi_sample_types.KpiSampleTypes.KpiSampleType.UNRECOGNIZED : result;
+      }
+      /**
+       * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+       * @param value The kpiSampleType to set.
+       * @return This builder for chaining.
+       */
+      public Builder setKpiSampleType(kpi_sample_types.KpiSampleTypes.KpiSampleType value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        
+        kpiSampleType_ = value.getNumber();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>.kpi_sample_types.KpiSampleType kpi_sample_type = 2;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearKpiSampleType() {
+        
+        kpiSampleType_ = 0;
+        onChanged();
+        return this;
+      }
+
+      private context.ContextOuterClass.DeviceId deviceId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.DeviceId, context.ContextOuterClass.DeviceId.Builder, context.ContextOuterClass.DeviceIdOrBuilder> deviceIdBuilder_;
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       * @return Whether the deviceId field is set.
+       */
+      public boolean hasDeviceId() {
+        return deviceIdBuilder_ != null || deviceId_ != null;
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       * @return The deviceId.
+       */
+      public context.ContextOuterClass.DeviceId getDeviceId() {
+        if (deviceIdBuilder_ == null) {
+          return deviceId_ == null ? context.ContextOuterClass.DeviceId.getDefaultInstance() : deviceId_;
+        } else {
+          return deviceIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public Builder setDeviceId(context.ContextOuterClass.DeviceId value) {
+        if (deviceIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          deviceId_ = value;
+          onChanged();
+        } else {
+          deviceIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public Builder setDeviceId(
+          context.ContextOuterClass.DeviceId.Builder builderForValue) {
+        if (deviceIdBuilder_ == null) {
+          deviceId_ = builderForValue.build();
+          onChanged();
+        } else {
+          deviceIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public Builder mergeDeviceId(context.ContextOuterClass.DeviceId value) {
+        if (deviceIdBuilder_ == null) {
+          if (deviceId_ != null) {
+            deviceId_ =
+              context.ContextOuterClass.DeviceId.newBuilder(deviceId_).mergeFrom(value).buildPartial();
+          } else {
+            deviceId_ = value;
+          }
+          onChanged();
+        } else {
+          deviceIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public Builder clearDeviceId() {
+        if (deviceIdBuilder_ == null) {
+          deviceId_ = null;
+          onChanged();
+        } else {
+          deviceId_ = null;
+          deviceIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public context.ContextOuterClass.DeviceId.Builder getDeviceIdBuilder() {
+        
+        onChanged();
+        return getDeviceIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      public context.ContextOuterClass.DeviceIdOrBuilder getDeviceIdOrBuilder() {
+        if (deviceIdBuilder_ != null) {
+          return deviceIdBuilder_.getMessageOrBuilder();
+        } else {
+          return deviceId_ == null ?
+              context.ContextOuterClass.DeviceId.getDefaultInstance() : deviceId_;
+        }
+      }
+      /**
+       * <code>.context.DeviceId device_id = 3;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.DeviceId, context.ContextOuterClass.DeviceId.Builder, context.ContextOuterClass.DeviceIdOrBuilder> 
+          getDeviceIdFieldBuilder() {
+        if (deviceIdBuilder_ == null) {
+          deviceIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              context.ContextOuterClass.DeviceId, context.ContextOuterClass.DeviceId.Builder, context.ContextOuterClass.DeviceIdOrBuilder>(
+                  getDeviceId(),
+                  getParentForChildren(),
+                  isClean());
+          deviceId_ = null;
+        }
+        return deviceIdBuilder_;
+      }
+
+      private context.ContextOuterClass.EndPointId endpointId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.EndPointId, context.ContextOuterClass.EndPointId.Builder, context.ContextOuterClass.EndPointIdOrBuilder> endpointIdBuilder_;
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       * @return Whether the endpointId field is set.
+       */
+      public boolean hasEndpointId() {
+        return endpointIdBuilder_ != null || endpointId_ != null;
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       * @return The endpointId.
+       */
+      public context.ContextOuterClass.EndPointId getEndpointId() {
+        if (endpointIdBuilder_ == null) {
+          return endpointId_ == null ? context.ContextOuterClass.EndPointId.getDefaultInstance() : endpointId_;
+        } else {
+          return endpointIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public Builder setEndpointId(context.ContextOuterClass.EndPointId value) {
+        if (endpointIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          endpointId_ = value;
+          onChanged();
+        } else {
+          endpointIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public Builder setEndpointId(
+          context.ContextOuterClass.EndPointId.Builder builderForValue) {
+        if (endpointIdBuilder_ == null) {
+          endpointId_ = builderForValue.build();
+          onChanged();
+        } else {
+          endpointIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public Builder mergeEndpointId(context.ContextOuterClass.EndPointId value) {
+        if (endpointIdBuilder_ == null) {
+          if (endpointId_ != null) {
+            endpointId_ =
+              context.ContextOuterClass.EndPointId.newBuilder(endpointId_).mergeFrom(value).buildPartial();
+          } else {
+            endpointId_ = value;
+          }
+          onChanged();
+        } else {
+          endpointIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public Builder clearEndpointId() {
+        if (endpointIdBuilder_ == null) {
+          endpointId_ = null;
+          onChanged();
+        } else {
+          endpointId_ = null;
+          endpointIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public context.ContextOuterClass.EndPointId.Builder getEndpointIdBuilder() {
+        
+        onChanged();
+        return getEndpointIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      public context.ContextOuterClass.EndPointIdOrBuilder getEndpointIdOrBuilder() {
+        if (endpointIdBuilder_ != null) {
+          return endpointIdBuilder_.getMessageOrBuilder();
+        } else {
+          return endpointId_ == null ?
+              context.ContextOuterClass.EndPointId.getDefaultInstance() : endpointId_;
+        }
+      }
+      /**
+       * <code>.context.EndPointId endpoint_id = 4;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.EndPointId, context.ContextOuterClass.EndPointId.Builder, context.ContextOuterClass.EndPointIdOrBuilder> 
+          getEndpointIdFieldBuilder() {
+        if (endpointIdBuilder_ == null) {
+          endpointIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              context.ContextOuterClass.EndPointId, context.ContextOuterClass.EndPointId.Builder, context.ContextOuterClass.EndPointIdOrBuilder>(
+                  getEndpointId(),
+                  getParentForChildren(),
+                  isClean());
+          endpointId_ = null;
+        }
+        return endpointIdBuilder_;
+      }
+
+      private context.ContextOuterClass.ServiceId serviceId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.ServiceId, context.ContextOuterClass.ServiceId.Builder, context.ContextOuterClass.ServiceIdOrBuilder> serviceIdBuilder_;
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       * @return Whether the serviceId field is set.
+       */
+      public boolean hasServiceId() {
+        return serviceIdBuilder_ != null || serviceId_ != null;
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       * @return The serviceId.
+       */
+      public context.ContextOuterClass.ServiceId getServiceId() {
+        if (serviceIdBuilder_ == null) {
+          return serviceId_ == null ? context.ContextOuterClass.ServiceId.getDefaultInstance() : serviceId_;
+        } else {
+          return serviceIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public Builder setServiceId(context.ContextOuterClass.ServiceId value) {
+        if (serviceIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          serviceId_ = value;
+          onChanged();
+        } else {
+          serviceIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public Builder setServiceId(
+          context.ContextOuterClass.ServiceId.Builder builderForValue) {
+        if (serviceIdBuilder_ == null) {
+          serviceId_ = builderForValue.build();
+          onChanged();
+        } else {
+          serviceIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public Builder mergeServiceId(context.ContextOuterClass.ServiceId value) {
+        if (serviceIdBuilder_ == null) {
+          if (serviceId_ != null) {
+            serviceId_ =
+              context.ContextOuterClass.ServiceId.newBuilder(serviceId_).mergeFrom(value).buildPartial();
+          } else {
+            serviceId_ = value;
+          }
+          onChanged();
+        } else {
+          serviceIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public Builder clearServiceId() {
+        if (serviceIdBuilder_ == null) {
+          serviceId_ = null;
+          onChanged();
+        } else {
+          serviceId_ = null;
+          serviceIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public context.ContextOuterClass.ServiceId.Builder getServiceIdBuilder() {
+        
+        onChanged();
+        return getServiceIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      public context.ContextOuterClass.ServiceIdOrBuilder getServiceIdOrBuilder() {
+        if (serviceIdBuilder_ != null) {
+          return serviceIdBuilder_.getMessageOrBuilder();
+        } else {
+          return serviceId_ == null ?
+              context.ContextOuterClass.ServiceId.getDefaultInstance() : serviceId_;
+        }
+      }
+      /**
+       * <pre>
+       *  context.SliceId    slice_id    = 6;
+       * </pre>
+       *
+       * <code>.context.ServiceId service_id = 5;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.ServiceId, context.ContextOuterClass.ServiceId.Builder, context.ContextOuterClass.ServiceIdOrBuilder> 
+          getServiceIdFieldBuilder() {
+        if (serviceIdBuilder_ == null) {
+          serviceIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              context.ContextOuterClass.ServiceId, context.ContextOuterClass.ServiceId.Builder, context.ContextOuterClass.ServiceIdOrBuilder>(
+                  getServiceId(),
+                  getParentForChildren(),
+                  isClean());
+          serviceId_ = null;
+        }
+        return serviceIdBuilder_;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.KpiDescriptor)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.KpiDescriptor)
+    private static final monitoring.Monitoring.KpiDescriptor DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.KpiDescriptor();
+    }
+
+    public static monitoring.Monitoring.KpiDescriptor getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<KpiDescriptor>
+        PARSER = new com.google.protobuf.AbstractParser<KpiDescriptor>() {
+      @java.lang.Override
+      public KpiDescriptor parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new KpiDescriptor(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<KpiDescriptor> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<KpiDescriptor> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.KpiDescriptor getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
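The KpiDescriptor message closed above exposes the usual protoc builder and byte[] parse helpers (newBuilder(), setKpiDescription(), setKpiSampleType(), setDeviceId(), parseFrom()). As a minimal editorial sketch, not part of the generated file or of this commit, and with field values chosen purely for illustration, a caller could build and round-trip a descriptor like this:

    import context.ContextOuterClass.DeviceId;
    import kpi_sample_types.KpiSampleTypes.KpiSampleType;
    import monitoring.Monitoring.KpiDescriptor;

    public class KpiDescriptorExample {
        public static void main(String[] args) throws com.google.protobuf.InvalidProtocolBufferException {
            // Build a descriptor through the generated builder; the values are illustrative only.
            KpiDescriptor descriptor = KpiDescriptor.newBuilder()
                .setKpiDescription("packets received on endpoint")              // string kpi_description = 1
                .setKpiSampleType(KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED) // kpi_sample_type = 2
                .setDeviceId(DeviceId.getDefaultInstance())                     // .context.DeviceId device_id = 3
                .build();

            // Round-trip through the generated serialization and parsing helpers.
            byte[] wire = descriptor.toByteArray();
            KpiDescriptor parsed = KpiDescriptor.parseFrom(wire);
            System.out.println(parsed.getKpiDescription() + " / " + parsed.getKpiSampleType());
        }
    }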
+  public interface MonitorKpiRequestOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.MonitorKpiRequest)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    boolean hasKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    monitoring.Monitoring.KpiId getKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder();
+
+    /**
+     * <code>float sampling_duration_s = 2;</code>
+     * @return The samplingDurationS.
+     */
+    float getSamplingDurationS();
+
+    /**
+     * <code>float sampling_interval_s = 3;</code>
+     * @return The samplingIntervalS.
+     */
+    float getSamplingIntervalS();
+  }
+  /**
+   * Protobuf type {@code monitoring.MonitorKpiRequest}
+   */
+  public static final class MonitorKpiRequest extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.MonitorKpiRequest)
+      MonitorKpiRequestOrBuilder {
+  private static final long serialVersionUID = 0L;
+    // Use MonitorKpiRequest.newBuilder() to construct.
+    private MonitorKpiRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private MonitorKpiRequest() {
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new MonitorKpiRequest();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private MonitorKpiRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              monitoring.Monitoring.KpiId.Builder subBuilder = null;
+              if (kpiId_ != null) {
+                subBuilder = kpiId_.toBuilder();
+              }
+              kpiId_ = input.readMessage(monitoring.Monitoring.KpiId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiId_);
+                kpiId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 21: {
+
+              samplingDurationS_ = input.readFloat();
+              break;
+            }
+            case 29: {
+
+              samplingIntervalS_ = input.readFloat();
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_MonitorKpiRequest_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_MonitorKpiRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.MonitorKpiRequest.class, monitoring.Monitoring.MonitorKpiRequest.Builder.class);
+    }
+
+    public static final int KPI_ID_FIELD_NUMBER = 1;
+    private monitoring.Monitoring.KpiId kpiId_;
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiId() {
+      return kpiId_ != null;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiId getKpiId() {
+      return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+      return getKpiId();
+    }
+
+    public static final int SAMPLING_DURATION_S_FIELD_NUMBER = 2;
+    private float samplingDurationS_;
+    /**
+     * <code>float sampling_duration_s = 2;</code>
+     * @return The samplingDurationS.
+     */
+    @java.lang.Override
+    public float getSamplingDurationS() {
+      return samplingDurationS_;
+    }
+
+    public static final int SAMPLING_INTERVAL_S_FIELD_NUMBER = 3;
+    private float samplingIntervalS_;
+    /**
+     * <code>float sampling_interval_s = 3;</code>
+     * @return The samplingIntervalS.
+     */
+    @java.lang.Override
+    public float getSamplingIntervalS() {
+      return samplingIntervalS_;
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (kpiId_ != null) {
+        output.writeMessage(1, getKpiId());
+      }
+      if (samplingDurationS_ != 0F) {
+        output.writeFloat(2, samplingDurationS_);
+      }
+      if (samplingIntervalS_ != 0F) {
+        output.writeFloat(3, samplingIntervalS_);
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (kpiId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, getKpiId());
+      }
+      if (samplingDurationS_ != 0F) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(2, samplingDurationS_);
+      }
+      if (samplingIntervalS_ != 0F) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(3, samplingIntervalS_);
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.MonitorKpiRequest)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.MonitorKpiRequest other = (monitoring.Monitoring.MonitorKpiRequest) obj;
+
+      if (hasKpiId() != other.hasKpiId()) return false;
+      if (hasKpiId()) {
+        if (!getKpiId()
+            .equals(other.getKpiId())) return false;
+      }
+      if (java.lang.Float.floatToIntBits(getSamplingDurationS())
+          != java.lang.Float.floatToIntBits(
+              other.getSamplingDurationS())) return false;
+      if (java.lang.Float.floatToIntBits(getSamplingIntervalS())
+          != java.lang.Float.floatToIntBits(
+              other.getSamplingIntervalS())) return false;
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasKpiId()) {
+        hash = (37 * hash) + KPI_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiId().hashCode();
+      }
+      hash = (37 * hash) + SAMPLING_DURATION_S_FIELD_NUMBER;
+      hash = (53 * hash) + java.lang.Float.floatToIntBits(
+          getSamplingDurationS());
+      hash = (37 * hash) + SAMPLING_INTERVAL_S_FIELD_NUMBER;
+      hash = (53 * hash) + java.lang.Float.floatToIntBits(
+          getSamplingIntervalS());
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.MonitorKpiRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.MonitorKpiRequest prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.MonitorKpiRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.MonitorKpiRequest)
+        monitoring.Monitoring.MonitorKpiRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_MonitorKpiRequest_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_MonitorKpiRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.MonitorKpiRequest.class, monitoring.Monitoring.MonitorKpiRequest.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.MonitorKpiRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+        samplingDurationS_ = 0F;
+
+        samplingIntervalS_ = 0F;
+
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_MonitorKpiRequest_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.MonitorKpiRequest getDefaultInstanceForType() {
+        return monitoring.Monitoring.MonitorKpiRequest.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.MonitorKpiRequest build() {
+        monitoring.Monitoring.MonitorKpiRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.MonitorKpiRequest buildPartial() {
+        monitoring.Monitoring.MonitorKpiRequest result = new monitoring.Monitoring.MonitorKpiRequest(this);
+        if (kpiIdBuilder_ == null) {
+          result.kpiId_ = kpiId_;
+        } else {
+          result.kpiId_ = kpiIdBuilder_.build();
+        }
+        result.samplingDurationS_ = samplingDurationS_;
+        result.samplingIntervalS_ = samplingIntervalS_;
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.MonitorKpiRequest) {
+          return mergeFrom((monitoring.Monitoring.MonitorKpiRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.MonitorKpiRequest other) {
+        if (other == monitoring.Monitoring.MonitorKpiRequest.getDefaultInstance()) return this;
+        if (other.hasKpiId()) {
+          mergeKpiId(other.getKpiId());
+        }
+        if (other.getSamplingDurationS() != 0F) {
+          setSamplingDurationS(other.getSamplingDurationS());
+        }
+        if (other.getSamplingIntervalS() != 0F) {
+          setSamplingIntervalS(other.getSamplingIntervalS());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.MonitorKpiRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.MonitorKpiRequest) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      private monitoring.Monitoring.KpiId kpiId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> kpiIdBuilder_;
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return Whether the kpiId field is set.
+       */
+      public boolean hasKpiId() {
+        return kpiIdBuilder_ != null || kpiId_ != null;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return The kpiId.
+       */
+      public monitoring.Monitoring.KpiId getKpiId() {
+        if (kpiIdBuilder_ == null) {
+          return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        } else {
+          return kpiIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiId_ = value;
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(
+          monitoring.Monitoring.KpiId.Builder builderForValue) {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder mergeKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (kpiId_ != null) {
+            kpiId_ =
+              monitoring.Monitoring.KpiId.newBuilder(kpiId_).mergeFrom(value).buildPartial();
+          } else {
+            kpiId_ = value;
+          }
+          onChanged();
+        } else {
+          kpiIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder clearKpiId() {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+          onChanged();
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiId.Builder getKpiIdBuilder() {
+        
+        onChanged();
+        return getKpiIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+        if (kpiIdBuilder_ != null) {
+          return kpiIdBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiId_ == null ?
+              monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> 
+          getKpiIdFieldBuilder() {
+        if (kpiIdBuilder_ == null) {
+          kpiIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder>(
+                  getKpiId(),
+                  getParentForChildren(),
+                  isClean());
+          kpiId_ = null;
+        }
+        return kpiIdBuilder_;
+      }
+
+      private float samplingDurationS_ ;
+      /**
+       * <code>float sampling_duration_s = 2;</code>
+       * @return The samplingDurationS.
+       */
+      @java.lang.Override
+      public float getSamplingDurationS() {
+        return samplingDurationS_;
+      }
+      /**
+       * <code>float sampling_duration_s = 2;</code>
+       * @param value The samplingDurationS to set.
+       * @return This builder for chaining.
+       */
+      public Builder setSamplingDurationS(float value) {
+        
+        samplingDurationS_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>float sampling_duration_s = 2;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearSamplingDurationS() {
+        
+        samplingDurationS_ = 0F;
+        onChanged();
+        return this;
+      }
+
+      private float samplingIntervalS_ ;
+      /**
+       * <code>float sampling_interval_s = 3;</code>
+       * @return The samplingIntervalS.
+       */
+      @java.lang.Override
+      public float getSamplingIntervalS() {
+        return samplingIntervalS_;
+      }
+      /**
+       * <code>float sampling_interval_s = 3;</code>
+       * @param value The samplingIntervalS to set.
+       * @return This builder for chaining.
+       */
+      public Builder setSamplingIntervalS(float value) {
+        
+        samplingIntervalS_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>float sampling_interval_s = 3;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearSamplingIntervalS() {
+        
+        samplingIntervalS_ = 0F;
+        onChanged();
+        return this;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.MonitorKpiRequest)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.MonitorKpiRequest)
+    private static final monitoring.Monitoring.MonitorKpiRequest DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.MonitorKpiRequest();
+    }
+
+    public static monitoring.Monitoring.MonitorKpiRequest getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<MonitorKpiRequest>
+        PARSER = new com.google.protobuf.AbstractParser<MonitorKpiRequest>() {
+      @java.lang.Override
+      public MonitorKpiRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new MonitorKpiRequest(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<MonitorKpiRequest> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<MonitorKpiRequest> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.MonitorKpiRequest getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
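+
+  // Illustrative usage sketch (hypothetical values): a MonitorKpiRequest is normally
+  // assembled through the generated builder shown above, e.g.
+  //   monitoring.Monitoring.MonitorKpiRequest request =
+  //       monitoring.Monitoring.MonitorKpiRequest.newBuilder()
+  //           .setKpiId(monitoring.Monitoring.KpiId.getDefaultInstance())
+  //           .setSamplingDurationS(60F)   // hypothetical: monitor for 60 seconds
+  //           .setSamplingIntervalS(1F)    // hypothetical: sample once per second
+  //           .build();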
+
+  public interface KpiIdOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.KpiId)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    boolean hasKpiId();
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    context.ContextOuterClass.Uuid getKpiId();
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     */
+    context.ContextOuterClass.UuidOrBuilder getKpiIdOrBuilder();
+  }
+  /**
+   * Protobuf type {@code monitoring.KpiId}
+   */
+  public static final class KpiId extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.KpiId)
+      KpiIdOrBuilder {
+    private static final long serialVersionUID = 0L;
+    // Use KpiId.newBuilder() to construct.
+    private KpiId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private KpiId() {
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new KpiId();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private KpiId(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              context.ContextOuterClass.Uuid.Builder subBuilder = null;
+              if (kpiId_ != null) {
+                subBuilder = kpiId_.toBuilder();
+              }
+              kpiId_ = input.readMessage(context.ContextOuterClass.Uuid.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiId_);
+                kpiId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiId_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiId_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.KpiId.class, monitoring.Monitoring.KpiId.Builder.class);
+    }
+
+    public static final int KPI_ID_FIELD_NUMBER = 1;
+    private context.ContextOuterClass.Uuid kpiId_;
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiId() {
+      return kpiId_ != null;
+    }
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.Uuid getKpiId() {
+      return kpiId_ == null ? context.ContextOuterClass.Uuid.getDefaultInstance() : kpiId_;
+    }
+    /**
+     * <code>.context.Uuid kpi_id = 1;</code>
+     */
+    @java.lang.Override
+    public context.ContextOuterClass.UuidOrBuilder getKpiIdOrBuilder() {
+      return getKpiId();
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (kpiId_ != null) {
+        output.writeMessage(1, getKpiId());
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (kpiId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, getKpiId());
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.KpiId)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.KpiId other = (monitoring.Monitoring.KpiId) obj;
+
+      if (hasKpiId() != other.hasKpiId()) return false;
+      if (hasKpiId()) {
+        if (!getKpiId()
+            .equals(other.getKpiId())) return false;
+      }
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasKpiId()) {
+        hash = (37 * hash) + KPI_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiId().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.KpiId parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiId parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiId parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiId parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.KpiId prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.KpiId}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.KpiId)
+        monitoring.Monitoring.KpiIdOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiId_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiId_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.KpiId.class, monitoring.Monitoring.KpiId.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.KpiId.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiId_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiId getDefaultInstanceForType() {
+        return monitoring.Monitoring.KpiId.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiId build() {
+        monitoring.Monitoring.KpiId result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiId buildPartial() {
+        monitoring.Monitoring.KpiId result = new monitoring.Monitoring.KpiId(this);
+        if (kpiIdBuilder_ == null) {
+          result.kpiId_ = kpiId_;
+        } else {
+          result.kpiId_ = kpiIdBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.KpiId) {
+          return mergeFrom((monitoring.Monitoring.KpiId)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.KpiId other) {
+        if (other == monitoring.Monitoring.KpiId.getDefaultInstance()) return this;
+        if (other.hasKpiId()) {
+          mergeKpiId(other.getKpiId());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.KpiId parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.KpiId) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      private context.ContextOuterClass.Uuid kpiId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.Uuid, context.ContextOuterClass.Uuid.Builder, context.ContextOuterClass.UuidOrBuilder> kpiIdBuilder_;
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       * @return Whether the kpiId field is set.
+       */
+      public boolean hasKpiId() {
+        return kpiIdBuilder_ != null || kpiId_ != null;
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       * @return The kpiId.
+       */
+      public context.ContextOuterClass.Uuid getKpiId() {
+        if (kpiIdBuilder_ == null) {
+          return kpiId_ == null ? context.ContextOuterClass.Uuid.getDefaultInstance() : kpiId_;
+        } else {
+          return kpiIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public Builder setKpiId(context.ContextOuterClass.Uuid value) {
+        if (kpiIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiId_ = value;
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public Builder setKpiId(
+          context.ContextOuterClass.Uuid.Builder builderForValue) {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public Builder mergeKpiId(context.ContextOuterClass.Uuid value) {
+        if (kpiIdBuilder_ == null) {
+          if (kpiId_ != null) {
+            kpiId_ =
+              context.ContextOuterClass.Uuid.newBuilder(kpiId_).mergeFrom(value).buildPartial();
+          } else {
+            kpiId_ = value;
+          }
+          onChanged();
+        } else {
+          kpiIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public Builder clearKpiId() {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+          onChanged();
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public context.ContextOuterClass.Uuid.Builder getKpiIdBuilder() {
+        
+        onChanged();
+        return getKpiIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      public context.ContextOuterClass.UuidOrBuilder getKpiIdOrBuilder() {
+        if (kpiIdBuilder_ != null) {
+          return kpiIdBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiId_ == null ?
+              context.ContextOuterClass.Uuid.getDefaultInstance() : kpiId_;
+        }
+      }
+      /**
+       * <code>.context.Uuid kpi_id = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          context.ContextOuterClass.Uuid, context.ContextOuterClass.Uuid.Builder, context.ContextOuterClass.UuidOrBuilder> 
+          getKpiIdFieldBuilder() {
+        if (kpiIdBuilder_ == null) {
+          kpiIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              context.ContextOuterClass.Uuid, context.ContextOuterClass.Uuid.Builder, context.ContextOuterClass.UuidOrBuilder>(
+                  getKpiId(),
+                  getParentForChildren(),
+                  isClean());
+          kpiId_ = null;
+        }
+        return kpiIdBuilder_;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.KpiId)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.KpiId)
+    private static final monitoring.Monitoring.KpiId DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.KpiId();
+    }
+
+    public static monitoring.Monitoring.KpiId getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<KpiId>
+        PARSER = new com.google.protobuf.AbstractParser<KpiId>() {
+      @java.lang.Override
+      public KpiId parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new KpiId(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<KpiId> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<KpiId> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.KpiId getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
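+
+  // Illustrative usage sketch: a KpiId simply wraps a context.Uuid identifier, e.g.
+  //   monitoring.Monitoring.KpiId kpiId = monitoring.Monitoring.KpiId.newBuilder()
+  //       .setKpiId(context.ContextOuterClass.Uuid.getDefaultInstance())
+  //       .build();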
+
+  public interface KpiOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.Kpi)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    boolean hasKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    monitoring.Monitoring.KpiId getKpiId();
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder();
+
+    /**
+     * <code>string timestamp = 2;</code>
+     * @return The timestamp.
+     */
+    java.lang.String getTimestamp();
+    /**
+     * <code>string timestamp = 2;</code>
+     * @return The bytes for timestamp.
+     */
+    com.google.protobuf.ByteString
+        getTimestampBytes();
+
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     * @return Whether the kpiValue field is set.
+     */
+    boolean hasKpiValue();
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     * @return The kpiValue.
+     */
+    monitoring.Monitoring.KpiValue getKpiValue();
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     */
+    monitoring.Monitoring.KpiValueOrBuilder getKpiValueOrBuilder();
+  }
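+
+  // Illustrative usage sketch (hypothetical values): a Kpi combines a KpiId, a string
+  // timestamp and a KpiValue (field numbers 1, 2 and 4), e.g.
+  //   monitoring.Monitoring.Kpi kpi = monitoring.Monitoring.Kpi.newBuilder()
+  //       .setKpiId(monitoring.Monitoring.KpiId.getDefaultInstance())
+  //       .setTimestamp("2021-01-01T00:00:00Z")   // hypothetical timestamp string
+  //       .setKpiValue(monitoring.Monitoring.KpiValue.getDefaultInstance())
+  //       .build();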
+  /**
+   * Protobuf type {@code monitoring.Kpi}
+   */
+  public static final class Kpi extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.Kpi)
+      KpiOrBuilder {
+    private static final long serialVersionUID = 0L;
+    // Use Kpi.newBuilder() to construct.
+    private Kpi(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private Kpi() {
+      timestamp_ = "";
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new Kpi();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Kpi(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              monitoring.Monitoring.KpiId.Builder subBuilder = null;
+              if (kpiId_ != null) {
+                subBuilder = kpiId_.toBuilder();
+              }
+              kpiId_ = input.readMessage(monitoring.Monitoring.KpiId.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiId_);
+                kpiId_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 18: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              timestamp_ = s;
+              break;
+            }
+            case 34: {
+              monitoring.Monitoring.KpiValue.Builder subBuilder = null;
+              if (kpiValue_ != null) {
+                subBuilder = kpiValue_.toBuilder();
+              }
+              kpiValue_ = input.readMessage(monitoring.Monitoring.KpiValue.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(kpiValue_);
+                kpiValue_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_Kpi_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_Kpi_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.Kpi.class, monitoring.Monitoring.Kpi.Builder.class);
+    }
+
+    public static final int KPI_ID_FIELD_NUMBER = 1;
+    private monitoring.Monitoring.KpiId kpiId_;
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return Whether the kpiId field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiId() {
+      return kpiId_ != null;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     * @return The kpiId.
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiId getKpiId() {
+      return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+    }
+    /**
+     * <code>.monitoring.KpiId kpi_id = 1;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+      return getKpiId();
+    }
+
+    public static final int TIMESTAMP_FIELD_NUMBER = 2;
+    private volatile java.lang.Object timestamp_;
+    /**
+     * <code>string timestamp = 2;</code>
+     * @return The timestamp.
+     */
+    @java.lang.Override
+    public java.lang.String getTimestamp() {
+      java.lang.Object ref = timestamp_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        timestamp_ = s;
+        return s;
+      }
+    }
+    /**
+     * <code>string timestamp = 2;</code>
+     * @return The bytes for timestamp.
+     */
+    @java.lang.Override
+    public com.google.protobuf.ByteString
+        getTimestampBytes() {
+      java.lang.Object ref = timestamp_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        timestamp_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int KPI_VALUE_FIELD_NUMBER = 4;
+    private monitoring.Monitoring.KpiValue kpiValue_;
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     * @return Whether the kpiValue field is set.
+     */
+    @java.lang.Override
+    public boolean hasKpiValue() {
+      return kpiValue_ != null;
+    }
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     * @return The kpiValue.
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiValue getKpiValue() {
+      return kpiValue_ == null ? monitoring.Monitoring.KpiValue.getDefaultInstance() : kpiValue_;
+    }
+    /**
+     * <code>.monitoring.KpiValue kpi_value = 4;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiValueOrBuilder getKpiValueOrBuilder() {
+      return getKpiValue();
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (kpiId_ != null) {
+        output.writeMessage(1, getKpiId());
+      }
+      if (!getTimestampBytes().isEmpty()) {
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, timestamp_);
+      }
+      if (kpiValue_ != null) {
+        output.writeMessage(4, getKpiValue());
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (kpiId_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, getKpiId());
+      }
+      if (!getTimestampBytes().isEmpty()) {
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, timestamp_);
+      }
+      if (kpiValue_ != null) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(4, getKpiValue());
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.Kpi)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.Kpi other = (monitoring.Monitoring.Kpi) obj;
+
+      if (hasKpiId() != other.hasKpiId()) return false;
+      if (hasKpiId()) {
+        if (!getKpiId()
+            .equals(other.getKpiId())) return false;
+      }
+      if (!getTimestamp()
+          .equals(other.getTimestamp())) return false;
+      if (hasKpiValue() != other.hasKpiValue()) return false;
+      if (hasKpiValue()) {
+        if (!getKpiValue()
+            .equals(other.getKpiValue())) return false;
+      }
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (hasKpiId()) {
+        hash = (37 * hash) + KPI_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiId().hashCode();
+      }
+      hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
+      hash = (53 * hash) + getTimestamp().hashCode();
+      if (hasKpiValue()) {
+        hash = (37 * hash) + KPI_VALUE_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiValue().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.Kpi parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.Kpi parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.Kpi parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.Kpi parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.Kpi prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.Kpi}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.Kpi)
+        monitoring.Monitoring.KpiOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_Kpi_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_Kpi_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.Kpi.class, monitoring.Monitoring.Kpi.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.Kpi.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+        timestamp_ = "";
+
+        if (kpiValueBuilder_ == null) {
+          kpiValue_ = null;
+        } else {
+          kpiValue_ = null;
+          kpiValueBuilder_ = null;
+        }
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_Kpi_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.Kpi getDefaultInstanceForType() {
+        return monitoring.Monitoring.Kpi.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.Kpi build() {
+        monitoring.Monitoring.Kpi result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.Kpi buildPartial() {
+        monitoring.Monitoring.Kpi result = new monitoring.Monitoring.Kpi(this);
+        if (kpiIdBuilder_ == null) {
+          result.kpiId_ = kpiId_;
+        } else {
+          result.kpiId_ = kpiIdBuilder_.build();
+        }
+        result.timestamp_ = timestamp_;
+        if (kpiValueBuilder_ == null) {
+          result.kpiValue_ = kpiValue_;
+        } else {
+          result.kpiValue_ = kpiValueBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.Kpi) {
+          return mergeFrom((monitoring.Monitoring.Kpi)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.Kpi other) {
+        if (other == monitoring.Monitoring.Kpi.getDefaultInstance()) return this;
+        if (other.hasKpiId()) {
+          mergeKpiId(other.getKpiId());
+        }
+        if (!other.getTimestamp().isEmpty()) {
+          timestamp_ = other.timestamp_;
+          onChanged();
+        }
+        if (other.hasKpiValue()) {
+          mergeKpiValue(other.getKpiValue());
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.Kpi parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.Kpi) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      private monitoring.Monitoring.KpiId kpiId_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> kpiIdBuilder_;
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return Whether the kpiId field is set.
+       */
+      public boolean hasKpiId() {
+        return kpiIdBuilder_ != null || kpiId_ != null;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       * @return The kpiId.
+       */
+      public monitoring.Monitoring.KpiId getKpiId() {
+        if (kpiIdBuilder_ == null) {
+          return kpiId_ == null ? monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        } else {
+          return kpiIdBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiId_ = value;
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder setKpiId(
+          monitoring.Monitoring.KpiId.Builder builderForValue) {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiIdBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder mergeKpiId(monitoring.Monitoring.KpiId value) {
+        if (kpiIdBuilder_ == null) {
+          if (kpiId_ != null) {
+            kpiId_ =
+              monitoring.Monitoring.KpiId.newBuilder(kpiId_).mergeFrom(value).buildPartial();
+          } else {
+            kpiId_ = value;
+          }
+          onChanged();
+        } else {
+          kpiIdBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public Builder clearKpiId() {
+        if (kpiIdBuilder_ == null) {
+          kpiId_ = null;
+          onChanged();
+        } else {
+          kpiId_ = null;
+          kpiIdBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiId.Builder getKpiIdBuilder() {
+        
+        onChanged();
+        return getKpiIdFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      public monitoring.Monitoring.KpiIdOrBuilder getKpiIdOrBuilder() {
+        if (kpiIdBuilder_ != null) {
+          return kpiIdBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiId_ == null ?
+              monitoring.Monitoring.KpiId.getDefaultInstance() : kpiId_;
+        }
+      }
+      /**
+       * <code>.monitoring.KpiId kpi_id = 1;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder> 
+          getKpiIdFieldBuilder() {
+        if (kpiIdBuilder_ == null) {
+          kpiIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiId.Builder, monitoring.Monitoring.KpiIdOrBuilder>(
+                  getKpiId(),
+                  getParentForChildren(),
+                  isClean());
+          kpiId_ = null;
+        }
+        return kpiIdBuilder_;
+      }
+
+      private java.lang.Object timestamp_ = "";
+      /**
+       * <code>string timestamp = 2;</code>
+       * @return The timestamp.
+       */
+      public java.lang.String getTimestamp() {
+        java.lang.Object ref = timestamp_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          timestamp_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>string timestamp = 2;</code>
+       * @return The bytes for timestamp.
+       */
+      public com.google.protobuf.ByteString
+          getTimestampBytes() {
+        java.lang.Object ref = timestamp_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          timestamp_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>string timestamp = 2;</code>
+       * @param value The timestamp to set.
+       * @return This builder for chaining.
+       */
+      public Builder setTimestamp(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        timestamp_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>string timestamp = 2;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearTimestamp() {
+        
+        timestamp_ = getDefaultInstance().getTimestamp();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>string timestamp = 2;</code>
+       * @param value The bytes for timestamp to set.
+       * @return This builder for chaining.
+       */
+      public Builder setTimestampBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+        timestamp_ = value;
+        onChanged();
+        return this;
+      }
+
+      private monitoring.Monitoring.KpiValue kpiValue_;
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiValue, monitoring.Monitoring.KpiValue.Builder, monitoring.Monitoring.KpiValueOrBuilder> kpiValueBuilder_;
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       * @return Whether the kpiValue field is set.
+       */
+      public boolean hasKpiValue() {
+        return kpiValueBuilder_ != null || kpiValue_ != null;
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       * @return The kpiValue.
+       */
+      public monitoring.Monitoring.KpiValue getKpiValue() {
+        if (kpiValueBuilder_ == null) {
+          return kpiValue_ == null ? monitoring.Monitoring.KpiValue.getDefaultInstance() : kpiValue_;
+        } else {
+          return kpiValueBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public Builder setKpiValue(monitoring.Monitoring.KpiValue value) {
+        if (kpiValueBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          kpiValue_ = value;
+          onChanged();
+        } else {
+          kpiValueBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public Builder setKpiValue(
+          monitoring.Monitoring.KpiValue.Builder builderForValue) {
+        if (kpiValueBuilder_ == null) {
+          kpiValue_ = builderForValue.build();
+          onChanged();
+        } else {
+          kpiValueBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public Builder mergeKpiValue(monitoring.Monitoring.KpiValue value) {
+        if (kpiValueBuilder_ == null) {
+          if (kpiValue_ != null) {
+            kpiValue_ =
+              monitoring.Monitoring.KpiValue.newBuilder(kpiValue_).mergeFrom(value).buildPartial();
+          } else {
+            kpiValue_ = value;
+          }
+          onChanged();
+        } else {
+          kpiValueBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public Builder clearKpiValue() {
+        if (kpiValueBuilder_ == null) {
+          kpiValue_ = null;
+          onChanged();
+        } else {
+          kpiValue_ = null;
+          kpiValueBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public monitoring.Monitoring.KpiValue.Builder getKpiValueBuilder() {
+        
+        onChanged();
+        return getKpiValueFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      public monitoring.Monitoring.KpiValueOrBuilder getKpiValueOrBuilder() {
+        if (kpiValueBuilder_ != null) {
+          return kpiValueBuilder_.getMessageOrBuilder();
+        } else {
+          return kpiValue_ == null ?
+              monitoring.Monitoring.KpiValue.getDefaultInstance() : kpiValue_;
+        }
+      }
+      /**
+       * <code>.monitoring.KpiValue kpi_value = 4;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          monitoring.Monitoring.KpiValue, monitoring.Monitoring.KpiValue.Builder, monitoring.Monitoring.KpiValueOrBuilder> 
+          getKpiValueFieldBuilder() {
+        if (kpiValueBuilder_ == null) {
+          kpiValueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              monitoring.Monitoring.KpiValue, monitoring.Monitoring.KpiValue.Builder, monitoring.Monitoring.KpiValueOrBuilder>(
+                  getKpiValue(),
+                  getParentForChildren(),
+                  isClean());
+          kpiValue_ = null;
+        }
+        return kpiValueBuilder_;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.Kpi)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.Kpi)
+    private static final monitoring.Monitoring.Kpi DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.Kpi();
+    }
+
+    public static monitoring.Monitoring.Kpi getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<Kpi>
+        PARSER = new com.google.protobuf.AbstractParser<Kpi>() {
+      @java.lang.Override
+      public Kpi parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Kpi(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<Kpi> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Kpi> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.Kpi getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
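+
+  // Illustrative usage sketch (hand-written, not protoc output): constructing a
+  // Kpi through the generated Builder above. The timestamp string and float
+  // value are arbitrary example data; KpiId is left at its default instance
+  // because its fields are declared elsewhere in this class.
+  //
+  //   monitoring.Monitoring.Kpi kpi = monitoring.Monitoring.Kpi.newBuilder()
+  //       .setKpiId(monitoring.Monitoring.KpiId.getDefaultInstance())
+  //       .setTimestamp("2021-01-01T00:00:00Z")
+  //       .setKpiValue(monitoring.Monitoring.KpiValue.newBuilder().setFloatVal(0.75f))
+  //       .build();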
+
+  public interface KpiValueOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.KpiValue)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>uint32 intVal = 1;</code>
+     * @return Whether the intVal field is set.
+     */
+    boolean hasIntVal();
+    /**
+     * <code>uint32 intVal = 1;</code>
+     * @return The intVal.
+     */
+    int getIntVal();
+
+    /**
+     * <code>float floatVal = 2;</code>
+     * @return Whether the floatVal field is set.
+     */
+    boolean hasFloatVal();
+    /**
+     * <code>float floatVal = 2;</code>
+     * @return The floatVal.
+     */
+    float getFloatVal();
+
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return Whether the stringVal field is set.
+     */
+    boolean hasStringVal();
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return The stringVal.
+     */
+    java.lang.String getStringVal();
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return The bytes for stringVal.
+     */
+    com.google.protobuf.ByteString
+        getStringValBytes();
+
+    /**
+     * <code>bool boolVal = 4;</code>
+     * @return Whether the boolVal field is set.
+     */
+    boolean hasBoolVal();
+    /**
+     * <code>bool boolVal = 4;</code>
+     * @return The boolVal.
+     */
+    boolean getBoolVal();
+
+    public monitoring.Monitoring.KpiValue.ValueCase getValueCase();
+  }
+  /**
+   * Protobuf type {@code monitoring.KpiValue}
+   */
+  public static final class KpiValue extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.KpiValue)
+      KpiValueOrBuilder {
+  private static final long serialVersionUID = 0L;
+    // Use KpiValue.newBuilder() to construct.
+    private KpiValue(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private KpiValue() {
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new KpiValue();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private KpiValue(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 8: {
+              valueCase_ = 1;
+              value_ = input.readUInt32();
+              break;
+            }
+            case 21: {
+              valueCase_ = 2;
+              value_ = input.readFloat();
+              break;
+            }
+            case 26: {
+              java.lang.String s = input.readStringRequireUtf8();
+              valueCase_ = 3;
+              value_ = s;
+              break;
+            }
+            case 32: {
+              valueCase_ = 4;
+              value_ = input.readBool();
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiValue_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiValue_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.KpiValue.class, monitoring.Monitoring.KpiValue.Builder.class);
+    }
+
+    private int valueCase_ = 0;
+    private java.lang.Object value_;
+    public enum ValueCase
+        implements com.google.protobuf.Internal.EnumLite,
+            com.google.protobuf.AbstractMessage.InternalOneOfEnum {
+      INTVAL(1),
+      FLOATVAL(2),
+      STRINGVAL(3),
+      BOOLVAL(4),
+      VALUE_NOT_SET(0);
+      private final int value;
+      private ValueCase(int value) {
+        this.value = value;
+      }
+      /**
+       * @param value The number of the enum to look for.
+       * @return The enum associated with the given number.
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static ValueCase valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static ValueCase forNumber(int value) {
+        switch (value) {
+          case 1: return INTVAL;
+          case 2: return FLOATVAL;
+          case 3: return STRINGVAL;
+          case 4: return BOOLVAL;
+          case 0: return VALUE_NOT_SET;
+          default: return null;
+        }
+      }
+      public int getNumber() {
+        return this.value;
+      }
+    };
+
+    public ValueCase
+    getValueCase() {
+      return ValueCase.forNumber(
+          valueCase_);
+    }
+
+    public static final int INTVAL_FIELD_NUMBER = 1;
+    /**
+     * <code>uint32 intVal = 1;</code>
+     * @return Whether the intVal field is set.
+     */
+    @java.lang.Override
+    public boolean hasIntVal() {
+      return valueCase_ == 1;
+    }
+    /**
+     * <code>uint32 intVal = 1;</code>
+     * @return The intVal.
+     */
+    @java.lang.Override
+    public int getIntVal() {
+      if (valueCase_ == 1) {
+        return (java.lang.Integer) value_;
+      }
+      return 0;
+    }
+
+    public static final int FLOATVAL_FIELD_NUMBER = 2;
+    /**
+     * <code>float floatVal = 2;</code>
+     * @return Whether the floatVal field is set.
+     */
+    @java.lang.Override
+    public boolean hasFloatVal() {
+      return valueCase_ == 2;
+    }
+    /**
+     * <code>float floatVal = 2;</code>
+     * @return The floatVal.
+     */
+    @java.lang.Override
+    public float getFloatVal() {
+      if (valueCase_ == 2) {
+        return (java.lang.Float) value_;
+      }
+      return 0F;
+    }
+
+    public static final int STRINGVAL_FIELD_NUMBER = 3;
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return Whether the stringVal field is set.
+     */
+    public boolean hasStringVal() {
+      return valueCase_ == 3;
+    }
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return The stringVal.
+     */
+    public java.lang.String getStringVal() {
+      java.lang.Object ref = "";
+      if (valueCase_ == 3) {
+        ref = value_;
+      }
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (valueCase_ == 3) {
+          value_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>string stringVal = 3;</code>
+     * @return The bytes for stringVal.
+     */
+    public com.google.protobuf.ByteString
+        getStringValBytes() {
+      java.lang.Object ref = "";
+      if (valueCase_ == 3) {
+        ref = value_;
+      }
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        if (valueCase_ == 3) {
+          value_ = b;
+        }
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int BOOLVAL_FIELD_NUMBER = 4;
+    /**
+     * <code>bool boolVal = 4;</code>
+     * @return Whether the boolVal field is set.
+     */
+    @java.lang.Override
+    public boolean hasBoolVal() {
+      return valueCase_ == 4;
+    }
+    /**
+     * <code>bool boolVal = 4;</code>
+     * @return The boolVal.
+     */
+    @java.lang.Override
+    public boolean getBoolVal() {
+      if (valueCase_ == 4) {
+        return (java.lang.Boolean) value_;
+      }
+      return false;
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      if (valueCase_ == 1) {
+        output.writeUInt32(
+            1, (int)((java.lang.Integer) value_));
+      }
+      if (valueCase_ == 2) {
+        output.writeFloat(
+            2, (float)((java.lang.Float) value_));
+      }
+      if (valueCase_ == 3) {
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 3, value_);
+      }
+      if (valueCase_ == 4) {
+        output.writeBool(
+            4, (boolean)((java.lang.Boolean) value_));
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (valueCase_ == 1) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(
+              1, (int)((java.lang.Integer) value_));
+      }
+      if (valueCase_ == 2) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(
+              2, (float)((java.lang.Float) value_));
+      }
+      if (valueCase_ == 3) {
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, value_);
+      }
+      if (valueCase_ == 4) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(
+              4, (boolean)((java.lang.Boolean) value_));
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.KpiValue)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.KpiValue other = (monitoring.Monitoring.KpiValue) obj;
+
+      if (!getValueCase().equals(other.getValueCase())) return false;
+      switch (valueCase_) {
+        case 1:
+          if (getIntVal()
+              != other.getIntVal()) return false;
+          break;
+        case 2:
+          if (java.lang.Float.floatToIntBits(getFloatVal())
+              != java.lang.Float.floatToIntBits(
+                  other.getFloatVal())) return false;
+          break;
+        case 3:
+          if (!getStringVal()
+              .equals(other.getStringVal())) return false;
+          break;
+        case 4:
+          if (getBoolVal()
+              != other.getBoolVal()) return false;
+          break;
+        case 0:
+        default:
+      }
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      switch (valueCase_) {
+        case 1:
+          hash = (37 * hash) + INTVAL_FIELD_NUMBER;
+          hash = (53 * hash) + getIntVal();
+          break;
+        case 2:
+          hash = (37 * hash) + FLOATVAL_FIELD_NUMBER;
+          hash = (53 * hash) + java.lang.Float.floatToIntBits(
+              getFloatVal());
+          break;
+        case 3:
+          hash = (37 * hash) + STRINGVAL_FIELD_NUMBER;
+          hash = (53 * hash) + getStringVal().hashCode();
+          break;
+        case 4:
+          hash = (37 * hash) + BOOLVAL_FIELD_NUMBER;
+          hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+              getBoolVal());
+          break;
+        case 0:
+        default:
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiValue parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiValue parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiValue parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.KpiValue prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.KpiValue}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.KpiValue)
+        monitoring.Monitoring.KpiValueOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiValue_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiValue_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.KpiValue.class, monitoring.Monitoring.KpiValue.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.KpiValue.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        valueCase_ = 0;
+        value_ = null;
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiValue_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiValue getDefaultInstanceForType() {
+        return monitoring.Monitoring.KpiValue.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiValue build() {
+        monitoring.Monitoring.KpiValue result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiValue buildPartial() {
+        monitoring.Monitoring.KpiValue result = new monitoring.Monitoring.KpiValue(this);
+        if (valueCase_ == 1) {
+          result.value_ = value_;
+        }
+        if (valueCase_ == 2) {
+          result.value_ = value_;
+        }
+        if (valueCase_ == 3) {
+          result.value_ = value_;
+        }
+        if (valueCase_ == 4) {
+          result.value_ = value_;
+        }
+        result.valueCase_ = valueCase_;
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.KpiValue) {
+          return mergeFrom((monitoring.Monitoring.KpiValue)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.KpiValue other) {
+        if (other == monitoring.Monitoring.KpiValue.getDefaultInstance()) return this;
+        switch (other.getValueCase()) {
+          case INTVAL: {
+            setIntVal(other.getIntVal());
+            break;
+          }
+          case FLOATVAL: {
+            setFloatVal(other.getFloatVal());
+            break;
+          }
+          case STRINGVAL: {
+            valueCase_ = 3;
+            value_ = other.value_;
+            onChanged();
+            break;
+          }
+          case BOOLVAL: {
+            setBoolVal(other.getBoolVal());
+            break;
+          }
+          case VALUE_NOT_SET: {
+            break;
+          }
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.KpiValue parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.KpiValue) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int valueCase_ = 0;
+      private java.lang.Object value_;
+      public ValueCase
+          getValueCase() {
+        return ValueCase.forNumber(
+            valueCase_);
+      }
+
+      public Builder clearValue() {
+        valueCase_ = 0;
+        value_ = null;
+        onChanged();
+        return this;
+      }
+
+
+      /**
+       * <code>uint32 intVal = 1;</code>
+       * @return Whether the intVal field is set.
+       */
+      public boolean hasIntVal() {
+        return valueCase_ == 1;
+      }
+      /**
+       * <code>uint32 intVal = 1;</code>
+       * @return The intVal.
+       */
+      public int getIntVal() {
+        if (valueCase_ == 1) {
+          return (java.lang.Integer) value_;
+        }
+        return 0;
+      }
+      /**
+       * <code>uint32 intVal = 1;</code>
+       * @param value The intVal to set.
+       * @return This builder for chaining.
+       */
+      public Builder setIntVal(int value) {
+        valueCase_ = 1;
+        value_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>uint32 intVal = 1;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearIntVal() {
+        if (valueCase_ == 1) {
+          valueCase_ = 0;
+          value_ = null;
+          onChanged();
+        }
+        return this;
+      }
+
+      /**
+       * <code>float floatVal = 2;</code>
+       * @return Whether the floatVal field is set.
+       */
+      public boolean hasFloatVal() {
+        return valueCase_ == 2;
+      }
+      /**
+       * <code>float floatVal = 2;</code>
+       * @return The floatVal.
+       */
+      public float getFloatVal() {
+        if (valueCase_ == 2) {
+          return (java.lang.Float) value_;
+        }
+        return 0F;
+      }
+      /**
+       * <code>float floatVal = 2;</code>
+       * @param value The floatVal to set.
+       * @return This builder for chaining.
+       */
+      public Builder setFloatVal(float value) {
+        valueCase_ = 2;
+        value_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>float floatVal = 2;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearFloatVal() {
+        if (valueCase_ == 2) {
+          valueCase_ = 0;
+          value_ = null;
+          onChanged();
+        }
+        return this;
+      }
+
+      /**
+       * <code>string stringVal = 3;</code>
+       * @return Whether the stringVal field is set.
+       */
+      @java.lang.Override
+      public boolean hasStringVal() {
+        return valueCase_ == 3;
+      }
+      /**
+       * <code>string stringVal = 3;</code>
+       * @return The stringVal.
+       */
+      @java.lang.Override
+      public java.lang.String getStringVal() {
+        java.lang.Object ref = "";
+        if (valueCase_ == 3) {
+          ref = value_;
+        }
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (valueCase_ == 3) {
+            value_ = s;
+          }
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>string stringVal = 3;</code>
+       * @return The bytes for stringVal.
+       */
+      @java.lang.Override
+      public com.google.protobuf.ByteString
+          getStringValBytes() {
+        java.lang.Object ref = "";
+        if (valueCase_ == 3) {
+          ref = value_;
+        }
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          if (valueCase_ == 3) {
+            value_ = b;
+          }
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>string stringVal = 3;</code>
+       * @param value The stringVal to set.
+       * @return This builder for chaining.
+       */
+      public Builder setStringVal(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        valueCase_ = 3;
+        value_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>string stringVal = 3;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearStringVal() {
+        if (valueCase_ == 3) {
+          valueCase_ = 0;
+          value_ = null;
+          onChanged();
+        }
+        return this;
+      }
+      /**
+       * <code>string stringVal = 3;</code>
+       * @param value The bytes for stringVal to set.
+       * @return This builder for chaining.
+       */
+      public Builder setStringValBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+        valueCase_ = 3;
+        value_ = value;
+        onChanged();
+        return this;
+      }
+
+      /**
+       * <code>bool boolVal = 4;</code>
+       * @return Whether the boolVal field is set.
+       */
+      public boolean hasBoolVal() {
+        return valueCase_ == 4;
+      }
+      /**
+       * <code>bool boolVal = 4;</code>
+       * @return The boolVal.
+       */
+      public boolean getBoolVal() {
+        if (valueCase_ == 4) {
+          return (java.lang.Boolean) value_;
+        }
+        return false;
+      }
+      /**
+       * <code>bool boolVal = 4;</code>
+       * @param value The boolVal to set.
+       * @return This builder for chaining.
+       */
+      public Builder setBoolVal(boolean value) {
+        valueCase_ = 4;
+        value_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>bool boolVal = 4;</code>
+       * @return This builder for chaining.
+       */
+      public Builder clearBoolVal() {
+        if (valueCase_ == 4) {
+          valueCase_ = 0;
+          value_ = null;
+          onChanged();
+        }
+        return this;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.KpiValue)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.KpiValue)
+    private static final monitoring.Monitoring.KpiValue DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.KpiValue();
+    }
+
+    public static monitoring.Monitoring.KpiValue getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<KpiValue>
+        PARSER = new com.google.protobuf.AbstractParser<KpiValue>() {
+      @java.lang.Override
+      public KpiValue parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new KpiValue(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<KpiValue> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<KpiValue> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.KpiValue getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
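+
+  // Illustrative usage sketch (hand-written, not protoc output): KpiValue wraps a
+  // oneof, so setting one case on the Builder replaces any previously set case,
+  // and getValueCase() tells the reader which accessor is valid.
+  //
+  //   monitoring.Monitoring.KpiValue value = monitoring.Monitoring.KpiValue.newBuilder()
+  //       .setIntVal(42)
+  //       .build();
+  //   switch (value.getValueCase()) {
+  //     case INTVAL:    useInt(value.getIntVal());       break;
+  //     case FLOATVAL:  useFloat(value.getFloatVal());   break;
+  //     case STRINGVAL: useString(value.getStringVal()); break;
+  //     case BOOLVAL:   useBool(value.getBoolVal());     break;
+  //     case VALUE_NOT_SET:                              break;
+  //   }
+  //
+  //   (useInt/useFloat/useString/useBool stand in for hypothetical consumer methods.)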
+
+  public interface KpiListOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:monitoring.KpiList)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    java.util.List<monitoring.Monitoring.Kpi> 
+        getKpiListList();
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    monitoring.Monitoring.Kpi getKpiList(int index);
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    int getKpiListCount();
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    java.util.List<? extends monitoring.Monitoring.KpiOrBuilder> 
+        getKpiListOrBuilderList();
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    monitoring.Monitoring.KpiOrBuilder getKpiListOrBuilder(
+        int index);
+  }
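+
+  // Illustrative usage sketch (hand-written, not protoc output): the repeated
+  // kpi_list field is read through the accessors declared above, e.g.
+  //
+  //   for (monitoring.Monitoring.Kpi kpi : someKpiList.getKpiListList()) {
+  //     // someKpiList stands in for a hypothetical KpiList instance
+  //   }
+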
+  /**
+   * Protobuf type {@code monitoring.KpiList}
+   */
+  public static final class KpiList extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:monitoring.KpiList)
+      KpiListOrBuilder {
+  private static final long serialVersionUID = 0L;
+    // Use KpiList.newBuilder() to construct.
+    private KpiList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+      super(builder);
+    }
+    private KpiList() {
+      kpiList_ = java.util.Collections.emptyList();
+    }
+
+    @java.lang.Override
+    @SuppressWarnings({"unused"})
+    protected java.lang.Object newInstance(
+        UnusedPrivateParameter unused) {
+      return new KpiList();
+    }
+
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+    getUnknownFields() {
+      return this.unknownFields;
+    }
+    private KpiList(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      this();
+      if (extensionRegistry == null) {
+        throw new java.lang.NullPointerException();
+      }
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
+                kpiList_ = new java.util.ArrayList<monitoring.Monitoring.Kpi>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              kpiList_.add(
+                  input.readMessage(monitoring.Monitoring.Kpi.parser(), extensionRegistry));
+              break;
+            }
+            default: {
+              if (!parseUnknownField(
+                  input, unknownFields, extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) != 0)) {
+          kpiList_ = java.util.Collections.unmodifiableList(kpiList_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiList_descriptor;
+    }
+
+    @java.lang.Override
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return monitoring.Monitoring.internal_static_monitoring_KpiList_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              monitoring.Monitoring.KpiList.class, monitoring.Monitoring.KpiList.Builder.class);
+    }
+
+    public static final int KPI_LIST_FIELD_NUMBER = 1;
+    private java.util.List<monitoring.Monitoring.Kpi> kpiList_;
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    @java.lang.Override
+    public java.util.List<monitoring.Monitoring.Kpi> getKpiListList() {
+      return kpiList_;
+    }
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    @java.lang.Override
+    public java.util.List<? extends monitoring.Monitoring.KpiOrBuilder> 
+        getKpiListOrBuilderList() {
+      return kpiList_;
+    }
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    @java.lang.Override
+    public int getKpiListCount() {
+      return kpiList_.size();
+    }
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.Kpi getKpiList(int index) {
+      return kpiList_.get(index);
+    }
+    /**
+     * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+     */
+    @java.lang.Override
+    public monitoring.Monitoring.KpiOrBuilder getKpiListOrBuilder(
+        int index) {
+      return kpiList_.get(index);
+    }
+
+    private byte memoizedIsInitialized = -1;
+    @java.lang.Override
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    @java.lang.Override
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      for (int i = 0; i < kpiList_.size(); i++) {
+        output.writeMessage(1, kpiList_.get(i));
+      }
+      unknownFields.writeTo(output);
+    }
+
+    @java.lang.Override
+    public int getSerializedSize() {
+      int size = memoizedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < kpiList_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, kpiList_.get(i));
+      }
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
+      return size;
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof monitoring.Monitoring.KpiList)) {
+        return super.equals(obj);
+      }
+      monitoring.Monitoring.KpiList other = (monitoring.Monitoring.KpiList) obj;
+
+      if (!getKpiListList()
+          .equals(other.getKpiListList())) return false;
+      if (!unknownFields.equals(other.unknownFields)) return false;
+      return true;
+    }
+
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptor().hashCode();
+      if (getKpiListCount() > 0) {
+        hash = (37 * hash) + KPI_LIST_FIELD_NUMBER;
+        hash = (53 * hash) + getKpiListList().hashCode();
+      }
+      hash = (29 * hash) + unknownFields.hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static monitoring.Monitoring.KpiList parseFrom(
+        java.nio.ByteBuffer data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        java.nio.ByteBuffer data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiList parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiList parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
+    }
+    public static monitoring.Monitoring.KpiList parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
+    }
+
+    @java.lang.Override
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
+    public static Builder newBuilder(monitoring.Monitoring.KpiList prototype) {
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    @java.lang.Override
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
+    }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code monitoring.KpiList}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:monitoring.KpiList)
+        monitoring.Monitoring.KpiListOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiList_descriptor;
+      }
+
+      @java.lang.Override
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiList_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                monitoring.Monitoring.KpiList.class, monitoring.Monitoring.KpiList.Builder.class);
+      }
+
+      // Construct using monitoring.Monitoring.KpiList.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
+          getKpiListFieldBuilder();
+        }
+      }
+      @java.lang.Override
+      public Builder clear() {
+        super.clear();
+        if (kpiListBuilder_ == null) {
+          kpiList_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          kpiListBuilder_.clear();
+        }
+        return this;
+      }
+
+      @java.lang.Override
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return monitoring.Monitoring.internal_static_monitoring_KpiList_descriptor;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiList getDefaultInstanceForType() {
+        return monitoring.Monitoring.KpiList.getDefaultInstance();
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiList build() {
+        monitoring.Monitoring.KpiList result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      @java.lang.Override
+      public monitoring.Monitoring.KpiList buildPartial() {
+        monitoring.Monitoring.KpiList result = new monitoring.Monitoring.KpiList(this);
+        int from_bitField0_ = bitField0_;
+        if (kpiListBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) != 0)) {
+            kpiList_ = java.util.Collections.unmodifiableList(kpiList_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.kpiList_ = kpiList_;
+        } else {
+          result.kpiList_ = kpiListBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      @java.lang.Override
+      public Builder clone() {
+        return super.clone();
+      }
+      @java.lang.Override
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.setField(field, value);
+      }
+      @java.lang.Override
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return super.clearField(field);
+      }
+      @java.lang.Override
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return super.clearOneof(oneof);
+      }
+      @java.lang.Override
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, java.lang.Object value) {
+        return super.setRepeatedField(field, index, value);
+      }
+      @java.lang.Override
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          java.lang.Object value) {
+        return super.addRepeatedField(field, value);
+      }
+      @java.lang.Override
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof monitoring.Monitoring.KpiList) {
+          return mergeFrom((monitoring.Monitoring.KpiList)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(monitoring.Monitoring.KpiList other) {
+        if (other == monitoring.Monitoring.KpiList.getDefaultInstance()) return this;
+        if (kpiListBuilder_ == null) {
+          if (!other.kpiList_.isEmpty()) {
+            if (kpiList_.isEmpty()) {
+              kpiList_ = other.kpiList_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureKpiListIsMutable();
+              kpiList_.addAll(other.kpiList_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.kpiList_.isEmpty()) {
+            if (kpiListBuilder_.isEmpty()) {
+              kpiListBuilder_.dispose();
+              kpiListBuilder_ = null;
+              kpiList_ = other.kpiList_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              kpiListBuilder_ = 
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
+                   getKpiListFieldBuilder() : null;
+            } else {
+              kpiListBuilder_.addAllMessages(other.kpiList_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
+        return this;
+      }
+
+      @java.lang.Override
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      @java.lang.Override
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        monitoring.Monitoring.KpiList parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (monitoring.Monitoring.KpiList) e.getUnfinishedMessage();
+          throw e.unwrapIOException();
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      private java.util.List<monitoring.Monitoring.Kpi> kpiList_ =
+        java.util.Collections.emptyList();
+      private void ensureKpiListIsMutable() {
+        if (!((bitField0_ & 0x00000001) != 0)) {
+          kpiList_ = new java.util.ArrayList<monitoring.Monitoring.Kpi>(kpiList_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilderV3<
+          monitoring.Monitoring.Kpi, monitoring.Monitoring.Kpi.Builder, monitoring.Monitoring.KpiOrBuilder> kpiListBuilder_;
+
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public java.util.List<monitoring.Monitoring.Kpi> getKpiListList() {
+        if (kpiListBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(kpiList_);
+        } else {
+          return kpiListBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public int getKpiListCount() {
+        if (kpiListBuilder_ == null) {
+          return kpiList_.size();
+        } else {
+          return kpiListBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public monitoring.Monitoring.Kpi getKpiList(int index) {
+        if (kpiListBuilder_ == null) {
+          return kpiList_.get(index);
+        } else {
+          return kpiListBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder setKpiList(
+          int index, monitoring.Monitoring.Kpi value) {
+        if (kpiListBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureKpiListIsMutable();
+          kpiList_.set(index, value);
+          onChanged();
+        } else {
+          kpiListBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder setKpiList(
+          int index, monitoring.Monitoring.Kpi.Builder builderForValue) {
+        if (kpiListBuilder_ == null) {
+          ensureKpiListIsMutable();
+          kpiList_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          kpiListBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder addKpiList(monitoring.Monitoring.Kpi value) {
+        if (kpiListBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureKpiListIsMutable();
+          kpiList_.add(value);
+          onChanged();
+        } else {
+          kpiListBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder addKpiList(
+          int index, monitoring.Monitoring.Kpi value) {
+        if (kpiListBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureKpiListIsMutable();
+          kpiList_.add(index, value);
+          onChanged();
+        } else {
+          kpiListBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder addKpiList(
+          monitoring.Monitoring.Kpi.Builder builderForValue) {
+        if (kpiListBuilder_ == null) {
+          ensureKpiListIsMutable();
+          kpiList_.add(builderForValue.build());
+          onChanged();
+        } else {
+          kpiListBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder addKpiList(
+          int index, monitoring.Monitoring.Kpi.Builder builderForValue) {
+        if (kpiListBuilder_ == null) {
+          ensureKpiListIsMutable();
+          kpiList_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          kpiListBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder addAllKpiList(
+          java.lang.Iterable<? extends monitoring.Monitoring.Kpi> values) {
+        if (kpiListBuilder_ == null) {
+          ensureKpiListIsMutable();
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, kpiList_);
+          onChanged();
+        } else {
+          kpiListBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder clearKpiList() {
+        if (kpiListBuilder_ == null) {
+          kpiList_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          kpiListBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public Builder removeKpiList(int index) {
+        if (kpiListBuilder_ == null) {
+          ensureKpiListIsMutable();
+          kpiList_.remove(index);
+          onChanged();
+        } else {
+          kpiListBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public monitoring.Monitoring.Kpi.Builder getKpiListBuilder(
+          int index) {
+        return getKpiListFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public monitoring.Monitoring.KpiOrBuilder getKpiListOrBuilder(
+          int index) {
+        if (kpiListBuilder_ == null) {
+          return kpiList_.get(index);  } else {
+          return kpiListBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public java.util.List<? extends monitoring.Monitoring.KpiOrBuilder> 
+           getKpiListOrBuilderList() {
+        if (kpiListBuilder_ != null) {
+          return kpiListBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(kpiList_);
+        }
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public monitoring.Monitoring.Kpi.Builder addKpiListBuilder() {
+        return getKpiListFieldBuilder().addBuilder(
+            monitoring.Monitoring.Kpi.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public monitoring.Monitoring.Kpi.Builder addKpiListBuilder(
+          int index) {
+        return getKpiListFieldBuilder().addBuilder(
+            index, monitoring.Monitoring.Kpi.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .monitoring.Kpi kpi_list = 1;</code>
+       */
+      public java.util.List<monitoring.Monitoring.Kpi.Builder> 
+           getKpiListBuilderList() {
+        return getKpiListFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilderV3<
+          monitoring.Monitoring.Kpi, monitoring.Monitoring.Kpi.Builder, monitoring.Monitoring.KpiOrBuilder> 
+          getKpiListFieldBuilder() {
+        if (kpiListBuilder_ == null) {
+          kpiListBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
+              monitoring.Monitoring.Kpi, monitoring.Monitoring.Kpi.Builder, monitoring.Monitoring.KpiOrBuilder>(
+                  kpiList_,
+                  ((bitField0_ & 0x00000001) != 0),
+                  getParentForChildren(),
+                  isClean());
+          kpiList_ = null;
+        }
+        return kpiListBuilder_;
+      }
+      @java.lang.Override
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      @java.lang.Override
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+
+      // @@protoc_insertion_point(builder_scope:monitoring.KpiList)
+    }
+
+    // @@protoc_insertion_point(class_scope:monitoring.KpiList)
+    private static final monitoring.Monitoring.KpiList DEFAULT_INSTANCE;
+    static {
+      DEFAULT_INSTANCE = new monitoring.Monitoring.KpiList();
+    }
+
+    public static monitoring.Monitoring.KpiList getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    private static final com.google.protobuf.Parser<KpiList>
+        PARSER = new com.google.protobuf.AbstractParser<KpiList>() {
+      @java.lang.Override
+      public KpiList parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new KpiList(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<KpiList> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<KpiList> getParserForType() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public monitoring.Monitoring.KpiList getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_KpiDescriptor_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_KpiDescriptor_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_MonitorKpiRequest_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_MonitorKpiRequest_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_KpiId_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_KpiId_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_Kpi_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_Kpi_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_KpiValue_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_KpiValue_fieldAccessorTable;
+  private static final com.google.protobuf.Descriptors.Descriptor
+    internal_static_monitoring_KpiList_descriptor;
+  private static final 
+    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+      internal_static_monitoring_KpiList_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static  com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\020monitoring.proto\022\nmonitoring\032\rcontext." +
+      "proto\032\026kpi_sample_types.proto\"\332\001\n\rKpiDes" +
+      "criptor\022\027\n\017kpi_description\030\001 \001(\t\0228\n\017kpi_" +
+      "sample_type\030\002 \001(\0162\037.kpi_sample_types.Kpi" +
+      "SampleType\022$\n\tdevice_id\030\003 \001(\0132\021.context." +
+      "DeviceId\022(\n\013endpoint_id\030\004 \001(\0132\023.context." +
+      "EndPointId\022&\n\nservice_id\030\005 \001(\0132\022.context" +
+      ".ServiceId\"p\n\021MonitorKpiRequest\022!\n\006kpi_i" +
+      "d\030\001 \001(\0132\021.monitoring.KpiId\022\033\n\023sampling_d" +
+      "uration_s\030\002 \001(\002\022\033\n\023sampling_interval_s\030\003" +
+      " \001(\002\"&\n\005KpiId\022\035\n\006kpi_id\030\001 \001(\0132\r.context." +
+      "Uuid\"d\n\003Kpi\022!\n\006kpi_id\030\001 \001(\0132\021.monitoring" +
+      ".KpiId\022\021\n\ttimestamp\030\002 \001(\t\022\'\n\tkpi_value\030\004" +
+      " \001(\0132\024.monitoring.KpiValue\"a\n\010KpiValue\022\020" +
+      "\n\006intVal\030\001 \001(\rH\000\022\022\n\010floatVal\030\002 \001(\002H\000\022\023\n\t" +
+      "stringVal\030\003 \001(\tH\000\022\021\n\007boolVal\030\004 \001(\010H\000B\007\n\005" +
+      "value\",\n\007KpiList\022!\n\010kpi_list\030\001 \003(\0132\017.mon" +
+      "itoring.Kpi2\363\002\n\021MonitoringService\022;\n\tCre" +
+      "ateKpi\022\031.monitoring.KpiDescriptor\032\021.moni" +
+      "toring.KpiId\"\000\022B\n\020GetKpiDescriptor\022\021.mon" +
+      "itoring.KpiId\032\031.monitoring.KpiDescriptor" +
+      "\"\000\022/\n\nIncludeKpi\022\017.monitoring.Kpi\032\016.cont" +
+      "ext.Empty\"\000\022=\n\nMonitorKpi\022\035.monitoring.M" +
+      "onitorKpiRequest\032\016.context.Empty\"\000\0226\n\014Ge" +
+      "tStreamKpi\022\021.monitoring.KpiId\032\017.monitori" +
+      "ng.Kpi\"\0000\001\0225\n\rGetInstantKpi\022\021.monitoring" +
+      ".KpiId\032\017.monitoring.Kpi\"\000b\006proto3"
+    };
+    descriptor = com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          context.ContextOuterClass.getDescriptor(),
+          kpi_sample_types.KpiSampleTypes.getDescriptor(),
+        });
+    internal_static_monitoring_KpiDescriptor_descriptor =
+      getDescriptor().getMessageTypes().get(0);
+    internal_static_monitoring_KpiDescriptor_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_KpiDescriptor_descriptor,
+        new java.lang.String[] { "KpiDescription", "KpiSampleType", "DeviceId", "EndpointId", "ServiceId", });
+    internal_static_monitoring_MonitorKpiRequest_descriptor =
+      getDescriptor().getMessageTypes().get(1);
+    internal_static_monitoring_MonitorKpiRequest_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_MonitorKpiRequest_descriptor,
+        new java.lang.String[] { "KpiId", "SamplingDurationS", "SamplingIntervalS", });
+    internal_static_monitoring_KpiId_descriptor =
+      getDescriptor().getMessageTypes().get(2);
+    internal_static_monitoring_KpiId_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_KpiId_descriptor,
+        new java.lang.String[] { "KpiId", });
+    internal_static_monitoring_Kpi_descriptor =
+      getDescriptor().getMessageTypes().get(3);
+    internal_static_monitoring_Kpi_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_Kpi_descriptor,
+        new java.lang.String[] { "KpiId", "Timestamp", "KpiValue", });
+    internal_static_monitoring_KpiValue_descriptor =
+      getDescriptor().getMessageTypes().get(4);
+    internal_static_monitoring_KpiValue_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_KpiValue_descriptor,
+        new java.lang.String[] { "IntVal", "FloatVal", "StringVal", "BoolVal", "Value", });
+    internal_static_monitoring_KpiList_descriptor =
+      getDescriptor().getMessageTypes().get(5);
+    internal_static_monitoring_KpiList_fieldAccessorTable = new
+      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
+        internal_static_monitoring_KpiList_descriptor,
+        new java.lang.String[] { "KpiList", });
+    context.ContextOuterClass.getDescriptor();
+    kpi_sample_types.KpiSampleTypes.getDescriptor();
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/src/automation/target/generated-sources/grpc/monitoring/MonitoringService.java b/src/automation/target/generated-sources/grpc/monitoring/MonitoringService.java
new file mode 100644
index 0000000000000000000000000000000000000000..480e193125e51851a72e18473c139d71190bab11
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/MonitoringService.java
@@ -0,0 +1,26 @@
+package monitoring;
+
+import io.quarkus.grpc.runtime.MutinyService;
+
+@javax.annotation.Generated(
+value = "by Mutiny Grpc generator",
+comments = "Source: monitoring.proto")
+public interface MonitoringService extends MutinyService {
+
+    
+    io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiId> createKpi(monitoring.Monitoring.KpiDescriptor request);
+    
+    io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(monitoring.Monitoring.KpiId request);
+    
+    io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> includeKpi(monitoring.Monitoring.Kpi request);
+    
+    io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorKpi(monitoring.Monitoring.MonitorKpiRequest request);
+    
+    io.smallrye.mutiny.Uni<monitoring.Monitoring.Kpi> getInstantKpi(monitoring.Monitoring.KpiId request);
+    
+    
+    io.smallrye.mutiny.Multi<monitoring.Monitoring.Kpi> getStreamKpi(monitoring.Monitoring.KpiId request);
+    
+    
+
+}
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceBean.java b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceBean.java
new file mode 100644
index 0000000000000000000000000000000000000000..b0b2f5abd4144d8f6e41c2613a36c5b78051dd57
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceBean.java
@@ -0,0 +1,68 @@
+package monitoring;
+
+import io.grpc.BindableService;
+import io.quarkus.grpc.GrpcService;
+import io.quarkus.grpc.runtime.MutinyBean;
+
+@javax.annotation.Generated(
+value = "by Mutiny Grpc generator",
+comments = "Source: monitoring.proto")
+public class MonitoringServiceBean extends MutinyMonitoringServiceGrpc.MonitoringServiceImplBase implements BindableService, MutinyBean {
+
+    private final MonitoringService delegate;
+
+    MonitoringServiceBean(@GrpcService MonitoringService delegate) {
+       this.delegate = delegate;
+    }
+
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiId> createKpi(monitoring.Monitoring.KpiDescriptor request) {
+       try {
+         return delegate.createKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(monitoring.Monitoring.KpiId request) {
+       try {
+         return delegate.getKpiDescriptor(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> includeKpi(monitoring.Monitoring.Kpi request) {
+       try {
+         return delegate.includeKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorKpi(monitoring.Monitoring.MonitorKpiRequest request) {
+       try {
+         return delegate.monitorKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.Kpi> getInstantKpi(monitoring.Monitoring.KpiId request) {
+       try {
+         return delegate.getInstantKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+
+    @Override
+    public io.smallrye.mutiny.Multi<monitoring.Monitoring.Kpi> getStreamKpi(monitoring.Monitoring.KpiId request) {
+       try {
+         return delegate.getStreamKpi(request);
+       } catch (UnsupportedOperationException e) {
+          throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+       }
+    }
+
+}
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceClient.java b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceClient.java
new file mode 100644
index 0000000000000000000000000000000000000000..293a7c418e6e027792e8007234e34225b8c1848a
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceClient.java
@@ -0,0 +1,49 @@
+package monitoring;
+
+import java.util.function.BiFunction;
+
+import io.quarkus.grpc.runtime.MutinyClient;
+
+@javax.annotation.Generated(
+value = "by Mutiny Grpc generator",
+comments = "Source: monitoring.proto")
+public class MonitoringServiceClient implements MonitoringService, MutinyClient<MutinyMonitoringServiceGrpc.MutinyMonitoringServiceStub> {
+
+    private final MutinyMonitoringServiceGrpc.MutinyMonitoringServiceStub stub;
+
+    public MonitoringServiceClient(String name, io.grpc.Channel channel, BiFunction<String, MutinyMonitoringServiceGrpc.MutinyMonitoringServiceStub, MutinyMonitoringServiceGrpc.MutinyMonitoringServiceStub> stubConfigurator) {
+       this.stub = stubConfigurator.apply(name,MutinyMonitoringServiceGrpc.newMutinyStub(channel));
+    }
+
+    @Override
+    public MutinyMonitoringServiceGrpc.MutinyMonitoringServiceStub getStub() {
+       return stub;
+    }
+
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiId> createKpi(monitoring.Monitoring.KpiDescriptor request) {
+       return stub.createKpi(request);
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(monitoring.Monitoring.KpiId request) {
+       return stub.getKpiDescriptor(request);
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> includeKpi(monitoring.Monitoring.Kpi request) {
+       return stub.includeKpi(request);
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorKpi(monitoring.Monitoring.MonitorKpiRequest request) {
+       return stub.monitorKpi(request);
+    }
+    @Override
+    public io.smallrye.mutiny.Uni<monitoring.Monitoring.Kpi> getInstantKpi(monitoring.Monitoring.KpiId request) {
+       return stub.getInstantKpi(request);
+    }
+
+    @Override
+    public io.smallrye.mutiny.Multi<monitoring.Monitoring.Kpi> getStreamKpi(monitoring.Monitoring.KpiId request) {
+       return stub.getStreamKpi(request);
+    }
+
+}
\ No newline at end of file
diff --git a/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceGrpc.java b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceGrpc.java
new file mode 100644
index 0000000000000000000000000000000000000000..7749c322a714ed96bc523a6bb77da9d43e54dbca
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/MonitoringServiceGrpc.java
@@ -0,0 +1,638 @@
+package monitoring;
+
+import static io.grpc.MethodDescriptor.generateFullMethodName;
+
+/**
+ */
+@javax.annotation.Generated(
+    value = "by gRPC proto compiler (version 1.38.1)",
+    comments = "Source: monitoring.proto")
+public final class MonitoringServiceGrpc {
+
+  private MonitoringServiceGrpc() {}
+
+  public static final String SERVICE_NAME = "monitoring.MonitoringService";
+
+  // Static method descriptors that strictly reflect the proto.
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.KpiDescriptor,
+      monitoring.Monitoring.KpiId> getCreateKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "CreateKpi",
+      requestType = monitoring.Monitoring.KpiDescriptor.class,
+      responseType = monitoring.Monitoring.KpiId.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.KpiDescriptor,
+      monitoring.Monitoring.KpiId> getCreateKpiMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.KpiDescriptor, monitoring.Monitoring.KpiId> getCreateKpiMethod;
+    if ((getCreateKpiMethod = MonitoringServiceGrpc.getCreateKpiMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getCreateKpiMethod = MonitoringServiceGrpc.getCreateKpiMethod) == null) {
+          MonitoringServiceGrpc.getCreateKpiMethod = getCreateKpiMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.KpiDescriptor, monitoring.Monitoring.KpiId>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiDescriptor.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiId.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("CreateKpi"))
+              .build();
+        }
+      }
+    }
+    return getCreateKpiMethod;
+  }
+
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.KpiDescriptor> getGetKpiDescriptorMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "GetKpiDescriptor",
+      requestType = monitoring.Monitoring.KpiId.class,
+      responseType = monitoring.Monitoring.KpiDescriptor.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.KpiDescriptor> getGetKpiDescriptorMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiDescriptor> getGetKpiDescriptorMethod;
+    if ((getGetKpiDescriptorMethod = MonitoringServiceGrpc.getGetKpiDescriptorMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getGetKpiDescriptorMethod = MonitoringServiceGrpc.getGetKpiDescriptorMethod) == null) {
+          MonitoringServiceGrpc.getGetKpiDescriptorMethod = getGetKpiDescriptorMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.KpiId, monitoring.Monitoring.KpiDescriptor>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetKpiDescriptor"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiId.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiDescriptor.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("GetKpiDescriptor"))
+              .build();
+        }
+      }
+    }
+    return getGetKpiDescriptorMethod;
+  }
+
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.Kpi,
+      context.ContextOuterClass.Empty> getIncludeKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "IncludeKpi",
+      requestType = monitoring.Monitoring.Kpi.class,
+      responseType = context.ContextOuterClass.Empty.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.Kpi,
+      context.ContextOuterClass.Empty> getIncludeKpiMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.Kpi, context.ContextOuterClass.Empty> getIncludeKpiMethod;
+    if ((getIncludeKpiMethod = MonitoringServiceGrpc.getIncludeKpiMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getIncludeKpiMethod = MonitoringServiceGrpc.getIncludeKpiMethod) == null) {
+          MonitoringServiceGrpc.getIncludeKpiMethod = getIncludeKpiMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.Kpi, context.ContextOuterClass.Empty>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "IncludeKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.Kpi.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  context.ContextOuterClass.Empty.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("IncludeKpi"))
+              .build();
+        }
+      }
+    }
+    return getIncludeKpiMethod;
+  }
+
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.MonitorKpiRequest,
+      context.ContextOuterClass.Empty> getMonitorKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "MonitorKpi",
+      requestType = monitoring.Monitoring.MonitorKpiRequest.class,
+      responseType = context.ContextOuterClass.Empty.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.MonitorKpiRequest,
+      context.ContextOuterClass.Empty> getMonitorKpiMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.MonitorKpiRequest, context.ContextOuterClass.Empty> getMonitorKpiMethod;
+    if ((getMonitorKpiMethod = MonitoringServiceGrpc.getMonitorKpiMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getMonitorKpiMethod = MonitoringServiceGrpc.getMonitorKpiMethod) == null) {
+          MonitoringServiceGrpc.getMonitorKpiMethod = getMonitorKpiMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.MonitorKpiRequest, context.ContextOuterClass.Empty>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "MonitorKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.MonitorKpiRequest.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  context.ContextOuterClass.Empty.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("MonitorKpi"))
+              .build();
+        }
+      }
+    }
+    return getMonitorKpiMethod;
+  }
+
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.Kpi> getGetStreamKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "GetStreamKpi",
+      requestType = monitoring.Monitoring.KpiId.class,
+      responseType = monitoring.Monitoring.Kpi.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.Kpi> getGetStreamKpiMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId, monitoring.Monitoring.Kpi> getGetStreamKpiMethod;
+    if ((getGetStreamKpiMethod = MonitoringServiceGrpc.getGetStreamKpiMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getGetStreamKpiMethod = MonitoringServiceGrpc.getGetStreamKpiMethod) == null) {
+          MonitoringServiceGrpc.getGetStreamKpiMethod = getGetStreamKpiMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.KpiId, monitoring.Monitoring.Kpi>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetStreamKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiId.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.Kpi.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("GetStreamKpi"))
+              .build();
+        }
+      }
+    }
+    return getGetStreamKpiMethod;
+  }
+
+  private static volatile io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.Kpi> getGetInstantKpiMethod;
+
+  @io.grpc.stub.annotations.RpcMethod(
+      fullMethodName = SERVICE_NAME + '/' + "GetInstantKpi",
+      requestType = monitoring.Monitoring.KpiId.class,
+      responseType = monitoring.Monitoring.Kpi.class,
+      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
+  public static io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId,
+      monitoring.Monitoring.Kpi> getGetInstantKpiMethod() {
+    io.grpc.MethodDescriptor<monitoring.Monitoring.KpiId, monitoring.Monitoring.Kpi> getGetInstantKpiMethod;
+    if ((getGetInstantKpiMethod = MonitoringServiceGrpc.getGetInstantKpiMethod) == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        if ((getGetInstantKpiMethod = MonitoringServiceGrpc.getGetInstantKpiMethod) == null) {
+          MonitoringServiceGrpc.getGetInstantKpiMethod = getGetInstantKpiMethod =
+              io.grpc.MethodDescriptor.<monitoring.Monitoring.KpiId, monitoring.Monitoring.Kpi>newBuilder()
+              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
+              .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetInstantKpi"))
+              .setSampledToLocalTracing(true)
+              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.KpiId.getDefaultInstance()))
+              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
+                  monitoring.Monitoring.Kpi.getDefaultInstance()))
+              .setSchemaDescriptor(new MonitoringServiceMethodDescriptorSupplier("GetInstantKpi"))
+              .build();
+        }
+      }
+    }
+    return getGetInstantKpiMethod;
+  }
+
+  /**
+   * Creates a new async stub that supports all call types for the service
+   */
+  public static MonitoringServiceStub newStub(io.grpc.Channel channel) {
+    io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceStub> factory =
+      new io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceStub>() {
+        @java.lang.Override
+        public MonitoringServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+          return new MonitoringServiceStub(channel, callOptions);
+        }
+      };
+    return MonitoringServiceStub.newStub(factory, channel);
+  }
+
+  /**
+   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
+   */
+  public static MonitoringServiceBlockingStub newBlockingStub(
+      io.grpc.Channel channel) {
+    io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceBlockingStub> factory =
+      new io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceBlockingStub>() {
+        @java.lang.Override
+        public MonitoringServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+          return new MonitoringServiceBlockingStub(channel, callOptions);
+        }
+      };
+    return MonitoringServiceBlockingStub.newStub(factory, channel);
+  }
+
+  /**
+   * Creates a new ListenableFuture-style stub that supports unary calls on the service
+   */
+  public static MonitoringServiceFutureStub newFutureStub(
+      io.grpc.Channel channel) {
+    io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceFutureStub> factory =
+      new io.grpc.stub.AbstractStub.StubFactory<MonitoringServiceFutureStub>() {
+        @java.lang.Override
+        public MonitoringServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+          return new MonitoringServiceFutureStub(channel, callOptions);
+        }
+      };
+    return MonitoringServiceFutureStub.newStub(factory, channel);
+  }
+
+  /**
+   */
+  public static abstract class MonitoringServiceImplBase implements io.grpc.BindableService {
+
+    /**
+     */
+    public void createKpi(monitoring.Monitoring.KpiDescriptor request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiId> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateKpiMethod(), responseObserver);
+    }
+
+    /**
+     */
+    public void getKpiDescriptor(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiDescriptor> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetKpiDescriptorMethod(), responseObserver);
+    }
+
+    /**
+     */
+    public void includeKpi(monitoring.Monitoring.Kpi request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getIncludeKpiMethod(), responseObserver);
+    }
+
+    /**
+     */
+    public void monitorKpi(monitoring.Monitoring.MonitorKpiRequest request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getMonitorKpiMethod(), responseObserver);
+    }
+
+    /**
+     */
+    public void getStreamKpi(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetStreamKpiMethod(), responseObserver);
+    }
+
+    /**
+     */
+    public void getInstantKpi(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi> responseObserver) {
+      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetInstantKpiMethod(), responseObserver);
+    }
+
+    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
+      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
+          .addMethod(
+            getCreateKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                monitoring.Monitoring.KpiDescriptor,
+                monitoring.Monitoring.KpiId>(
+                  this, METHODID_CREATE_KPI)))
+          .addMethod(
+            getGetKpiDescriptorMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                monitoring.Monitoring.KpiId,
+                monitoring.Monitoring.KpiDescriptor>(
+                  this, METHODID_GET_KPI_DESCRIPTOR)))
+          .addMethod(
+            getIncludeKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                monitoring.Monitoring.Kpi,
+                context.ContextOuterClass.Empty>(
+                  this, METHODID_INCLUDE_KPI)))
+          .addMethod(
+            getMonitorKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                monitoring.Monitoring.MonitorKpiRequest,
+                context.ContextOuterClass.Empty>(
+                  this, METHODID_MONITOR_KPI)))
+          .addMethod(
+            getGetStreamKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncServerStreamingCall(
+              new MethodHandlers<
+                monitoring.Monitoring.KpiId,
+                monitoring.Monitoring.Kpi>(
+                  this, METHODID_GET_STREAM_KPI)))
+          .addMethod(
+            getGetInstantKpiMethod(),
+            io.grpc.stub.ServerCalls.asyncUnaryCall(
+              new MethodHandlers<
+                monitoring.Monitoring.KpiId,
+                monitoring.Monitoring.Kpi>(
+                  this, METHODID_GET_INSTANT_KPI)))
+          .build();
+    }
+  }
+
+  /**
+   */
+  public static final class MonitoringServiceStub extends io.grpc.stub.AbstractAsyncStub<MonitoringServiceStub> {
+    private MonitoringServiceStub(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MonitoringServiceStub build(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MonitoringServiceStub(channel, callOptions);
+    }
+
+    /**
+     */
+    public void createKpi(monitoring.Monitoring.KpiDescriptor request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiId> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getCreateKpiMethod(), getCallOptions()), request, responseObserver);
+    }
+
+    /**
+     */
+    public void getKpiDescriptor(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiDescriptor> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getGetKpiDescriptorMethod(), getCallOptions()), request, responseObserver);
+    }
+
+    /**
+     */
+    public void includeKpi(monitoring.Monitoring.Kpi request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getIncludeKpiMethod(), getCallOptions()), request, responseObserver);
+    }
+
+    /**
+     */
+    public void monitorKpi(monitoring.Monitoring.MonitorKpiRequest request,
+        io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getMonitorKpiMethod(), getCallOptions()), request, responseObserver);
+    }
+
+    /**
+     */
+    public void getStreamKpi(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncServerStreamingCall(
+          getChannel().newCall(getGetStreamKpiMethod(), getCallOptions()), request, responseObserver);
+    }
+
+    /**
+     */
+    public void getInstantKpi(monitoring.Monitoring.KpiId request,
+        io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi> responseObserver) {
+      io.grpc.stub.ClientCalls.asyncUnaryCall(
+          getChannel().newCall(getGetInstantKpiMethod(), getCallOptions()), request, responseObserver);
+    }
+  }
+
+  /**
+   */
+  public static final class MonitoringServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<MonitoringServiceBlockingStub> {
+    private MonitoringServiceBlockingStub(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MonitoringServiceBlockingStub build(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MonitoringServiceBlockingStub(channel, callOptions);
+    }
+
+    /**
+     */
+    public monitoring.Monitoring.KpiId createKpi(monitoring.Monitoring.KpiDescriptor request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getCreateKpiMethod(), getCallOptions(), request);
+    }
+
+    /**
+     */
+    public monitoring.Monitoring.KpiDescriptor getKpiDescriptor(monitoring.Monitoring.KpiId request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getGetKpiDescriptorMethod(), getCallOptions(), request);
+    }
+
+    /**
+     */
+    public context.ContextOuterClass.Empty includeKpi(monitoring.Monitoring.Kpi request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getIncludeKpiMethod(), getCallOptions(), request);
+    }
+
+    /**
+     */
+    public context.ContextOuterClass.Empty monitorKpi(monitoring.Monitoring.MonitorKpiRequest request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getMonitorKpiMethod(), getCallOptions(), request);
+    }
+
+    /**
+     */
+    public java.util.Iterator<monitoring.Monitoring.Kpi> getStreamKpi(
+        monitoring.Monitoring.KpiId request) {
+      return io.grpc.stub.ClientCalls.blockingServerStreamingCall(
+          getChannel(), getGetStreamKpiMethod(), getCallOptions(), request);
+    }
+
+    /**
+     */
+    public monitoring.Monitoring.Kpi getInstantKpi(monitoring.Monitoring.KpiId request) {
+      return io.grpc.stub.ClientCalls.blockingUnaryCall(
+          getChannel(), getGetInstantKpiMethod(), getCallOptions(), request);
+    }
+  }
+
+  /**
+   */
+  public static final class MonitoringServiceFutureStub extends io.grpc.stub.AbstractFutureStub<MonitoringServiceFutureStub> {
+    private MonitoringServiceFutureStub(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      super(channel, callOptions);
+    }
+
+    @java.lang.Override
+    protected MonitoringServiceFutureStub build(
+        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+      return new MonitoringServiceFutureStub(channel, callOptions);
+    }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<monitoring.Monitoring.KpiId> createKpi(
+        monitoring.Monitoring.KpiDescriptor request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getCreateKpiMethod(), getCallOptions()), request);
+    }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(
+        monitoring.Monitoring.KpiId request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getGetKpiDescriptorMethod(), getCallOptions()), request);
+    }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<context.ContextOuterClass.Empty> includeKpi(
+        monitoring.Monitoring.Kpi request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getIncludeKpiMethod(), getCallOptions()), request);
+    }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<context.ContextOuterClass.Empty> monitorKpi(
+        monitoring.Monitoring.MonitorKpiRequest request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getMonitorKpiMethod(), getCallOptions()), request);
+    }
+
+    /**
+     */
+    public com.google.common.util.concurrent.ListenableFuture<monitoring.Monitoring.Kpi> getInstantKpi(
+        monitoring.Monitoring.KpiId request) {
+      return io.grpc.stub.ClientCalls.futureUnaryCall(
+          getChannel().newCall(getGetInstantKpiMethod(), getCallOptions()), request);
+    }
+  }
+
+  private static final int METHODID_CREATE_KPI = 0;
+  private static final int METHODID_GET_KPI_DESCRIPTOR = 1;
+  private static final int METHODID_INCLUDE_KPI = 2;
+  private static final int METHODID_MONITOR_KPI = 3;
+  private static final int METHODID_GET_STREAM_KPI = 4;
+  private static final int METHODID_GET_INSTANT_KPI = 5;
+
+  private static final class MethodHandlers<Req, Resp> implements
+      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
+      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
+      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
+      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
+    private final MonitoringServiceImplBase serviceImpl;
+    private final int methodId;
+
+    MethodHandlers(MonitoringServiceImplBase serviceImpl, int methodId) {
+      this.serviceImpl = serviceImpl;
+      this.methodId = methodId;
+    }
+
+    @java.lang.Override
+    @java.lang.SuppressWarnings("unchecked")
+    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
+      switch (methodId) {
+        case METHODID_CREATE_KPI:
+          serviceImpl.createKpi((monitoring.Monitoring.KpiDescriptor) request,
+              (io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiId>) responseObserver);
+          break;
+        case METHODID_GET_KPI_DESCRIPTOR:
+          serviceImpl.getKpiDescriptor((monitoring.Monitoring.KpiId) request,
+              (io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiDescriptor>) responseObserver);
+          break;
+        case METHODID_INCLUDE_KPI:
+          serviceImpl.includeKpi((monitoring.Monitoring.Kpi) request,
+              (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver);
+          break;
+        case METHODID_MONITOR_KPI:
+          serviceImpl.monitorKpi((monitoring.Monitoring.MonitorKpiRequest) request,
+              (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver);
+          break;
+        case METHODID_GET_STREAM_KPI:
+          serviceImpl.getStreamKpi((monitoring.Monitoring.KpiId) request,
+              (io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi>) responseObserver);
+          break;
+        case METHODID_GET_INSTANT_KPI:
+          serviceImpl.getInstantKpi((monitoring.Monitoring.KpiId) request,
+              (io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi>) responseObserver);
+          break;
+        default:
+          throw new AssertionError();
+      }
+    }
+
+    @java.lang.Override
+    @java.lang.SuppressWarnings("unchecked")
+    public io.grpc.stub.StreamObserver<Req> invoke(
+        io.grpc.stub.StreamObserver<Resp> responseObserver) {
+      switch (methodId) {
+        default:
+          throw new AssertionError();
+      }
+    }
+  }
+
+  private static abstract class MonitoringServiceBaseDescriptorSupplier
+      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
+    MonitoringServiceBaseDescriptorSupplier() {}
+
+    @java.lang.Override
+    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
+      return monitoring.Monitoring.getDescriptor();
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
+      return getFileDescriptor().findServiceByName("MonitoringService");
+    }
+  }
+
+  private static final class MonitoringServiceFileDescriptorSupplier
+      extends MonitoringServiceBaseDescriptorSupplier {
+    MonitoringServiceFileDescriptorSupplier() {}
+  }
+
+  private static final class MonitoringServiceMethodDescriptorSupplier
+      extends MonitoringServiceBaseDescriptorSupplier
+      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
+    private final String methodName;
+
+    MonitoringServiceMethodDescriptorSupplier(String methodName) {
+      this.methodName = methodName;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
+      return getServiceDescriptor().findMethodByName(methodName);
+    }
+  }
+
+  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
+
+  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
+    io.grpc.ServiceDescriptor result = serviceDescriptor;
+    if (result == null) {
+      synchronized (MonitoringServiceGrpc.class) {
+        result = serviceDescriptor;
+        if (result == null) {
+          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
+              .setSchemaDescriptor(new MonitoringServiceFileDescriptorSupplier())
+              .addMethod(getCreateKpiMethod())
+              .addMethod(getGetKpiDescriptorMethod())
+              .addMethod(getIncludeKpiMethod())
+              .addMethod(getMonitorKpiMethod())
+              .addMethod(getGetStreamKpiMethod())
+              .addMethod(getGetInstantKpiMethod())
+              .build();
+        }
+      }
+    }
+    return result;
+  }
+}
diff --git a/src/automation/target/generated-sources/grpc/monitoring/MutinyMonitoringServiceGrpc.java b/src/automation/target/generated-sources/grpc/monitoring/MutinyMonitoringServiceGrpc.java
new file mode 100644
index 0000000000000000000000000000000000000000..e5157378c1d8d4608d5da2ec0e429fbb2412c175
--- /dev/null
+++ b/src/automation/target/generated-sources/grpc/monitoring/MutinyMonitoringServiceGrpc.java
@@ -0,0 +1,240 @@
+package monitoring;
+
+import static monitoring.MonitoringServiceGrpc.getServiceDescriptor;
+import static io.grpc.stub.ServerCalls.asyncUnaryCall;
+import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
+import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
+import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
+
+@javax.annotation.Generated(
+value = "by Mutiny Grpc generator",
+comments = "Source: monitoring.proto")
+public final class MutinyMonitoringServiceGrpc implements io.quarkus.grpc.runtime.MutinyGrpc {
+    private MutinyMonitoringServiceGrpc() {}
+
+    public static MutinyMonitoringServiceStub newMutinyStub(io.grpc.Channel channel) {
+        return new MutinyMonitoringServiceStub(channel);
+    }
+
+    
+    public static final class MutinyMonitoringServiceStub extends io.grpc.stub.AbstractStub<MutinyMonitoringServiceStub> implements io.quarkus.grpc.runtime.MutinyStub {
+        private MonitoringServiceGrpc.MonitoringServiceStub delegateStub;
+
+        private MutinyMonitoringServiceStub(io.grpc.Channel channel) {
+            super(channel);
+            delegateStub = MonitoringServiceGrpc.newStub(channel);
+        }
+
+        private MutinyMonitoringServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+            super(channel, callOptions);
+            delegateStub = MonitoringServiceGrpc.newStub(channel).build(channel, callOptions);
+        }
+
+        @Override
+        protected MutinyMonitoringServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
+            return new MutinyMonitoringServiceStub(channel, callOptions);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiId> createKpi(monitoring.Monitoring.KpiDescriptor request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::createKpi);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(monitoring.Monitoring.KpiId request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::getKpiDescriptor);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> includeKpi(monitoring.Monitoring.Kpi request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::includeKpi);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorKpi(monitoring.Monitoring.MonitorKpiRequest request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::monitorKpi);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.Kpi> getInstantKpi(monitoring.Monitoring.KpiId request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToOne(request, delegateStub::getInstantKpi);
+        }
+
+        
+        public io.smallrye.mutiny.Multi<monitoring.Monitoring.Kpi> getStreamKpi(monitoring.Monitoring.KpiId request) {
+            return io.quarkus.grpc.runtime.ClientCalls.oneToMany(request, delegateStub::getStreamKpi);
+        }
+
+    }
+
+    
+    public static abstract class MonitoringServiceImplBase implements io.grpc.BindableService {
+
+        private String compression;
+        /**
+        * Set whether the server will try to use a compressed response.
+        *
+        * @param compression the compression, e.g. {@code gzip}
+        */
+        public MonitoringServiceImplBase withCompression(String compression) {
+            this.compression = compression;
+            return this;
+        }
+
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiId> createKpi(monitoring.Monitoring.KpiDescriptor request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.KpiDescriptor> getKpiDescriptor(monitoring.Monitoring.KpiId request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> includeKpi(monitoring.Monitoring.Kpi request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<context.ContextOuterClass.Empty> monitorKpi(monitoring.Monitoring.MonitorKpiRequest request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        
+        public io.smallrye.mutiny.Uni<monitoring.Monitoring.Kpi> getInstantKpi(monitoring.Monitoring.KpiId request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        
+        public io.smallrye.mutiny.Multi<monitoring.Monitoring.Kpi> getStreamKpi(monitoring.Monitoring.KpiId request) {
+            throw new io.grpc.StatusRuntimeException(io.grpc.Status.UNIMPLEMENTED);
+        }
+
+        @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
+            return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getCreateKpiMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.KpiDescriptor,
+                                            monitoring.Monitoring.KpiId>(
+                                            this, METHODID_CREATE_KPI, compression)))
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getGetKpiDescriptorMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.KpiId,
+                                            monitoring.Monitoring.KpiDescriptor>(
+                                            this, METHODID_GET_KPI_DESCRIPTOR, compression)))
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getIncludeKpiMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.Kpi,
+                                            context.ContextOuterClass.Empty>(
+                                            this, METHODID_INCLUDE_KPI, compression)))
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getMonitorKpiMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.MonitorKpiRequest,
+                                            context.ContextOuterClass.Empty>(
+                                            this, METHODID_MONITOR_KPI, compression)))
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getGetStreamKpiMethod(),
+                            asyncServerStreamingCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.KpiId,
+                                            monitoring.Monitoring.Kpi>(
+                                            this, METHODID_GET_STREAM_KPI, compression)))
+                    .addMethod(
+                            monitoring.MonitoringServiceGrpc.getGetInstantKpiMethod(),
+                            asyncUnaryCall(
+                                    new MethodHandlers<
+                                            monitoring.Monitoring.KpiId,
+                                            monitoring.Monitoring.Kpi>(
+                                            this, METHODID_GET_INSTANT_KPI, compression)))
+                    .build();
+        }
+    }
+
+    private static final int METHODID_CREATE_KPI = 0;
+    private static final int METHODID_GET_KPI_DESCRIPTOR = 1;
+    private static final int METHODID_INCLUDE_KPI = 2;
+    private static final int METHODID_MONITOR_KPI = 3;
+    private static final int METHODID_GET_STREAM_KPI = 4;
+    private static final int METHODID_GET_INSTANT_KPI = 5;
+
+    private static final class MethodHandlers<Req, Resp> implements
+            io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
+            io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
+            io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
+            io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
+        private final MonitoringServiceImplBase serviceImpl;
+        private final int methodId;
+        private final String compression;
+
+        MethodHandlers(MonitoringServiceImplBase serviceImpl, int methodId, String compression) {
+            this.serviceImpl = serviceImpl;
+            this.methodId = methodId;
+            this.compression = compression;
+        }
+
+        @java.lang.Override
+        @java.lang.SuppressWarnings("unchecked")
+        public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
+            switch (methodId) {
+                case METHODID_CREATE_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((monitoring.Monitoring.KpiDescriptor) request,
+                            (io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiId>) responseObserver,
+                            compression,
+                            serviceImpl::createKpi);
+                    break;
+                case METHODID_GET_KPI_DESCRIPTOR:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((monitoring.Monitoring.KpiId) request,
+                            (io.grpc.stub.StreamObserver<monitoring.Monitoring.KpiDescriptor>) responseObserver,
+                            compression,
+                            serviceImpl::getKpiDescriptor);
+                    break;
+                case METHODID_INCLUDE_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((monitoring.Monitoring.Kpi) request,
+                            (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver,
+                            compression,
+                            serviceImpl::includeKpi);
+                    break;
+                case METHODID_MONITOR_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((monitoring.Monitoring.MonitorKpiRequest) request,
+                            (io.grpc.stub.StreamObserver<context.ContextOuterClass.Empty>) responseObserver,
+                            compression,
+                            serviceImpl::monitorKpi);
+                    break;
+                case METHODID_GET_STREAM_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToMany((monitoring.Monitoring.KpiId) request,
+                            (io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi>) responseObserver,
+                            compression,
+                            serviceImpl::getStreamKpi);
+                    break;
+                case METHODID_GET_INSTANT_KPI:
+                    io.quarkus.grpc.runtime.ServerCalls.oneToOne((monitoring.Monitoring.KpiId) request,
+                            (io.grpc.stub.StreamObserver<monitoring.Monitoring.Kpi>) responseObserver,
+                            compression,
+                            serviceImpl::getInstantKpi);
+                    break;
+                default:
+                    throw new java.lang.AssertionError();
+            }
+        }
+
+        @java.lang.Override
+        @java.lang.SuppressWarnings("unchecked")
+        public io.grpc.stub.StreamObserver<Req> invoke(io.grpc.stub.StreamObserver<Resp> responseObserver) {
+            switch (methodId) {
+                default:
+                    throw new java.lang.AssertionError();
+            }
+        }
+    }
+
+}
\ No newline at end of file
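# Illustrative sketch: the generated Java bindings above expose the MonitoringService RPCs
# (CreateKpi, IncludeKpi, MonitorKpi, GetInstantKpi, GetStreamKpi) consumed elsewhere in the
# controller. A minimal Python client could look roughly as follows, assuming standard
# protoc-generated monitoring_pb2 / monitoring_pb2_grpc modules; the module path, address and
# port are assumptions, and message fields are left empty for brevity.
import grpc
from monitoring.proto import monitoring_pb2, monitoring_pb2_grpc  # assumed module layout

def query_monitoring(address='monitoringservice:7070'):  # hypothetical service address/port
    channel = grpc.insecure_channel(address)
    stub = monitoring_pb2_grpc.MonitoringServiceStub(channel)
    kpi_id = stub.CreateKpi(monitoring_pb2.KpiDescriptor())  # register a KPI (fields omitted)
    stub.IncludeKpi(monitoring_pb2.Kpi())                    # push a sample (fields omitted)
    latest = stub.GetInstantKpi(kpi_id)                      # read the latest value back
    channel.close()
    return latest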
diff --git a/src/automation/target/kubernetes/kubernetes.yml b/src/automation/target/kubernetes/kubernetes.yml
index a57486db6f17111a43600c943a392fa462112f49..47b852adf6beb80c60488a298e9039cd2011469d 100644
--- a/src/automation/target/kubernetes/kubernetes.yml
+++ b/src/automation/target/kubernetes/kubernetes.yml
@@ -3,8 +3,8 @@ apiVersion: v1
 kind: Service
 metadata:
   annotations:
-    app.quarkus.io/commit-id: 43db942bd5641ae293df75b65cd1eefd40b8085e
-    app.quarkus.io/build-timestamp: 2021-11-17 - 06:42:01 +0000
+    app.quarkus.io/commit-id: cb66db4bc415be2e94c17f4152e9805301b202b5
+    app.quarkus.io/build-timestamp: 2022-01-05 - 10:42:14 +0000
   labels:
     app.kubernetes.io/name: automationservice
     app.kubernetes.io/version: 0.0.1
@@ -27,28 +27,28 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   annotations:
-    app.quarkus.io/commit-id: 43db942bd5641ae293df75b65cd1eefd40b8085e
-    app.quarkus.io/build-timestamp: 2021-11-17 - 06:42:01 +0000
+    app.quarkus.io/commit-id: cb66db4bc415be2e94c17f4152e9805301b202b5
+    app.quarkus.io/build-timestamp: 2022-01-05 - 10:42:14 +0000
   labels:
     app: automationservice
-    app.kubernetes.io/name: automationservice
     app.kubernetes.io/version: 0.0.1
+    app.kubernetes.io/name: automationservice
   name: automationservice
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app.kubernetes.io/name: automationservice
       app.kubernetes.io/version: 0.0.1
+      app.kubernetes.io/name: automationservice
   template:
     metadata:
       annotations:
-        app.quarkus.io/commit-id: 43db942bd5641ae293df75b65cd1eefd40b8085e
-        app.quarkus.io/build-timestamp: 2021-11-17 - 06:42:01 +0000
+        app.quarkus.io/commit-id: cb66db4bc415be2e94c17f4152e9805301b202b5
+        app.quarkus.io/build-timestamp: 2022-01-05 - 10:42:14 +0000
       labels:
         app: automationservice
-        app.kubernetes.io/name: automationservice
         app.kubernetes.io/version: 0.0.1
+        app.kubernetes.io/name: automationservice
     spec:
       containers:
         - env:
@@ -56,10 +56,10 @@ spec:
               valueFrom:
                 fieldRef:
                   fieldPath: metadata.namespace
-            - name: CONTEXT_SERVICE_HOST
-              value: context
             - name: DEVICE_SERVICE_HOST
-              value: device
+              value: DeviceService
+            - name: CONTEXT_SERVICE_HOST
+              value: ContextService
           image: registry.gitlab.com/teraflow-h2020/controller/automation:0.0.1
           imagePullPolicy: Always
           livenessProbe:
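# Illustrative sketch: the Deployment above now points CONTEXT_SERVICE_HOST / DEVICE_SERVICE_HOST
# at the ContextService / DeviceService Kubernetes Services. A component could resolve its peers
# roughly as below; the fallback values and the gRPC port are assumptions for illustration only.
import os

CONTEXT_HOST = os.environ.get('CONTEXT_SERVICE_HOST', 'ContextService')
DEVICE_HOST  = os.environ.get('DEVICE_SERVICE_HOST', 'DeviceService')
GRPC_PORT    = int(os.environ.get('GRPC_SERVICE_PORT', '1010'))  # hypothetical default port

context_target = '{:s}:{:d}'.format(CONTEXT_HOST, GRPC_PORT)
device_target  = '{:s}:{:d}'.format(DEVICE_HOST, GRPC_PORT)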
diff --git a/src/centralizedattackdetector/.gitlab-ci.yml b/src/centralizedattackdetector/.gitlab-ci.yml
deleted file mode 100644
index c2e4d7a4d360b90fc7e8cedef83aae69c1d07d5d..0000000000000000000000000000000000000000
--- a/src/centralizedattackdetector/.gitlab-ci.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-# Build, tag, and push the Docker images to the GitLab Docker registry
-build centralizedattackdetector:
-  variables:
-    IMAGE_NAME: 'centralizedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'centralizedattackdetector-test' # name of the microservice
-    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: build
-  before_script:
-    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-  script:
-    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
-    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-  rules:
-    - changes:
-      - src/$IMAGE_NAME/**
-      - .gitlab-ci.yml
-
-# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test centralizedattackdetector:
-  variables:
-    IMAGE_NAME: 'centralizedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'centralizedattackdetector-test' # name of the microservice
-    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: unit_test
-  needs:
-    - build centralizedattackdetector
-  before_script:
-    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
-  script:
-    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 10000:10000 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
-    - sleep 5
-    - docker ps -a
-    - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
-  after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
-  rules:
-    - changes:
-      - src/$IMAGE_NAME/**
-      - .gitlab-ci.yml
-
-# Deployment of the service in Kubernetes Cluster
-deploy centralizedattackdetector:
-  stage: deploy
-  needs:
-    - build centralizedattackdetector
-    - unit_test centralizedattackdetector
-    - dependencies all
-    - integ_test execute
-  script:
-    - kubectl version
-    - kubectl get all
-    - kubectl apply -f "manifests/centralizedattackdetectorservice.yaml"
-    - kubectl delete pods --selector app=centralizedattackdetectorservice
-    - kubectl get all
diff --git a/src/centralizedattackdetector/service/CentralizedAttackDetectorServiceServicerImpl.py b/src/centralizedattackdetector/service/CentralizedAttackDetectorServiceServicerImpl.py
deleted file mode 100644
index 1f5fbdcb9655cda3351580b50c19eddf2c0d192a..0000000000000000000000000000000000000000
--- a/src/centralizedattackdetector/service/CentralizedAttackDetectorServiceServicerImpl.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import grpc, logging
-from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
-from centralizedattackdetector.proto.context_pb2 import Empty, Service
-from centralizedattackdetector.proto.monitoring_pb2 import KpiList
-from centralizedattackdetector.proto.centralized_attack_detector_pb2_grpc import (
-    CentralizedAttackDetectorServiceServicer)
-
-LOGGER = logging.getLogger(__name__)
-
-SERVICE_NAME = 'CentralizedAttackDetector'
-METHOD_NAMES = ['NotifyServiceUpdate', 'DetectAttack', 'ReportSummarizedKpi', 'ReportKpi']
-METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
-
-class CentralizedAttackDetectorServiceServicerImpl(CentralizedAttackDetectorServiceServicer):
-
-    def __init__(self):
-        LOGGER.debug('Creating Servicer...')
-        LOGGER.debug('Servicer Created')
-
-    @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def NotifyServiceUpdate(self, request : Service, context : grpc.ServicerContext) -> Empty:
-        return Empty()
-
-    @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def DetectAttack(self, request : Empty, context : grpc.ServicerContext) -> Empty:
-        return Empty()
-
-    @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def ReportSummarizedKpi(self, request : KpiList, context : grpc.ServicerContext) -> Empty:
-        return Empty()
-
-    @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def ReportKpi(self, request : KpiList, context : grpc.ServicerContext) -> Empty:
-        return Empty()
diff --git a/src/centralizedattackdetector/tests/test_unitary.py b/src/centralizedattackdetector/tests/test_unitary.py
deleted file mode 100644
index 177e03080157dc59bdd0f71caa0430ec83fdd5c9..0000000000000000000000000000000000000000
--- a/src/centralizedattackdetector/tests/test_unitary.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import logging, pytest
-from centralizedattackdetector.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
-from centralizedattackdetector.client.CentralizedAttackDetectorClient import CentralizedAttackDetectorClient
-from centralizedattackdetector.proto.context_pb2 import Empty, Service
-from centralizedattackdetector.proto.monitoring_pb2 import Kpi, KpiList
-from centralizedattackdetector.service.CentralizedAttackDetectorService import CentralizedAttackDetectorService
-
-port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
-
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
-
-@pytest.fixture(scope='session')
-def centralized_attack_detector_service():
-    _service = CentralizedAttackDetectorService(
-        port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
-    _service.start()
-    yield _service
-    _service.stop()
-
-@pytest.fixture(scope='session')
-def centralized_attack_detector_client(centralized_attack_detector_service):
-    _client = CentralizedAttackDetectorClient(address='127.0.0.1', port=port)
-    yield _client
-    _client.close()
-
-def test_notify_service_update(centralized_attack_detector_client: CentralizedAttackDetectorClient):
-    service = Service()
-    centralized_attack_detector_client.NotifyServiceUpdate(service)
-
-def test_detect_attack(centralized_attack_detector_client: CentralizedAttackDetectorClient):
-    request = Empty()
-    centralized_attack_detector_client.DetectAttack(request)
-
-def test_report_summarized_kpi(centralized_attack_detector_client: CentralizedAttackDetectorClient):
-    kpi_list = KpiList()
-    centralized_attack_detector_client.ReportSummarizedKpi(kpi_list)
-
-def test_report_kpi(centralized_attack_detector_client: CentralizedAttackDetectorClient):
-    kpi_list = KpiList()
-    centralized_attack_detector_client.ReportKpi(kpi_list)
diff --git a/src/common/tests/PytestGenerateTests.py b/src/common/tests/PytestGenerateTests.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c5abcf45b2f3e2d14fd31a5b3c1ee5d434f10b3
--- /dev/null
+++ b/src/common/tests/PytestGenerateTests.py
@@ -0,0 +1,42 @@
+# Create a set of tests that can be run as follows ...
+#   from common.tests.PytestGenerateTests import pytest_generate_tests # pylint: disable=unused-import
+#
+#   scenario1 = ('basic', {'attribute': 'value'})
+#   scenario2 = ('advanced', {'attribute': 'value2'})
+#
+#   class TestSampleWithScenarios:
+#       scenarios = [scenario1, scenario2]
+#
+#       def test_demo1(self, attribute):
+#           assert isinstance(attribute, str)
+#
+#       def test_demo2(self, attribute):
+#           assert isinstance(attribute, str)
+#
+# ... and run them as:
+#   $ pytest --log-level=INFO --verbose my_test.py
+#   =================== test session starts ===================
+#   platform linux -- Python 3.9.6, pytest-6.2.4, py-1.10.0, pluggy-0.13.1 -- /home/.../.pyenv/.../bin/python3.9
+#   cachedir: .pytest_cache
+#   benchmark: 3.4.1 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0
+#                               calibration_precision=10 warmup=False warmup_iterations=100000)
+#   rootdir: /home/.../tests
+#   plugins: benchmark-3.4.1
+#   collected 4 items
+#
+#   my_test.py::TestSampleWithScenarios::test_demo1[basic] PASSED          [ 25%]
+#   my_test.py::TestSampleWithScenarios::test_demo2[basic] PASSED          [ 50%]
+#   my_test.py::TestSampleWithScenarios::test_demo1[advanced] PASSED       [ 75%]
+#   my_test.py::TestSampleWithScenarios::test_demo2[advanced] PASSED       [100%]
+#
+#   ==================== 4 passed in 0.02s ====================
+
+def pytest_generate_tests(metafunc):
+    idlist = []
+    argvalues = []
+    for scenario in metafunc.cls.scenarios:
+        idlist.append(scenario[0])
+        items = scenario[1].items()
+        argnames = [x[0] for x in items]
+        argvalues.append([x[1] for x in items])
+    metafunc.parametrize(argnames, argvalues, ids=idlist, scope='class')
diff --git a/src/centralizedattackdetector/__init__.py b/src/common/tests/__init__.py
similarity index 100%
rename from src/centralizedattackdetector/__init__.py
rename to src/common/tests/__init__.py
diff --git a/src/common/type_checkers/Assertions.py b/src/common/type_checkers/Assertions.py
index f11a059eb5463751a920b116e86438b7e1b13484..31458843c2b5d6e850a7230249b7d499a1a894b9 100644
--- a/src/common/type_checkers/Assertions.py
+++ b/src/common/type_checkers/Assertions.py
@@ -27,6 +27,16 @@ def validate_device_operational_status_enum(message):
         'DEVICEOPERATIONALSTATUS_ENABLED'
     ]
 
+def validate_kpi_sample_types_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'KPISAMPLETYPE_UNKNOWN',
+        'KPISAMPLETYPE_PACKETS_TRANSMITTED',
+        'KPISAMPLETYPE_PACKETS_RECEIVED',
+        'KPISAMPLETYPE_BYTES_TRANSMITTED',
+        'KPISAMPLETYPE_BYTES_RECEIVED',
+    ]
+
 def validate_service_type_enum(message):
     assert isinstance(message, str)
     assert message in [
@@ -128,6 +138,12 @@ def validate_endpoint_id(message):
     assert 'endpoint_uuid' in message
     validate_uuid(message['endpoint_uuid'])
 
+def validate_connection_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connection_uuid' in message
+    validate_uuid(message['connection_uuid'])
+
 
 # ----- Lists of Identifiers -------------------------------------------------------------------------------------------
 
@@ -166,6 +182,13 @@ def validate_link_ids(message):
     assert isinstance(message['link_ids'], list)
     for link_id in message['link_ids']: validate_link_id(link_id)
 
+def validate_connection_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connection_ids' in message
+    assert isinstance(message['connection_ids'], list)
+    for connection_id in message['connection_ids']: validate_connection_id(connection_id)
+
 
 # ----- Objects --------------------------------------------------------------------------------------------------------
 
@@ -222,11 +245,14 @@ def validate_topology(message, num_devices=None, num_links=None):
 
 def validate_endpoint(message):
     assert isinstance(message, dict)
-    assert len(message.keys()) == 2
+    assert len(message.keys()) == 3
     assert 'endpoint_id' in message
     validate_endpoint_id(message['endpoint_id'])
     assert 'endpoint_type' in message
     assert isinstance(message['endpoint_type'], str)
+    assert 'kpi_sample_types' in message
+    assert isinstance(message['kpi_sample_types'], list)
+    for kpi_sample_type in message['kpi_sample_types']: validate_kpi_sample_types_enum(kpi_sample_type)
 
 def validate_device(message):
     assert isinstance(message, dict)
@@ -255,6 +281,20 @@ def validate_link(message):
     assert isinstance(message['link_endpoint_ids'], list)
     for endpoint_id in message['link_endpoint_ids']: validate_endpoint_id(endpoint_id)
 
+def validate_connection(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 4
+    assert 'connection_id' in message
+    validate_connection_id(message['connection_id'])
+    assert 'service_id' in message
+    validate_service_id(message['service_id'])
+    assert 'path_hops_endpoint_ids' in message
+    assert isinstance(message['path_hops_endpoint_ids'], list)
+    for endpoint_id in message['path_hops_endpoint_ids']: validate_endpoint_id(endpoint_id)
+    assert 'sub_service_ids' in message
+    assert isinstance(message['sub_service_ids'], list)
+    for sub_service_id in message['sub_service_ids']: validate_service_id(sub_service_id)
+
 
 # ----- Lists of Objects -----------------------------------------------------------------------------------------------
 
@@ -292,3 +332,10 @@ def validate_links(message):
     assert 'links' in message
     assert isinstance(message['links'], list)
     for link in message['links']: validate_link(link)
+
+def validate_connections(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'connections' in message
+    assert isinstance(message['connections'], list)
+    for connection in message['connections']: validate_connection(connection)
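# Illustrative sketch: minimal objects accepted by the new connection validators, assuming (as
# with the other identifiers in this module) that UUIDs are carried as {'uuid': <str>} objects;
# the UUID value is a placeholder and validate_uuid may additionally require RFC-4122 formatting.
connection_id = {'connection_uuid': {'uuid': '00000000-0000-0000-0000-000000000001'}}
validate_connection_id(connection_id)
validate_connection_ids({'connection_ids': [connection_id]})
validate_kpi_sample_types_enum('KPISAMPLETYPE_BYTES_RECEIVED')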
diff --git a/src/common/type_checkers/Checkers.py b/src/common/type_checkers/Checkers.py
index d0eddcf213143c4c3d99c9edfafd1305384e777b..f78395c9c5f480bf75b4c9344dfeb3b48f9da062 100644
--- a/src/common/type_checkers/Checkers.py
+++ b/src/common/type_checkers/Checkers.py
@@ -1,5 +1,5 @@
 import re
-from typing import Any, Container, List, Optional, Pattern, Set, Sized, Tuple, Union
+from typing import Any, Container, Dict, List, Optional, Pattern, Set, Sized, Tuple, Union
 
 def chk_none(name : str, value : Any, reason=None) -> Any:
     if value is None: return value
@@ -11,6 +11,11 @@ def chk_not_none(name : str, value : Any, reason=None) -> Any:
     if reason is None: reason = 'must not be None.'
     raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason)))
 
+def chk_attribute(name : str, container : Dict, container_name : str, **kwargs):
+    if name in container: return container[name]
+    if 'default' in kwargs: return kwargs['default']
+    raise AttributeError('Missing object({:s}) in container({:s})'.format(str(name), str(container_name)))
+
 def chk_type(name : str, value : Any, type_or_types : Union[type, Set[type]] = set()) -> Any:
     if isinstance(value, type_or_types): return value
     msg = '{}({}) is of a wrong type({}). Accepted type_or_types({}).'
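# Illustrative sketch of the new chk_attribute helper added above: it returns the named entry
# when present, falls back to a caller-supplied default, and raises AttributeError otherwise.
# The request dict and attribute names are placeholders.
request = {'service_id': 'svc-01'}
chk_attribute('service_id', request, 'Request')                  # -> 'svc-01'
chk_attribute('service_type', request, 'Request', default=None)  # -> None
chk_attribute('service_type', request, 'Request')                # raises AttributeError:
                                                                 # Missing object(service_type) in container(Request)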
diff --git a/src/compute/.gitlab-ci.yml b/src/compute/.gitlab-ci.yml
index 9021dd3f4fe948176b7427814596fada13de99bf..96607c896fcfc88d0505b73da007fcc2dcb5c257 100644
--- a/src/compute/.gitlab-ci.yml
+++ b/src/compute/.gitlab-ci.yml
@@ -1,8 +1,7 @@
-# Build, tag, and push the Docker images to the GitLab Docker registry
+# Build, tag and push the Docker image to the GitLab registry
 build compute:
   variables:
     IMAGE_NAME: 'compute' # name of the microservice
-    IMAGE_NAME_TEST: 'compute-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,50 +10,80 @@ build compute:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
-# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test compute:
+# Apply unit tests to the component
+unit test compute:
   variables:
     IMAGE_NAME: 'compute' # name of the microservice
-    IMAGE_NAME_TEST: 'compute-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
     - build compute
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 9090:9090 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 9090:9090 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
+  artifacts:
+      when: always
+      reports:
+        junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+        cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
 
 # Deployment of the service in Kubernetes Cluster
 deploy compute:
+  variables:
+    IMAGE_NAME: 'compute' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build compute
-    - unit_test compute
-    - dependencies all
-    - integ_test execute
+    - unit test compute
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/computeservice.yaml"
-    - kubectl delete pods --selector app=computeservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/compute/Config.py b/src/compute/Config.py
index e95740493fd16940cee2d1e780472a1e90801303..b2d3179fab6e55368ba751aac48de507551c4516 100644
--- a/src/compute/Config.py
+++ b/src/compute/Config.py
@@ -1,4 +1,5 @@
 import logging
+from werkzeug.security import generate_password_hash
 
 # General settings
 LOG_LEVEL = logging.WARNING
@@ -10,7 +11,10 @@ GRPC_GRACE_PERIOD = 60
 
 # REST-API settings
 RESTAPI_SERVICE_PORT = 8080
-RESTAPI_BASE_URL = '/api'
+RESTAPI_BASE_URL = '/restconf/data'
+RESTAPI_USERS = {   # TODO: implement a database of credentials and permissions
+    'admin': generate_password_hash('admin'),
+}
 
 # Prometheus settings
 METRICS_PORT = 9192
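# Illustrative sketch: RESTAPI_USERS stores werkzeug password hashes, so HTTP Basic credentials
# can be verified roughly as below; verify_password is a hypothetical helper name, not part of
# the component.
from werkzeug.security import check_password_hash
from compute.Config import RESTAPI_USERS

def verify_password(username, password):
    # Return the username on success so it can serve as the request identity; None rejects.
    pwhash = RESTAPI_USERS.get(username)
    if pwhash is not None and check_password_hash(pwhash, password):
        return username
    return None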
diff --git a/src/compute/Dockerfile b/src/compute/Dockerfile
index 83e4fad35704dd8febb8aa627936d54c02e89f68..99d4e3ed1adc8a688874f3291f4891112543b3ff 100644
--- a/src/compute/Dockerfile
+++ b/src/compute/Dockerfile
@@ -30,6 +30,8 @@ RUN python3 -m pip install -r compute/requirements.in
 # Add files into working directory
 COPY common/. common
 COPY compute/. compute
+COPY context/. context
+COPY service/. service
 
 # Start compute service
 ENTRYPOINT ["python", "-m", "compute.service"]
diff --git a/src/compute/genproto.sh b/src/compute/genproto.sh
index 01c247db1659aa36f8bb68258ff7aba974a689c8..c991aaf01610a0d54c96683cb4c4cb49490d74a1 100755
--- a/src/compute/genproto.sh
+++ b/src/compute/genproto.sh
@@ -26,9 +26,11 @@ touch proto/__init__.py
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto compute.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto kpi_sample_types.proto
 
 rm proto/context_pb2_grpc.py
 rm proto/service_pb2_grpc.py
+rm proto/kpi_sample_types_pb2_grpc.py
 
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
diff --git a/src/compute/proto/context_pb2.py b/src/compute/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/compute/proto/context_pb2.py
+++ b/src/compute/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
diff --git a/src/compute/proto/kpi_sample_types_pb2.py b/src/compute/proto/kpi_sample_types_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490
--- /dev/null
+++ b/src/compute/proto/kpi_sample_types_pb2.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: kpi_sample_types.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='kpi_sample_types.proto',
+  package='kpi_sample_types',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
+)
+
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
+
+
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/compute/requirements.in b/src/compute/requirements.in
index 1da334a54b1c42b01eb8f731d8fd5bd975edd2cf..42a2f3d40194e61bae5c1e14bb81eb4bc43d5504 100644
--- a/src/compute/requirements.in
+++ b/src/compute/requirements.in
@@ -1,5 +1,11 @@
+Flask
+Flask-HTTPAuth
+Flask-RESTful
 grpcio-health-checking
 grpcio
+jsonschema
 prometheus-client
 pytest
 pytest-benchmark
+requests
+coverage
diff --git a/src/compute/service/ComputeService.py b/src/compute/service/ComputeService.py
index 36d43283c43c82faff1748428943cb9a9687c840..51a15472bc83416ebbfc3c421e5cfed2a9682bdc 100644
--- a/src/compute/service/ComputeService.py
+++ b/src/compute/service/ComputeService.py
@@ -24,9 +24,9 @@ class ComputeService:
         self.server = None
 
     def start(self):
-        self.endpoint = '{}:{}'.format(self.address, self.port)
-        LOGGER.debug('Starting Service (tentative endpoint: {}, max_workers: {})...'.format(
-            self.endpoint, self.max_workers))
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
+        LOGGER.debug('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
 
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
@@ -39,15 +39,15 @@ class ComputeService:
         add_HealthServicer_to_server(self.health_servicer, self.server)
 
         port = self.server.add_insecure_port(self.endpoint)
-        self.endpoint = '{}:{}'.format(self.address, port)
-        LOGGER.info('Listening on {}...'.format(self.endpoint))
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        LOGGER.info('Listening on {:s}...'.format(str(self.endpoint)))
         self.server.start()
         self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
 
         LOGGER.debug('Service started')
 
     def stop(self):
-        LOGGER.debug('Stopping service (grace period {} seconds)...'.format(self.grace_period))
+        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
         self.health_servicer.enter_graceful_shutdown()
         self.server.stop(self.grace_period)
         LOGGER.debug('Service stopped')
diff --git a/src/compute/service/__main__.py b/src/compute/service/__main__.py
index f45af374c471222bb4fdb089860418c5895d6321..eacc1f6c464112192194fca5827033aedc57385c 100644
--- a/src/compute/service/__main__.py
+++ b/src/compute/service/__main__.py
@@ -4,9 +4,9 @@ from common.Settings import get_setting
 from compute.Config import (
     GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL,
     METRICS_PORT)
-from compute.service.ComputeService import ComputeService
-from compute.service.rest_server.Server import Server
-from compute.service.rest_server.resources.Compute import Compute
+from .ComputeService import ComputeService
+from .rest_server.RestServer import RestServer
+from .rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn
 
 terminate = threading.Event()
 LOGGER = None
@@ -41,9 +41,8 @@ def main():
     grpc_service = ComputeService(port=grpc_service_port, max_workers=max_workers, grace_period=grace_period)
     grpc_service.start()
 
-    rest_server = Server(port=restapi_service_port, base_url=restapi_base_url)
-    rest_server.add_resource(
-        Compute, '/restconf/config/compute', endpoint='api.compute')
+    rest_server = RestServer(port=restapi_service_port, base_url=restapi_base_url)
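+    # register the RFC 8466 (IETF L2VPN Service Model) endpoints on the REST server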
+    register_ietf_l2vpn(rest_server)
     rest_server.start()
 
     # Wait for Ctrl+C or termination signal
diff --git a/src/compute/service/rest_server/Server.py b/src/compute/service/rest_server/RestServer.py
similarity index 66%
rename from src/compute/service/rest_server/Server.py
rename to src/compute/service/rest_server/RestServer.py
index c68515e915a6de82b3b08525d3383ac21b6c25b2..8ed8dbbbf69bc89c9c76fdf31e16b0687d47856e 100644
--- a/src/compute/service/rest_server/Server.py
+++ b/src/compute/service/rest_server/RestServer.py
@@ -1,6 +1,6 @@
-import logging, threading
-from flask import Flask
-from flask_restful import Api
+import logging, threading, time
+from flask import Flask, request
+from flask_restful import Api, Resource
 from werkzeug.serving import make_server
 from compute.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT
 
@@ -9,16 +9,24 @@ logging.getLogger('werkzeug').setLevel(logging.WARNING)
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
-class Server(threading.Thread):
+def log_request(response):
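+    # after_request hook (attached in RestServer.__init__): logs timestamp, client address, HTTP method,
+    # requested path and response status for every REST call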
+    timestamp = time.strftime('[%Y-%b-%d %H:%M]')
+    LOGGER.info('%s %s %s %s %s', timestamp, request.remote_addr, request.method, request.full_path, response.status)
+    return response
+
+class RestServer(threading.Thread):
     def __init__(self, host=BIND_ADDRESS, port=RESTAPI_SERVICE_PORT, base_url=RESTAPI_BASE_URL):
         threading.Thread.__init__(self, daemon=True)
         self.host = host
         self.port = port
         self.base_url = base_url
+        self.srv = None
+        self.ctx = None
         self.app = Flask(__name__)
+        self.app.after_request(log_request)
         self.api = Api(self.app, prefix=self.base_url)
 
-    def add_resource(self, resource, *urls, **kwargs):
+    def add_resource(self, resource : Resource, *urls, **kwargs):
         self.api.add_resource(resource, *urls, **kwargs)
 
     def run(self):
diff --git a/src/centralizedattackdetector/client/__init__.py b/src/compute/service/rest_server/nbi_plugins/__init__.py
similarity index 100%
rename from src/centralizedattackdetector/client/__init__.py
rename to src/compute/service/rest_server/nbi_plugins/__init__.py
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..87c32c444d39acb048ede9105c9a0dc2c7e3899e
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/Constants.py
@@ -0,0 +1,3 @@
+DEFAULT_MTU = 1512
+DEFAULT_ADDRESS_FAMILIES = ['IPV4']
+DEFAULT_SUB_INTERFACE_INDEX = 0
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py
new file mode 100644
index 0000000000000000000000000000000000000000..752a027ad0d41f67f6a2312ee166a51ebcbc23bd
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Service.py
@@ -0,0 +1,69 @@
+import logging
+from typing import Dict, List
+from flask import request
+from flask.json import jsonify
+from flask_restful import Resource
+from werkzeug.exceptions import UnsupportedMediaType
+from common.Constants import DEFAULT_CONTEXT_UUID
+from common.Settings import get_setting
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import ServiceId
+from service.client.ServiceClient import ServiceClient
+from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum
+from .tools.Authentication import HTTP_AUTH
+from .tools.HttpStatusCodes import HTTP_CREATED, HTTP_GATEWAYTIMEOUT, HTTP_NOCONTENT, HTTP_OK, HTTP_SERVERERROR
+
+LOGGER = logging.getLogger(__name__)
+
+class L2VPN_Service(Resource):
+    def __init__(self) -> None:
+        super().__init__()
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        self.service_client = ServiceClient(
+            get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC'))
+
+    @HTTP_AUTH.login_required
+    def get(self, vpn_id : str):
+        LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id)))
+        LOGGER.debug('Request: {:s}'.format(str(request)))
+
+        # pylint: disable=no-member
+        service_id_request = ServiceId()
+        service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID
+        service_id_request.service_uuid.uuid = vpn_id
+
+        try:
+            service_reply = self.context_client.GetService(service_id_request)
+            if service_reply.service_id != service_id_request: # pylint: disable=no-member
+                raise Exception('Service retrieval failed. Wrong Service Id was returned')
+
+            service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+            service_status = service_reply.service_status.service_status
+            response = jsonify({})
+            response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT
+        except Exception as e: # pylint: disable=broad-except
+            LOGGER.exception('Something went wrong Retrieving Service {:s}'.format(str(request)))
+            response = jsonify({'error': str(e)})
+            response.status_code = HTTP_SERVERERROR
+        return response
+
+    @HTTP_AUTH.login_required
+    def delete(self, vpn_id : str):
+        LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id)))
+        LOGGER.debug('Request: {:s}'.format(str(request)))
+
+        # pylint: disable=no-member
+        service_id_request = ServiceId()
+        service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID
+        service_id_request.service_uuid.uuid = vpn_id
+
+        try:
+            self.service_client.DeleteService(service_id_request)
+            response = jsonify({})
+            response.status_code = HTTP_NOCONTENT
+        except Exception as e: # pylint: disable=broad-except
+            LOGGER.exception('Something went wrong Deleting Service {:s}'.format(str(request)))
+            response = jsonify({'error': str(e)})
+            response.status_code = HTTP_SERVERERROR
+        return response
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ed0293f0729c6d4617a445034702f706a6daa25
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_Services.py
@@ -0,0 +1,55 @@
+import logging
+from typing import Dict, List
+from flask import request
+from flask.json import jsonify
+from flask_restful import Resource
+from werkzeug.exceptions import UnsupportedMediaType
+from common.Constants import DEFAULT_CONTEXT_UUID
+from common.Settings import get_setting
+from service.client.ServiceClient import ServiceClient
+from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum
+from .schemas.vpn_service import SCHEMA_VPN_SERVICE
+from .tools.Authentication import HTTP_AUTH
+from .tools.HttpStatusCodes import HTTP_CREATED, HTTP_SERVERERROR
+from .tools.Validator import validate_message
+
+LOGGER = logging.getLogger(__name__)
+
+class L2VPN_Services(Resource):
+    def __init__(self) -> None:
+        super().__init__()
+        self.service_client = ServiceClient(
+            get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC'))
+
+    @HTTP_AUTH.login_required
+    def get(self):
+        return {}
+
+    @HTTP_AUTH.login_required
+    def post(self):
+        if not request.is_json: raise UnsupportedMediaType('JSON payload is required')
+        request_data : Dict = request.json
+        LOGGER.debug('Request: {:s}'.format(str(request_data)))
+        validate_message(SCHEMA_VPN_SERVICE, request_data)
+
+        vpn_services : List[Dict] = request_data['ietf-l2vpn-svc:vpn-service']
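+        # e.g. (illustrative payload) {'ietf-l2vpn-svc:vpn-service': [{'vpn-id': 'vpn1'}, ...]};
+        # only the 'vpn-id' of each entry is used below to identify the Service to be created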
+        for vpn_service in vpn_services:
+            # pylint: disable=no-member
+            service_request = Service()
+            service_request.service_id.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID
+            service_request.service_id.service_uuid.uuid = vpn_service['vpn-id']
+            service_request.service_type = ServiceTypeEnum.SERVICETYPE_L3NM
+            service_request.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
+
+            try:
+                service_reply = self.service_client.CreateService(service_request)
+                if service_reply != service_request.service_id: # pylint: disable=no-member
+                    raise Exception('Service creation failed. Wrong Service Id was returned')
+
+                response = jsonify({})
+                response.status_code = HTTP_CREATED
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Something went wrong Creating Service {:s}'.format(str(request)))
+                response = jsonify({'error': str(e)})
+                response.status_code = HTTP_SERVERERROR
+        return response
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f02e50a4847b78f431fa720b577c6c3cc42cc7f
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/L2VPN_SiteNetworkAccesses.py
@@ -0,0 +1,161 @@
+import json, logging
+from typing import Dict
+from flask import request
+from flask.json import jsonify
+from flask.wrappers import Response
+from flask_restful import Resource
+from werkzeug.exceptions import UnsupportedMediaType
+from common.Constants import DEFAULT_CONTEXT_UUID
+from common.Settings import get_setting
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Service, ServiceId, ServiceStatusEnum
+from service.client.ServiceClient import ServiceClient
+from .schemas.site_network_access import SCHEMA_SITE_NETWORK_ACCESS
+from .tools.Authentication import HTTP_AUTH
+from .tools.HttpStatusCodes import HTTP_NOCONTENT, HTTP_SERVERERROR
+from .tools.Validator import validate_message
+from .Constants import DEFAULT_ADDRESS_FAMILIES, DEFAULT_MTU, DEFAULT_SUB_INTERFACE_INDEX
+
+LOGGER = logging.getLogger(__name__)
+
+def process_site_network_access(context_client : ContextClient, site_network_access : Dict) -> Service:
+    vpn_id = site_network_access['vpn-attachment']['vpn-id']
+    cvlan_id = site_network_access['connection']['tagged-interface']['dot1q-vlan-tagged']['cvlan-id']
+    bearer_reference = site_network_access['bearer']['bearer-reference']
+
+    # Assume bearer_reference    = '<device_uuid>:<endpoint_uuid>:<router_id>'
+    # Assume route_distinguisher = 0:<cvlan_id>
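+    # e.g. (illustrative) bearer_reference='R1:13/1/2:10.0.0.1' with cvlan_id=300 yields
+    #      device_uuid='R1', endpoint_uuid='13/1/2', router_id='10.0.0.1', route_distinguisher='0:300'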
+    device_uuid,endpoint_uuid,router_id = bearer_reference.split(':')
+    route_distinguisher = '0:{:d}'.format(cvlan_id)
+
+    # pylint: disable=no-member
+    service_id = ServiceId()
+    service_id.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID
+    service_id.service_uuid.uuid = vpn_id
+
+    service_readonly = context_client.GetService(service_id)
+    service = Service()
+    service.CopyFrom(service_readonly)
+
+    for endpoint_id in service.service_endpoint_ids:                        # pylint: disable=no-member
+        if endpoint_id.device_id.device_uuid.uuid != device_uuid: continue
+        if endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue
+        break   # found, do nothing
+    else:
+        # not found, add it
+        endpoint_id = service.service_endpoint_ids.add()                    # pylint: disable=no-member
+        endpoint_id.device_id.device_uuid.uuid = device_uuid
+        endpoint_id.endpoint_uuid.uuid = endpoint_uuid
+
+    for config_rule in service.service_config.config_rules:                 # pylint: disable=no-member
+        if config_rule.resource_key != 'settings': continue
+        json_settings = json.loads(config_rule.resource_value)
+
+        if 'route_distinguisher' not in json_settings:                      # missing, add it
+            json_settings['route_distinguisher'] = route_distinguisher
+        elif json_settings['route_distinguisher'] != route_distinguisher:   # differs, raise exception
+            msg = 'Specified RouteDistinguisher({:s}) differs from Service RouteDistinguisher({:s})'
+            raise Exception(msg.format(str(json_settings['route_distinguisher']), str(route_distinguisher)))
+
+        if 'mtu' not in json_settings:                                      # missing, add it
+            json_settings['mtu'] = DEFAULT_MTU
+        elif json_settings['mtu'] != DEFAULT_MTU:                           # differs, raise exception
+            msg = 'Specified MTU({:s}) differs from Service MTU({:s})'
+            raise Exception(msg.format(str(json_settings['mtu']), str(DEFAULT_MTU)))
+
+        if 'address_families' not in json_settings:                         # missing, add it
+            json_settings['address_families'] = DEFAULT_ADDRESS_FAMILIES
+        elif json_settings['address_families'] != DEFAULT_ADDRESS_FAMILIES: # differs, raise exception
+            msg = 'Specified AddressFamilies({:s}) differs from Service AddressFamilies({:s})'
+            raise Exception(msg.format(str(json_settings['address_families']), str(DEFAULT_ADDRESS_FAMILIES)))
+
+        config_rule.resource_value = json.dumps(json_settings, sort_keys=True)
+        break
+    else:
+        # not found, add it
+        config_rule = service.service_config.config_rules.add()             # pylint: disable=no-member
+        config_rule.resource_key = 'settings'
+        config_rule.resource_value = json.dumps({
+            'route_distinguisher': route_distinguisher,
+            'mtu': DEFAULT_MTU,
+            'address_families': DEFAULT_ADDRESS_FAMILIES,
+        }, sort_keys=True)
+
+    endpoint_settings_key = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
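+    # e.g. (illustrative) endpoint_settings_key = 'device[R1]/endpoint[13/1/2]/settings'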
+    for config_rule in service.service_config.config_rules:                 # pylint: disable=no-member
+        if config_rule.resource_key != endpoint_settings_key: continue
+        json_settings = json.loads(config_rule.resource_value)
+
+        if 'router_id' not in json_settings:                                # missing, add it
+            json_settings['router_id'] = router_id
+        elif json_settings['router_id'] != router_id:                       # differs, raise exception
+            msg = 'Specified RouterId({:s}) differs from Service RouterId({:s})'
+            raise Exception(msg.format(str(json_settings['router_id']), str(router_id)))
+
+        if 'sub_interface_index' not in json_settings:                      # missing, add it
+            json_settings['sub_interface_index'] = DEFAULT_SUB_INTERFACE_INDEX
+        elif json_settings['sub_interface_index'] != DEFAULT_SUB_INTERFACE_INDEX:   # differs, raise exception
+            msg = 'Specified SubInterfaceIndex({:s}) differs from Service SubInterfaceIndex({:s})'
+            raise Exception(msg.format(
+                str(json_settings['sub_interface_index']), str(DEFAULT_SUB_INTERFACE_INDEX)))
+
+        config_rule.resource_value = json.dumps(json_settings, sort_keys=True)
+        break
+    else:
+        # not found, add it
+        config_rule = service.service_config.config_rules.add()             # pylint: disable=no-member
+        config_rule.resource_key = endpoint_settings_key
+        config_rule.resource_value = json.dumps({
+            'router_id': router_id,
+            'sub_interface_index': DEFAULT_SUB_INTERFACE_INDEX,
+        }, sort_keys=True)
+
+    if len(service.service_endpoint_ids) >= 2:
+        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+
+    return service
+
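+# Illustrative sketch (not part of the module): after the two loops above, the Service carries
+# two flavors of config rules; with placeholder values (the actual defaults are defined earlier
+# in this file) they would look roughly like:
+#   resource_key='settings'
+#   resource_value='{"address_families": [...], "mtu": <DEFAULT_MTU>, "route_distinguisher": "<rd>"}'
+#   resource_key='device[<device_uuid>]/endpoint[<endpoint_uuid>]/settings'
+#   resource_value='{"router_id": "<router_id>", "sub_interface_index": <DEFAULT_SUB_INTERFACE_INDEX>}'
+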
+def process_list_site_network_access(
+    context_client : ContextClient, service_client : ServiceClient, request_data : Dict) -> Response:
+
+    LOGGER.debug('Request: {:s}'.format(str(request_data)))
+    validate_message(SCHEMA_SITE_NETWORK_ACCESS, request_data)
+
+    errors = []
+    for site_network_access in request_data['ietf-l2vpn-svc:site-network-access']:
+        try:
+            service_request = process_site_network_access(context_client, site_network_access)
+            service_reply = service_client.UpdateService(service_request)
+            if service_reply != service_request.service_id: # pylint: disable=no-member
+                raise Exception('Service update failed. Wrong Service Id was returned')
+        except Exception as e: # pylint: disable=broad-except
+            LOGGER.exception('Something went wrong Updating Service {:s}'.format(str(site_network_access)))
+            errors.append({'error': str(e)})
+
+    response = jsonify(errors)
+    response.status_code = HTTP_NOCONTENT if len(errors) == 0 else HTTP_SERVERERROR
+    return response
+
+class L2VPN_SiteNetworkAccesses(Resource):
+    def __init__(self) -> None:
+        super().__init__()
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        self.service_client = ServiceClient(
+            get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC'))
+
+    #@HTTP_AUTH.login_required
+    #def get(self):
+    #    return {}
+
+    @HTTP_AUTH.login_required
+    def post(self, site_id : str):
+        if not request.is_json: raise UnsupportedMediaType('JSON payload is required')
+        LOGGER.debug('Site_Id: {:s}'.format(str(site_id)))
+        return process_list_site_network_access(self.context_client, self.service_client, request.json)
+
+    @HTTP_AUTH.login_required
+    def put(self, site_id : str):
+        if not request.is_json: raise UnsupportedMediaType('JSON payload is required')
+        LOGGER.debug('Site_Id: {:s}'.format(str(site_id)))
+        return process_list_site_network_access(self.context_client, self.service_client, request.json)
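+
+# Illustrative request (payload as in the schema example; host/port and credentials are assumptions):
+#
+#   curl -u admin:admin -X POST -H 'Content-Type: application/json' \
+#     http://127.0.0.1:8080/ietf-l2vpn-svc:l2vpn-svc/sites/site=1/site-network-accesses \
+#     -d '{"ietf-l2vpn-svc:site-network-access": [{...}]}'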
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..979c8a3bc1903381516bf0f9683bbe4e4f2c3cb3
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py
@@ -0,0 +1,22 @@
+# RFC 8466 - L2VPN Service Model (L2SM)
+# Ref: https://datatracker.ietf.org/doc/html/rfc8466
+
+from flask_restful import Resource
+from compute.service.rest_server.RestServer import RestServer
+from .L2VPN_Services import L2VPN_Services
+from .L2VPN_Service import L2VPN_Service
+from .L2VPN_SiteNetworkAccesses import L2VPN_SiteNetworkAccesses
+
+URL_PREFIX      = '/ietf-l2vpn-svc:l2vpn-svc'
+
+def _add_resource(rest_server : RestServer, resource : Resource, *urls, **kwargs):
+    urls = [(URL_PREFIX + url) for url in urls]
+    rest_server.add_resource(resource, *urls, **kwargs)
+
+def register_ietf_l2vpn(rest_server : RestServer):
+    _add_resource(rest_server, L2VPN_Services,
+        '/vpn-services')
+    _add_resource(rest_server, L2VPN_Service,
+        '/vpn-services/vpn-service=<vpn_id>', '/vpn-services/vpn-service=<vpn_id>/')
+    _add_resource(rest_server, L2VPN_SiteNetworkAccesses,
+        '/sites/site=<site_id>/site-network-accesses', '/sites/site=<site_id>/site-network-accesses/')
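+
+# Hypothetical wiring sketch (not part of this module): the Compute service is expected to create
+# its RestServer and call register_ietf_l2vpn() on it at startup; the constructor arguments and the
+# start() call below are assumptions for illustration only.
+#
+#   from compute.service.rest_server.RestServer import RestServer
+#   from compute.service.rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn
+#
+#   rest_server = RestServer()
+#   register_ietf_l2vpn(rest_server)
+#   rest_server.start()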
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py
new file mode 100644
index 0000000000000000000000000000000000000000..f54da792b526cede52b94892ee9946fb63c6b015
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/Common.py
@@ -0,0 +1,2 @@
+# String pattern for UUIDs such as '3fd942ee-2dc3-41d1-aeec-65aa85d117b2'
+REGEX_UUID = r'[a-fA-F0-9]{8}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{12}'
diff --git a/src/centralizedattackdetector/proto/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/__init__.py
similarity index 100%
rename from src/centralizedattackdetector/proto/__init__.py
rename to src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/__init__.py
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py
new file mode 100644
index 0000000000000000000000000000000000000000..33ba8cc7fe5be76f82fbd74cd3608703f37e76a0
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/site_network_access.py
@@ -0,0 +1,66 @@
+# Example request:
+# request = {'ietf-l2vpn-svc:site-network-access': [{
+#     'network-access-id': '3fd942ee-2dc3-41d1-aeec-65aa85d117b2',
+#     'vpn-attachment': {'vpn-id': '954b1b53-4a8c-406d-9eff-750ec2c9a258',
+#         'site-role': 'any-to-any-role'},
+#     'connection': {'encapsulation-type': 'dot1q-vlan-tagged', 'tagged-interface': {
+#         'dot1q-vlan-tagged': {'cvlan-id': 1234}}},
+#     'bearer': {'bearer-reference': '1a'}
+# }]}
+
+from .Common import REGEX_UUID
+
+SCHEMA_SITE_NETWORK_ACCESS = {
+    '$schema': 'https://json-schema.org/draft/2020-12/schema',
+    'type': 'object',
+    'required': ['ietf-l2vpn-svc:site-network-access'],
+    'properties': {
+        'ietf-l2vpn-svc:site-network-access': {
+            'type': 'array',
+            'minItems': 1,
+            'maxItems': 1,  # for now we do not support multiple site-network-access entries in the same message
+            'items': {
+                'type': 'object',
+                'required': ['network-access-id', 'vpn-attachment', 'connection', 'bearer'],
+                'properties': {
+                    'network-access-id': {'type': 'string', 'pattern': REGEX_UUID},
+                    'vpn-attachment': {
+                        'type': 'object',
+                        'required': ['vpn-id', 'site-role'],
+                        'properties': {
+                            'vpn-id': {'type': 'string', 'pattern': REGEX_UUID},
+                            'site-role': {'type': 'string', 'minLength': 1},
+                        },
+                    },
+                    'connection': {
+                        'type': 'object',
+                        'required': ['encapsulation-type', 'tagged-interface'],
+                        'properties': {
+                            'encapsulation-type': {'enum': ['dot1q-vlan-tagged']},
+                            'tagged-interface': {
+                                'type': 'object',
+                                'required': ['dot1q-vlan-tagged'],
+                                'properties': {
+                                    'dot1q-vlan-tagged': {
+                                        'type': 'object',
+                                        'required': ['cvlan-id'],
+                                        'properties': {
+                                            'cvlan-id': {'type': 'integer', 'minimum': 1, 'maximum': 4094},
+                                        },
+                                    },
+                                },
+                            },
+                        },
+                    },
+                    'bearer': {
+                        'type': 'object',
+                        'required': ['bearer-reference'],
+                        'properties': {
+                            'bearer-reference': {'type': 'string', 'minLength': 1},
+                        },
+                    },
+                },
+            },
+        },
+    },
+}
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..54e9c53163b8d764a37b613501f6b427d6e1773d
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/schemas/vpn_service.py
@@ -0,0 +1,32 @@
+# Example request:
+# request = {'ietf-l2vpn-svc:vpn-service': [{
+#   'vpn-id': 'c6270231-f1de-4687-b2ed-7b58f9105775',
+#   'vpn-svc-type': 'vpws',
+#   'svc-topo': 'any-to-any',
+#   'customer-name': 'osm'
+# }]}
+
+from .Common import REGEX_UUID
+
+SCHEMA_VPN_SERVICE = {
+    '$schema': 'https://json-schema.org/draft/2020-12/schema',
+    'type': 'object',
+    'required': ['ietf-l2vpn-svc:vpn-service'],
+    'properties': {
+        'ietf-l2vpn-svc:vpn-service': {
+            'type': 'array',
+            'minItems': 1,
+            'maxItems': 1,  # for now we do not support multiple vpn-service entries in the same message
+            'items': {
+                'type': 'object',
+                'required': ['vpn-id', 'vpn-svc-type', 'svc-topo', 'customer-name'],
+                'properties': {
+                    'vpn-id': {'type': 'string', 'pattern': REGEX_UUID},
+                    'vpn-svc-type': {'enum': ['vpws']},
+                    'svc-topo': {'enum': ['any-to-any']},
+                    'customer-name': {'const': 'osm'},
+                },
+            }
+        }
+    },
+}
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py
new file mode 100644
index 0000000000000000000000000000000000000000..de7c9eafd7b2d5afdc39b82a4d02bea20127fa4a
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Authentication.py
@@ -0,0 +1,11 @@
+from flask_httpauth import HTTPBasicAuth
+from werkzeug.security import check_password_hash
+from compute.Config import RESTAPI_USERS
+
+HTTP_AUTH = HTTPBasicAuth()
+
+@HTTP_AUTH.verify_password
+def verify_password(username, password):
+    if username not in RESTAPI_USERS: return None
+    if not check_password_hash(RESTAPI_USERS[username], password): return None
+    return username
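+
+# Illustrative assumption (not part of this module): RESTAPI_USERS in compute.Config is expected to
+# map usernames to Werkzeug password hashes, e.g.:
+#
+#   from werkzeug.security import generate_password_hash
+#   RESTAPI_USERS = {'admin': generate_password_hash('admin')}   # example credentials only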
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py
new file mode 100644
index 0000000000000000000000000000000000000000..5879670102e861bf1598104ace80f1f0cdb931ca
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/HttpStatusCodes.py
@@ -0,0 +1,6 @@
+HTTP_OK             = 200
+HTTP_CREATED        = 201
+HTTP_NOCONTENT      = 204
+HTTP_BADREQUEST     = 400
+HTTP_SERVERERROR    = 500
+HTTP_GATEWAYTIMEOUT = 504
\ No newline at end of file
diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c126d71beba72ebb7b69d9852927cb31ac2a614
--- /dev/null
+++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/Validator.py
@@ -0,0 +1,21 @@
+from typing import List
+from flask.json import jsonify
+from jsonschema import _utils
+from jsonschema.validators import validator_for
+from jsonschema.protocols import Validator
+from jsonschema.exceptions import ValidationError
+from werkzeug.exceptions import BadRequest
+from .HttpStatusCodes import HTTP_BADREQUEST
+
+def validate_message(schema, message):
+    validator_class = validator_for(schema)
+    validator : Validator = validator_class(schema)
+    errors : List[ValidationError] = sorted(validator.iter_errors(message), key=str)
+    if len(errors) == 0: return
+    response = jsonify([
+        {'message': str(error.message), 'schema': str(error.schema), 'validator': str(error.validator),
+         'where': str(_utils.format_as_index(container='message', indices=error.relative_path))}
+        for error in errors
+    ])
+    response.status_code = HTTP_BADREQUEST
+    raise BadRequest(response=response)
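+
+# Usage sketch (illustrative; import paths assume a handler living in this plugin package):
+#
+#   from ..schemas.vpn_service import SCHEMA_VPN_SERVICE
+#   from .Validator import validate_message
+#
+#   def post(self):
+#       validate_message(SCHEMA_VPN_SERVICE, request.json)   # raises BadRequest carrying a JSON
+#       ...                                                  # list of validation errors on failure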
diff --git a/src/centralizedattackdetector/service/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/__init__.py
similarity index 100%
rename from src/centralizedattackdetector/service/__init__.py
rename to src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/tools/__init__.py
diff --git a/src/compute/service/rest_server/resources/Compute.py b/src/compute/service/rest_server/resources/Compute.py
deleted file mode 100644
index 4b845be2edd20c512bd0669739d402207d71fa94..0000000000000000000000000000000000000000
--- a/src/compute/service/rest_server/resources/Compute.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import logging
-from flask.json import jsonify
-from flask_restful import Resource
-from common.Settings import get_setting
-from common.Constants import DEFAULT_CONTEXT_UUID
-from service.client.ServiceClient import ServiceClient
-from service.proto.context_pb2 import Service, ServiceStatusEnum, ServiceTypeEnum
-
-LOGGER = logging.getLogger(__name__)
-
-class Compute(Resource):
-    def __init__(self) -> None:
-        super().__init__()
-
-    def get(self):
-        # Here implement HTTP GET method
-        raise NotImplementedError()
-
-    def post(self):
-        # Here implement HTTP POST method
-
-        # Retrieve required data from request
-        new_service_context_id = DEFAULT_CONTEXT_UUID
-        new_service_id = 'my-service-id'
-
-        # Find Service address/port from environment and instantiate client
-        service_host = get_setting('SERVICESERVICE_SERVICE_HOST')
-        service_port = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')
-        service_client = ServiceClient(service_host, service_port)
-
-        # Compose a dummy CreateService request
-        request = Service()
-        request.service_id.context_id.context_uuid.uuid = new_service_context_id
-        request.service_id.service_uuid.uuid = new_service_id
-        request.service_type = ServiceTypeEnum.SERVICETYPE_L2NM
-        request.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
-        
-        try:
-            # Issue gRPC request to Service component
-            reply = service_client.CreateService(request)
-
-            # Parse CreateService reply, here we check that obtained service Id and context are the expected ones.
-            reply_context_uuid = reply.context_id.context_uuid.uuid
-            reply_service_uuid = reply.service_uuid.uuid
-            #succeeded = (reply_context_uuid == new_service_context_id) and (reply_service_uuid == new_service_id)
-            succeeded = True
-            reply = {'succeeded': succeeded}
-        except Exception as e:
-            LOGGER.exception('Something went wrong Creating Service {:s}'.format(str(request)))
-            reply = {'succeeded': False, 'error': str(e)}
-
-        return jsonify(reply)
diff --git a/src/compute/tests/MockService.py b/src/compute/tests/MockService.py
new file mode 100644
index 0000000000000000000000000000000000000000..54b420f5aa1cf015c90f09b874f9b37225e07328
--- /dev/null
+++ b/src/compute/tests/MockService.py
@@ -0,0 +1,41 @@
+import grpc, logging
+from concurrent import futures
+
+GRPC_MAX_WORKERS  = 10
+GRPC_GRACE_PERIOD = 60
+
+class MockService:
+    def __init__(self, address, port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD, cls_name=__name__):
+        self.logger = logging.getLogger(cls_name)
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.pool = None
+        self.server = None
+
+    def install_servicers(self):
+        pass
+
+    def start(self):
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
+        self.logger.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.install_servicers()
+
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        self.logger.info('Listening on {:s}...'.format(str(self.endpoint)))
+        self.server.start()
+
+        self.logger.debug('Service started')
+
+    def stop(self):
+        self.logger.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
+        self.server.stop(self.grace_period)
+        self.logger.debug('Service stopped')
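+
+# Illustrative sketch (an assumption about how tests bind servicers): a concrete mock overrides
+# install_servicers() and registers its servicer on self.server using the gRPC-generated helper:
+#
+#   from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
+#   from .MockServicerImpl_Context import MockServicerImpl_Context
+#
+#   class MockService_Context(MockService):
+#       def install_servicers(self):
+#           add_ContextServiceServicer_to_server(MockServicerImpl_Context(), self.server)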
diff --git a/src/compute/tests/MockServicerImpl_Context.py b/src/compute/tests/MockServicerImpl_Context.py
new file mode 100644
index 0000000000000000000000000000000000000000..d79a755d49773dff4b298abdba6dfa38d9e69d57
--- /dev/null
+++ b/src/compute/tests/MockServicerImpl_Context.py
@@ -0,0 +1,188 @@
+import grpc, logging
+from typing import Any, Dict, Iterator, List
+from context.proto.context_pb2 import (
+    Context, ContextEvent, ContextId, ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList,
+    DeviceList, Empty, Link, LinkEvent, LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId, ServiceIdList,
+    ServiceList, Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList)
+from context.proto.context_pb2_grpc import ContextServiceServicer
+from .Tools import grpc_message_to_json_string
+
+LOGGER = logging.getLogger(__name__)
+
+def get_container(database : Dict[str, Dict[str, Any]], container_name : str) -> Dict[str, Any]:
+    return database.setdefault(container_name, {})
+
+def get_entries(database : Dict[str, Dict[str, Any]], container_name : str) -> List[Any]:
+    container = get_container(database, container_name)
+    return [container[entry_uuid] for entry_uuid in sorted(container.keys())]
+
+def get_entry(
+    context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str
+) -> Any:
+    LOGGER.debug('[get_entry] database={:s}'.format(str(database)))
+    container = get_container(database, container_name)
+    if entry_uuid not in container:
+        context.abort(grpc.StatusCode.INTERNAL, str('{:s}({:s}) not found'.format(container_name, entry_uuid)))
+    return container[entry_uuid]
+
+def set_entry(database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str, entry : Any) -> Any:
+    container = get_container(database, container_name)
+    LOGGER.debug('[set_entry] BEFORE database={:s}'.format(str(database)))
+    container[entry_uuid] = entry
+    LOGGER.debug('[set_entry] AFTER database={:s}'.format(str(database)))
+    return entry
+
+def del_entry(
+    context : grpc.ServicerContext, database : Dict[str, Dict[str, Any]], container_name : str, entry_uuid : str
+) -> Any:
+    container = get_container(database, container_name)
+    if entry_uuid not in container:
+        context.abort(grpc.StatusCode.INTERNAL, str('{:s}({:s}) not found'.format(container_name, entry_uuid)))
+    del container[entry_uuid]
+    return Empty()
+
+class MockServicerImpl_Context(ContextServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.database : Dict[str, Any] = {}
+        LOGGER.info('[__init__] Servicer Created')
+
+    # ----- Context ----------------------------------------------------------------------------------------------------
+
+    def ListContextIds(self, request: Empty, context : grpc.ServicerContext) -> ContextIdList:
+        LOGGER.info('[ListContextIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return ContextIdList(context_ids=[context.context_id for context in get_entries(self.database, 'context')])
+
+    def ListContexts(self, request: Empty, context : grpc.ServicerContext) -> ContextList:
+        LOGGER.info('[ListContexts] request={:s}'.format(grpc_message_to_json_string(request)))
+        return ContextList(contexts=get_entries(self.database, 'context'))
+
+    def GetContext(self, request: ContextId, context : grpc.ServicerContext) -> Context:
+        LOGGER.info('[GetContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'context', request.context_uuid.uuid)
+
+    def SetContext(self, request: Context, context : grpc.ServicerContext) -> ContextId:
+        LOGGER.info('[SetContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'context', request.context_id.context_uuid.uuid, request).context_id
+
+    def RemoveContext(self, request: ContextId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveContext] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'context', request.context_uuid.uuid)
+
+    def GetContextEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ContextEvent]:
+        LOGGER.info('[GetContextEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+
+
+    # ----- Topology ---------------------------------------------------------------------------------------------------
+
+    def ListTopologyIds(self, request: ContextId, context : grpc.ServicerContext) -> TopologyIdList:
+        LOGGER.info('[ListTopologyIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid)))
+        return TopologyIdList(topology_ids=[topology.topology_id for topology in topologies])
+
+    def ListTopologies(self, request: ContextId, context : grpc.ServicerContext) -> TopologyList:
+        LOGGER.info('[ListTopologies] request={:s}'.format(grpc_message_to_json_string(request)))
+        topologies = get_entries(self.database, 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid)))
+        return TopologyList(topologies=[topology for topology in topologies])
+
+    def GetTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Topology:
+        LOGGER.info('[GetTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return get_entry(context, self.database, container_name, request.topology_uuid.uuid)
+
+    def SetTopology(self, request: Topology, context : grpc.ServicerContext) -> TopologyId:
+        LOGGER.info('[SetTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.topology_id.context_id.context_uuid.uuid))
+        return set_entry(self.database, container_name, request.topology_id.topology_uuid.uuid, request).topology_id
+
+    def RemoveTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveTopology] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'topology[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return del_entry(context, self.database, container_name, request.topology_uuid.uuid)
+
+    def GetTopologyEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[TopologyEvent]:
+        LOGGER.info('[GetTopologyEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+
+
+    # ----- Device -----------------------------------------------------------------------------------------------------
+
+    def ListDeviceIds(self, request: Empty, context : grpc.ServicerContext) -> DeviceIdList:
+        LOGGER.info('[ListDeviceIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return DeviceIdList(device_ids=[device.device_id for device in get_entries(self.database, 'device')])
+
+    def ListDevices(self, request: Empty, context : grpc.ServicerContext) -> DeviceList:
+        LOGGER.info('[ListDevices] request={:s}'.format(grpc_message_to_json_string(request)))
+        return DeviceList(devices=get_entries(self.database, 'device'))
+
+    def GetDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Device:
+        LOGGER.info('[GetDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'device', request.device_uuid.uuid)
+
+    def SetDevice(self, request: Device, context : grpc.ServicerContext) -> DeviceId:
+        LOGGER.info('[SetDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'device', request.device_id.device_uuid.uuid, request).device_id
+
+    def RemoveDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveDevice] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'device', request.device_uuid.uuid)
+
+    def GetDeviceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[DeviceEvent]:
+        LOGGER.info('[GetDeviceEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+
+
+    # ----- Link -------------------------------------------------------------------------------------------------------
+
+    def ListLinkIds(self, request: Empty, context : grpc.ServicerContext) -> LinkIdList:
+        LOGGER.info('[ListLinkIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        return LinkIdList(link_ids=[link.link_id for link in get_entries(self.database, 'link')])
+
+    def ListLinks(self, request: Empty, context : grpc.ServicerContext) -> LinkList:
+        LOGGER.info('[ListLinks] request={:s}'.format(grpc_message_to_json_string(request)))
+        return LinkList(links=get_entries(self.database, 'link'))
+
+    def GetLink(self, request: LinkId, context : grpc.ServicerContext) -> Link:
+        LOGGER.info('[GetLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return get_entry(context, self.database, 'link', request.link_uuid.uuid)
+
+    def SetLink(self, request: Link, context : grpc.ServicerContext) -> LinkId:
+        LOGGER.info('[SetLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(self.database, 'link', request.link_id.link_uuid.uuid, request).link_id
+
+    def RemoveLink(self, request: LinkId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveLink] request={:s}'.format(grpc_message_to_json_string(request)))
+        return del_entry(context, self.database, 'link', request.link_uuid.uuid)
+
+    def GetLinkEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[LinkEvent]:
+        LOGGER.info('[GetLinkEvents] request={:s}'.format(grpc_message_to_json_string(request)))
+
+
+    # ----- Service ----------------------------------------------------------------------------------------------------
+
+    def ListServiceIds(self, request: ContextId, context : grpc.ServicerContext) -> ServiceIdList:
+        LOGGER.info('[ListServiceIds] request={:s}'.format(grpc_message_to_json_string(request)))
+        services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid)))
+        return ServiceIdList(service_ids=[service.service_id for service in services])
+
+    def ListServices(self, request: ContextId, context : grpc.ServicerContext) -> ServiceList:
+        LOGGER.info('[ListServices] request={:s}'.format(grpc_message_to_json_string(request)))
+        services = get_entries(self.database, 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid)))
+        return ServiceList(services=[service for service in services])
+
+    def GetService(self, request: ServiceId, context : grpc.ServicerContext) -> Service:
+        LOGGER.info('[GetService] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return get_entry(context, self.database, container_name, request.service_uuid.uuid)
+
+    def SetService(self, request: Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[SetService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return set_entry(
+            self.database, 'service[{:s}]'.format(str(request.service_id.context_id.context_uuid.uuid)),
+            request.service_id.service_uuid.uuid, request).service_id
+
+    def RemoveService(self, request: ServiceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[RemoveService] request={:s}'.format(grpc_message_to_json_string(request)))
+        container_name = 'service[{:s}]'.format(str(request.context_id.context_uuid.uuid))
+        return del_entry(context, self.database, container_name, request.service_uuid.uuid)
+
+    def GetServiceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ServiceEvent]:
+        LOGGER.info('[GetServiceEvents] request={:s}'.format(grpc_message_to_json_string(request)))
diff --git a/src/compute/tests/MockServicerImpl_Service.py b/src/compute/tests/MockServicerImpl_Service.py
new file mode 100644
index 0000000000000000000000000000000000000000..75fdc3073dac1b942c7701f3a0be9feacb60109b
--- /dev/null
+++ b/src/compute/tests/MockServicerImpl_Service.py
@@ -0,0 +1,32 @@
+import grpc, logging
+from common.Settings import get_setting
+from context.client.ContextClient import ContextClient
+from service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId
+from service.proto.service_pb2_grpc import ServiceServiceServicer
+from .Tools import grpc_message_to_json_string
+
+LOGGER = logging.getLogger(__name__)
+
+class MockServicerImpl_Service(ServiceServiceServicer):
+    def __init__(self):
+        LOGGER.info('[__init__] Creating Servicer...')
+        self.context_client = ContextClient(
+            get_setting('CONTEXTSERVICE_SERVICE_HOST'),
+            get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
+        LOGGER.info('[__init__] Servicer Created')
+
+    def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[CreateService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetService(request)
+
+    def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
+        LOGGER.info('[UpdateService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.SetService(request)
+
+    def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
+        LOGGER.info('[DeleteService] request={:s}'.format(grpc_message_to_json_string(request)))
+        return self.context_client.RemoveService(request)
+
+    def GetConnectionList(self, request : ServiceId, context : grpc.ServicerContext) -> ConnectionList:
+        LOGGER.info('[GetConnectionList] request={:s}'.format(grpc_message_to_json_string(request)))
+        return ConnectionList()
diff --git a/src/compute/tests/Tools.py b/src/compute/tests/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..a96c38ce546d4062df8229f5506f9dd49af6fc81
--- /dev/null
+++ b/src/compute/tests/Tools.py
@@ -0,0 +1,7 @@
+import json
+from google.protobuf.json_format import MessageToDict
+
+def grpc_message_to_json_string(message):
+    return json.dumps(MessageToDict(
+        message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False),
+        sort_keys=True)
diff --git a/src/compute/tests/mock_osm/MockOSM.py b/src/compute/tests/mock_osm/MockOSM.py
new file mode 100644
index 0000000000000000000000000000000000000000..c50ee6c88e75a62a743bba065830ae82827fa7d7
--- /dev/null
+++ b/src/compute/tests/mock_osm/MockOSM.py
@@ -0,0 +1,94 @@
+import logging
+from .WimconnectorIETFL2VPN import WimconnectorIETFL2VPN
+
+LOGGER = logging.getLogger(__name__)
+
+WIM_USERNAME = 'admin'
+WIM_PASSWORD = 'admin'
+
+# Ref: https://osm.etsi.org/wikipub/index.php/WIM
+WIM_MAPPING  = [
+    {
+        'device-id'           : 'dev-1',            # pop_switch_dpid
+        #'device_interface_id' : ??,                # pop_switch_port
+        'service_endpoint_id' : 'ep-1',             # wan_service_endpoint_id
+        'service_mapping_info': {                   # wan_service_mapping_info, other extra info
+            'bearer': {'bearer-reference': 'dev-1:ep-1:10.0.0.1'},
+            'site-id': '1',
+        },
+        #'switch_dpid'         : ??,                # wan_switch_dpid
+        #'switch_port'         : ??,                # wan_switch_port
+        #'datacenter_id'       : ??,                # vim_account
+    },
+    {
+        'device-id'           : 'dev-2',            # pop_switch_dpid
+        #'device_interface_id' : ??,                # pop_switch_port
+        'service_endpoint_id' : 'ep-2',             # wan_service_endpoint_id
+        'service_mapping_info': {                   # wan_service_mapping_info, other extra info
+            'bearer': {'bearer-reference': 'dev-2:ep-2:10.0.0.2'},
+            'site-id': '2',
+        },
+        #'switch_dpid'         : ??,                # wan_switch_dpid
+        #'switch_port'         : ??,                # wan_switch_port
+        #'datacenter_id'       : ??,                # vim_account
+    },
+    {
+        'device-id'           : 'dev-3',            # pop_switch_dpid
+        #'device_interface_id' : ??,                # pop_switch_port
+        'service_endpoint_id' : 'ep-3',             # wan_service_endpoint_id
+        'service_mapping_info': {                   # wan_service_mapping_info, other extra info
+            'bearer': {'bearer-reference': 'dev-3:ep-3:10.0.0.3'},
+            'site-id': '3',
+        },
+        #'switch_dpid'         : ??,                # wan_switch_dpid
+        #'switch_port'         : ??,                # wan_switch_port
+        #'datacenter_id'       : ??,                # vim_account
+    },
+]
+
+SERVICE_TYPE = 'ELINE'
+SERVICE_CONNECTION_POINTS_1 = [
+    {'service_endpoint_id': 'ep-1',
+        'service_endpoint_encapsulation_type': 'dot1q',
+        'service_endpoint_encapsulation_info': {'vlan': 1234}},
+    {'service_endpoint_id': 'ep-2',
+        'service_endpoint_encapsulation_type': 'dot1q',
+        'service_endpoint_encapsulation_info': {'vlan': 1234}},
+]
+
+SERVICE_CONNECTION_POINTS_2 = [
+    {'service_endpoint_id': 'ep-3',
+        'service_endpoint_encapsulation_type': 'dot1q',
+        'service_endpoint_encapsulation_info': {'vlan': 1234}},
+]
+
+class MockOSM:
+    def __init__(self, wim_url):
+        wim = {'wim_url': wim_url}
+        wim_account = {'user': WIM_USERNAME, 'password': WIM_PASSWORD}
+        config = {'mapping_not_needed': False, 'service_endpoint_mapping': WIM_MAPPING}
+        self.wim = WimconnectorIETFL2VPN(wim, wim_account, config=config)
+        self.service_uuid = None
+        self.conn_info = None
+
+    def create_connectivity_service(self):
+        self.wim.check_credentials()
+        LOGGER.info('[create_connectivity_service] connection_points={:s}'.format(str(SERVICE_CONNECTION_POINTS_1)))
+        result = self.wim.create_connectivity_service(SERVICE_TYPE, SERVICE_CONNECTION_POINTS_1)
+        LOGGER.info('[create_connectivity_service] result={:s}'.format(str(result)))
+        self.service_uuid, self.conn_info = result
+
+    def get_connectivity_service_status(self):
+        self.wim.check_credentials()
+        result = self.wim.get_connectivity_service_status(self.service_uuid, conn_info=self.conn_info)
+        LOGGER.info('[get_connectivity_service] result={:s}'.format(str(result)))
+
+    def edit_connectivity_service(self):
+        self.wim.check_credentials()
+        LOGGER.info('[edit_connectivity_service] connection_points={:s}'.format(str(SERVICE_CONNECTION_POINTS_2)))
+        self.wim.edit_connectivity_service(
+            self.service_uuid, conn_info=self.conn_info, connection_points=SERVICE_CONNECTION_POINTS_2)
+
+    def delete_connectivity_service(self):
+        self.wim.check_credentials()
+        self.wim.delete_connectivity_service(self.service_uuid, conn_info=self.conn_info)
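+
+# Usage sketch (illustrative; the WIM URL should point at the Compute REST server under test,
+# the address and port below are placeholders):
+#
+#   osm = MockOSM('http://127.0.0.1:8080')
+#   osm.create_connectivity_service()
+#   osm.get_connectivity_service_status()
+#   osm.edit_connectivity_service()
+#   osm.delete_connectivity_service()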
diff --git a/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py
new file mode 100644
index 0000000000000000000000000000000000000000..182115bad67a4fbe1eb04a83ed8d54be964568c8
--- /dev/null
+++ b/src/compute/tests/mock_osm/WimconnectorIETFL2VPN.py
@@ -0,0 +1,499 @@
+# -*- coding: utf-8 -*-
+##
+# Copyright 2018 Telefonica
+# All Rights Reserved.
+#
+# Contributors: Oscar Gonzalez de Dios, Manuel Lopez Bravo, Guillermo Pajares Martin
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This work has been performed in the context of the Metro-Haul project -
+# funded by the European Commission under Grant number 761727 through the
+# Horizon 2020 program.
+##
+"""The SDN/WIM connector is responsible for establishing wide area network
+connectivity.
+
+This SDN/WIM connector implements the standard IETF RFC 8466 "A YANG Data
+ Model for Layer 2 Virtual Private Network (L2VPN) Service Delivery"
+
+It receives the endpoints and the necessary details to request
+the Layer 2 service.
+"""
+import requests
+import uuid
+import logging
+#from osm_ro_plugin.sdnconn import SdnConnectorBase, SdnConnectorError
+from .sdnconn import SdnConnectorBase, SdnConnectorError
+
+"""Check layer where we move it"""
+
+
+class WimconnectorIETFL2VPN(SdnConnectorBase):
+    def __init__(self, wim, wim_account, config=None, logger=None):
+        """IETF L2VPN WIM connector
+
+        Arguments: (To be completed)
+            wim (dict): WIM record, as stored in the database
+            wim_account (dict): WIM account record, as stored in the database
+        """
+        self.logger = logging.getLogger("ro.sdn.ietfl2vpn")
+        super().__init__(wim, wim_account, config, logger)
+        self.headers = {"Content-Type": "application/json"}
+        self.mappings = {
+            m["service_endpoint_id"]: m for m in self.service_endpoint_mapping
+        }
+        self.user = wim_account.get("user")
+        self.passwd = wim_account.get("password")           # replace "passwordd" -> "password"
+
+        if self.user and self.passwd is not None:
+            self.auth = (self.user, self.passwd)
+        else:
+            self.auth = None
+
+        self.logger.info("IETFL2VPN Connector Initialized.")
+
+    def check_credentials(self):
+        endpoint = "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services".format(
+            self.wim["wim_url"]
+        )
+
+        try:
+            response = requests.get(endpoint, auth=self.auth)
+            http_code = response.status_code
+        except requests.exceptions.RequestException as e:
+            raise SdnConnectorError(str(e), http_code=503)
+
+        if http_code != 200:
+            raise SdnConnectorError("Failed while authenticating", http_code=http_code)
+
+        self.logger.info("Credentials checked")
+
+    def get_connectivity_service_status(self, service_uuid, conn_info=None):
+        """Monitor the status of the connectivity service stablished
+
+        Arguments:
+            service_uuid: Connectivity service unique identifier
+
+        Returns:
+            Examples::
+                {'sdn_status': 'ACTIVE'}
+                {'sdn_status': 'INACTIVE'}
+                {'sdn_status': 'DOWN'}
+                {'sdn_status': 'ERROR'}
+        """
+        try:
+            self.logger.info("Sending get connectivity service stuatus")
+            servicepoint = "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services/vpn-service={}/".format(
+                self.wim["wim_url"], service_uuid
+            )
+            response = requests.get(servicepoint, auth=self.auth)
+
+            if response.status_code != requests.codes.ok:
+                raise SdnConnectorError(
+                    "Unable to obtain connectivity servcice status",
+                    http_code=response.status_code,
+                )
+
+            service_status = {"sdn_status": "ACTIVE"}
+
+            return service_status
+        except requests.exceptions.ConnectionError:
+            raise SdnConnectorError("Request Timeout", http_code=408)
+
+    def search_mapp(self, connection_point):
+        id = connection_point["service_endpoint_id"]
+        if id not in self.mappings:
+            raise SdnConnectorError("Endpoint {} not located".format(str(id)))
+        else:
+            return self.mappings[id]
+
+    def create_connectivity_service(self, service_type, connection_points, **kwargs):
+        """Stablish WAN connectivity between the endpoints
+
+        Arguments:
+            service_type (str): ``ELINE`` (L2), ``ELAN`` (L2), ``ETREE`` (L2),
+                ``L3``.
+            connection_points (list): each point corresponds to
+                an entry point from the DC to the transport network. One
+                connection point serves to identify the specific access and
+                some other service parameters, such as encapsulation type.
+                Represented by a dict as follows::
+
+                    {
+                      "service_endpoint_id": ..., (str[uuid])
+                      "service_endpoint_encapsulation_type": ...,
+                           (enum: none, dot1q, ...)
+                      "service_endpoint_encapsulation_info": {
+                        ... (dict)
+                        "vlan": ..., (int, present if encapsulation is dot1q)
+                        "vni": ... (int, present if encapsulation is vxlan),
+                        "peers": [(ipv4_1), (ipv4_2)]
+                            (present if encapsulation is vxlan)
+                      }
+                    }
+
+              The service endpoint ID should be previously informed to the WIM
+              engine in the RO when the WIM port mapping is registered.
+
+        Keyword Arguments:
+            bandwidth (int): value in kilobytes
+            latency (int): value in milliseconds
+
+        Other QoS might be passed as keyword arguments.
+
+        Returns:
+            tuple: ``(service_id, conn_info)`` containing:
+               - *service_uuid* (str): UUID of the established connectivity
+                  service
+               - *conn_info* (dict or None): Information to be stored at the
+                 database (or ``None``). This information will be provided to
+                 the :meth:`~.edit_connectivity_service` and :obj:`~.delete`.
+                 **MUST** be JSON/YAML-serializable (plain data structures).
+
+        Raises:
+            SdnConnectorError: In case of error.
+        """
+        if service_type == "ELINE":
+            if len(connection_points) > 2:
+                raise SdnConnectorError(
+                    "Connections between more than 2 endpoints are not supported"
+                )
+
+            if len(connection_points) < 2:
+                raise SdnConnectorError("Connections must be of at least 2 endpoints")
+
+            """First step, create the vpn service"""
+            uuid_l2vpn = str(uuid.uuid4())
+            vpn_service = {}
+            vpn_service["vpn-id"] = uuid_l2vpn
+            vpn_service["vpn-svc-type"] = "vpws"            # Rename "vpn-scv-type" -> "vpn-svc-type"
+            vpn_service["svc-topo"] = "any-to-any"
+            vpn_service["customer-name"] = "osm"
+            vpn_service_list = []
+            vpn_service_list.append(vpn_service)
+            vpn_service_l = {"ietf-l2vpn-svc:vpn-service": vpn_service_list}
+            response_service_creation = None
+            conn_info = []
+            self.logger.info("Sending vpn-service :{}".format(vpn_service_l))
+
+            try:
+                endpoint_service_creation = (
+                    "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services".format(
+                        self.wim["wim_url"]
+                    )
+                )
+                response_service_creation = requests.post(
+                    endpoint_service_creation,
+                    headers=self.headers,
+                    json=vpn_service_l,
+                    auth=self.auth,
+                )
+            except requests.exceptions.ConnectionError:
+                raise SdnConnectorError(
+                    "Request to create service Timeout", http_code=408
+                )
+
+            if response_service_creation.status_code == 409:
+                raise SdnConnectorError(
+                    "Service already exists",
+                    http_code=response_service_creation.status_code,
+                )
+            elif response_service_creation.status_code != requests.codes.created:
+                raise SdnConnectorError(
+                    "Request to create service not accepted",
+                    http_code=response_service_creation.status_code,
+                )
+
+            """Second step, create the connections and vpn attachments"""
+            for connection_point in connection_points:
+                connection_point_wan_info = self.search_mapp(connection_point)
+                site_network_access = {}
+                connection = {}
+
+                if connection_point["service_endpoint_encapsulation_type"] != "none":
+                    if (
+                        connection_point["service_endpoint_encapsulation_type"]
+                        == "dot1q"
+                    ):
+                        """The connection is a VLAN"""
+                        connection["encapsulation-type"] = "dot1q-vlan-tagged"
+                        tagged = {}
+                        tagged_interf = {}
+                        service_endpoint_encapsulation_info = connection_point[
+                            "service_endpoint_encapsulation_info"
+                        ]
+
+                        if service_endpoint_encapsulation_info["vlan"] is None:
+                            raise SdnConnectorError("VLAN must be provided")
+
+                        tagged_interf["cvlan-id"] = service_endpoint_encapsulation_info[
+                            "vlan"
+                        ]
+                        tagged["dot1q-vlan-tagged"] = tagged_interf
+                        connection["tagged-interface"] = tagged
+                    else:
+                        raise NotImplementedError("Encapsulation type not implemented")
+
+                site_network_access["connection"] = connection
+                self.logger.info("Sending connection:{}".format(connection))
+                vpn_attach = {}
+                vpn_attach["vpn-id"] = uuid_l2vpn
+                vpn_attach["site-role"] = vpn_service["svc-topo"] + "-role"
+                site_network_access["vpn-attachment"] = vpn_attach
+                self.logger.info("Sending vpn-attachement :{}".format(vpn_attach))
+                uuid_sna = str(uuid.uuid4())
+                site_network_access["network-access-id"] = uuid_sna
+                site_network_access["bearer"] = connection_point_wan_info[
+                    "service_mapping_info"
+                ]["bearer"]
+                site_network_accesses = {}
+                site_network_access_list = []
+                site_network_access_list.append(site_network_access)
+                site_network_accesses[
+                    "ietf-l2vpn-svc:site-network-access"
+                ] = site_network_access_list
+                conn_info_d = {}
+                conn_info_d["site"] = connection_point_wan_info["service_mapping_info"][
+                    "site-id"
+                ]
+                conn_info_d["site-network-access-id"] = site_network_access[
+                    "network-access-id"
+                ]
+                conn_info_d["mapping"] = None
+                conn_info.append(conn_info_d)
+
+                try:
+                    endpoint_site_network_access_creation = (
+                        "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/"
+                        "sites/site={}/site-network-accesses/".format(
+                            self.wim["wim_url"],
+                            connection_point_wan_info["service_mapping_info"][
+                                "site-id"
+                            ],
+                        )
+                    )
+                    response_endpoint_site_network_access_creation = requests.post(
+                        endpoint_site_network_access_creation,
+                        headers=self.headers,
+                        json=site_network_accesses,
+                        auth=self.auth,
+                    )
+
+                    if (
+                        response_endpoint_site_network_access_creation.status_code
+                        == 409
+                    ):
+                        self.delete_connectivity_service(vpn_service["vpn-id"])
+
+                        raise SdnConnectorError(
+                            "Site_Network_Access with ID '{}' already exists".format(
+                                site_network_access["network-access-id"]
+                            ),
+                            http_code=response_endpoint_site_network_access_creation.status_code,
+                        )
+                    elif (
+                        response_endpoint_site_network_access_creation.status_code
+                        == 400
+                    ):
+                        self.delete_connectivity_service(vpn_service["vpn-id"])
+
+                        raise SdnConnectorError(
+                            "Site {} does not exist".format(
+                                connection_point_wan_info["service_mapping_info"][
+                                    "site-id"
+                                ]
+                            ),
+                            http_code=response_endpoint_site_network_access_creation.status_code,
+                        )
+                    elif (
+                        response_endpoint_site_network_access_creation.status_code
+                        != requests.codes.created
+                        and response_endpoint_site_network_access_creation.status_code
+                        != requests.codes.no_content
+                    ):
+                        self.delete_connectivity_service(vpn_service["vpn-id"])
+
+                        raise SdnConnectorError(
+                            "Request no accepted",
+                            http_code=response_endpoint_site_network_access_creation.status_code,
+                        )
+                except requests.exceptions.ConnectionError:
+                    self.delete_connectivity_service(vpn_service["vpn-id"])
+
+                    raise SdnConnectorError("Request Timeout", http_code=408)
+
+            return uuid_l2vpn, conn_info
+        else:
+            raise NotImplementedError
+
+    def delete_connectivity_service(self, service_uuid, conn_info=None):
+        """Disconnect multi-site endpoints previously connected
+
+        This method should receive as the first argument the UUID generated by
+        the ``create_connectivity_service``
+        """
+        try:
+            self.logger.info("Sending delete")
+            servicepoint = "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services/vpn-service={}/".format(
+                self.wim["wim_url"], service_uuid
+            )
+            response = requests.delete(servicepoint, auth=self.auth)
+
+            if response.status_code != requests.codes.no_content:
+                raise SdnConnectorError(
+                    "Error in the request", http_code=response.status_code
+                )
+        except requests.exceptions.ConnectionError:
+            raise SdnConnectorError("Request Timeout", http_code=408)
+
+    def edit_connectivity_service(
+        self, service_uuid, conn_info=None, connection_points=None, **kwargs
+    ):
+        """Change an existing connectivity service, see
+        ``create_connectivity_service``"""
+        # sites = {"sites": {}}
+        # site_list = []
+        vpn_service = {}
+        vpn_service["svc-topo"] = "any-to-any"
+        counter = 0
+
+        for connection_point in connection_points:
+            site_network_access = {}
+            connection_point_wan_info = self.search_mapp(connection_point)
+            params_site = {}
+            params_site["site-id"] = connection_point_wan_info["service_mapping_info"][
+                "site-id"
+            ]
+            params_site["site-vpn-flavor"] = "site-vpn-flavor-single"
+            device_site = {}
+            device_site["device-id"] = connection_point_wan_info["device-id"]
+            params_site["devices"] = device_site
+            # network_access = {}
+            connection = {}
+
+            if connection_point["service_endpoint_encapsulation_type"] != "none":
+                if connection_point["service_endpoint_encapsulation_type"] == "dot1q":
+                    """The connection is a VLAN"""
+                    connection["encapsulation-type"] = "dot1q-vlan-tagged"
+                    tagged = {}
+                    tagged_interf = {}
+                    service_endpoint_encapsulation_info = connection_point[
+                        "service_endpoint_encapsulation_info"
+                    ]
+
+                    if service_endpoint_encapsulation_info["vlan"] is None:
+                        raise SdnConnectorError("VLAN must be provided")
+
+                    tagged_interf["cvlan-id"] = service_endpoint_encapsulation_info[
+                        "vlan"
+                    ]
+                    tagged["dot1q-vlan-tagged"] = tagged_interf
+                    connection["tagged-interface"] = tagged
+                else:
+                    raise NotImplementedError("Encapsulation type not implemented")
+
+            site_network_access["connection"] = connection
+            vpn_attach = {}
+            vpn_attach["vpn-id"] = service_uuid
+            vpn_attach["site-role"] = vpn_service["svc-topo"] + "-role"
+            site_network_access["vpn-attachment"] = vpn_attach
+            uuid_sna = conn_info[counter]["site-network-access-id"]
+            site_network_access["network-access-id"] = uuid_sna
+            site_network_access["bearer"] = connection_point_wan_info[
+                "service_mapping_info"
+            ]["bearer"]
+            site_network_accesses = {}
+            site_network_access_list = []
+            site_network_access_list.append(site_network_access)
+            site_network_accesses[
+                "ietf-l2vpn-svc:site-network-access"
+            ] = site_network_access_list
+
+            try:
+                endpoint_site_network_access_edit = (
+                    "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/"
+                    "sites/site={}/site-network-accesses/".format(
+                        self.wim["wim_url"],
+                        connection_point_wan_info["service_mapping_info"]["site-id"],
+                    )
+                )
+                response_endpoint_site_network_access_creation = requests.put(
+                    endpoint_site_network_access_edit,
+                    headers=self.headers,
+                    json=site_network_accesses,
+                    auth=self.auth,
+                )
+
+                if response_endpoint_site_network_access_creation.status_code == 400:
+                    raise SdnConnectorError(
+                        "Service does not exist",
+                        http_code=response_endpoint_site_network_access_creation.status_code,
+                    )
+                elif (
+                    response_endpoint_site_network_access_creation.status_code != 201
+                    and response_endpoint_site_network_access_creation.status_code
+                    != 204
+                ):
+                    raise SdnConnectorError(
+                        "Request not accepted",
+                        http_code=response_endpoint_site_network_access_creation.status_code,
+                    )
+            except requests.exceptions.ConnectionError:
+                raise SdnConnectorError("Request Timeout", http_code=408)
+
+            counter += 1
+
+        return None
+
+    def clear_all_connectivity_services(self):
+        """Delete all WAN Links corresponding to a WIM"""
+        try:
+            self.logger.info("Sending clear all connectivity services")
+            servicepoint = (
+                "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services".format(
+                    self.wim["wim_url"]
+                )
+            )
+            response = requests.delete(servicepoint, auth=self.auth)
+
+            if response.status_code != requests.codes.no_content:
+                raise SdnConnectorError(
+                    "Unable to clear all connectivity services",
+                    http_code=response.status_code,
+                )
+        except requests.exceptions.ConnectionError:
+            raise SdnConnectorError("Request Timeout", http_code=408)
+
+    def get_all_active_connectivity_services(self):
+        """Provide information about all active connections provisioned by a
+        WIM
+        """
+        try:
+            self.logger.info("Sending get all connectivity services")
+            servicepoint = (
+                "{}/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services".format(
+                    self.wim["wim_url"]
+                )
+            )
+            response = requests.get(servicepoint, auth=self.auth)
+
+            if response.status_code != requests.codes.ok:
+                raise SdnConnectorError(
+                    "Unable to get all connectivity services",
+                    http_code=response.status_code,
+                )
+
+            return response
+        except requests.exceptions.ConnectionError:
+            raise SdnConnectorError("Request Timeout", http_code=408)
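
For orientation, the site-network-access body assembled in the loop above (encapsulation, VPN attachment, bearer) serializes to roughly the following JSON before being PUT to .../sites/site={site-id}/site-network-accesses/. This is an illustrative sketch only; the identifiers, the VLAN value, and the "any-to-any" topology are placeholders, not values taken from the code.

# Illustrative sketch (placeholder values) of the payload built in the loop above.
site_network_accesses = {
    "ietf-l2vpn-svc:site-network-access": [
        {
            "network-access-id": "site-network-access-uuid",   # conn_info[counter]["site-network-access-id"]
            "bearer": "bearer-reference",                      # service_mapping_info["bearer"]
            "connection": {
                "encapsulation-type": "dot1q-vlan-tagged",
                "tagged-interface": {"dot1q-vlan-tagged": {"cvlan-id": 400}},
            },
            "vpn-attachment": {
                "vpn-id": "service-uuid",
                "site-role": "any-to-any-role",                # vpn_service["svc-topo"] + "-role" (topology assumed)
            },
        }
    ]
}
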
diff --git a/src/centralizedattackdetector/tests/__init__.py b/src/compute/tests/mock_osm/__init__.py
similarity index 100%
rename from src/centralizedattackdetector/tests/__init__.py
rename to src/compute/tests/mock_osm/__init__.py
diff --git a/src/compute/tests/mock_osm/acknowledgements.txt b/src/compute/tests/mock_osm/acknowledgements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b7ce926dd006d9bc8afaffbed212d90fb05adbef
--- /dev/null
+++ b/src/compute/tests/mock_osm/acknowledgements.txt
@@ -0,0 +1,3 @@
+MockOSM is based on source code taken from:
+https://osm.etsi.org/gitlab/osm/ro/-/blob/master/RO-plugin/osm_ro_plugin/sdnconn.py
+https://osm.etsi.org/gitlab/osm/ro/-/blob/master/RO-SDN-ietfl2vpn/osm_rosdn_ietfl2vpn/wimconn_ietfl2vpn.py
diff --git a/src/compute/tests/mock_osm/sdnconn.py b/src/compute/tests/mock_osm/sdnconn.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1849c9ef3e1a1260ff42bbadabc99f91a6435d7
--- /dev/null
+++ b/src/compute/tests/mock_osm/sdnconn.py
@@ -0,0 +1,242 @@
+# -*- coding: utf-8 -*-
+##
+# Copyright 2018 University of Bristol - High Performance Networks Research
+# Group
+# All Rights Reserved.
+#
+# Contributors: Anderson Bravalheri, Dimitrios Gkounis, Abubakar Siddique
+# Muqaddas, Navdeep Uniyal, Reza Nejabati and Dimitra Simeonidou
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: <highperformance-networks@bristol.ac.uk>
+#
+# Neither the name of the University of Bristol nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# This work has been performed in the context of DCMS UK 5G Testbeds
+# & Trials Programme and in the framework of the Metro-Haul project -
+# funded by the European Commission under Grant number 761727 through the
+# Horizon 2020 and 5G-PPP programmes.
+##
+"""The SDN connector is responsible for establishing both wide area network connectivity (WIM)
+and intranet SDN connectivity.
+
+It receives information about the ports to be connected.
+"""
+
+import logging
+from http import HTTPStatus
+
+
+class SdnConnectorError(Exception):
+    """Base Exception for all connector related errors
+    provide the parameter 'http_code' (int) with the error code:
+        Bad_Request = 400
+        Unauthorized = 401  (e.g. credentials are not valid)
+        Not_Found = 404    (e.g. try to edit or delete a non existing connectivity service)
+        Forbidden = 403
+        Method_Not_Allowed = 405
+        Not_Acceptable = 406
+        Request_Timeout = 408  (e.g timeout reaching server, or cannot reach the server)
+        Conflict = 409
+        Service_Unavailable = 503
+        Internal_Server_Error = 500
+    """
+
+    def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR.value):
+        Exception.__init__(self, message)
+        self.http_code = http_code
+
+
+class SdnConnectorBase(object):
+    """Abstract base class for all the SDN connectors
+
+    Arguments:
+        wim (dict): WIM record, as stored in the database
+        wim_account (dict): WIM account record, as stored in the database
+        config (dict or None): plugin-specific configuration (see the constructor below)
+    The arguments of the constructor are converted to object attributes.
+    An extra property, ``service_endpoint_mapping``, is created from ``config``.
+    """
+
+    def __init__(self, wim, wim_account, config=None, logger=None):
+        """
+        :param wim: (dict). Contains among others 'wim_url'
+        :param wim_account: (dict). Contains among others 'uuid' (internal id), 'name',
+            'sdn' (True if it is intended for SDN-assist or False if intended for WIM), 'user', 'password'.
+        :param config: (dict or None): Particular information of plugin. These keys if present have a common meaning:
+            'mapping_not_needed': (bool) False by default or if missing; when True, indicates that the endpoint mapping is not needed.
+            'service_endpoint_mapping': (list) provides the internal endpoint mapping. The meaning is:
+                KEY                     meaning for WIM             meaning for SDN assist
+                --------                --------                    --------
+                device_id               pop_switch_dpid             compute_id
+                device_interface_id     pop_switch_port             compute_pci_address
+                service_endpoint_id     wan_service_endpoint_id     SDN_service_endpoint_id
+                service_mapping_info    wan_service_mapping_info    SDN_service_mapping_info
+                    contains extra information if needed; text in YAML format
+                switch_dpid             wan_switch_dpid             SDN_switch_dpid
+                switch_port             wan_switch_port             SDN_switch_port
+                datacenter_id           vim_account                 vim_account
+            id: (internal, do not use)
+            wim_id: (internal, do not use)
+        :param logger: (logging.Logger) optional logger object. If none is passed, the 'ro.sdn' logger is used.
+        """
+        self.logger = logger or logging.getLogger("ro.sdn")
+        self.wim = wim
+        self.wim_account = wim_account
+        self.config = config or {}
+        self.service_endpoint_mapping = self.config.get("service_endpoint_mapping", [])
+
+    def check_credentials(self):
+        """Check if the connector itself can access the SDN/WIM with the provided url (wim.wim_url),
+            user (wim_account.user), and password (wim_account.password)
+
+        Raises:
+            SdnConnectorError: raised when issues regarding authorization,
+                access to external URLs, etc. are detected.
+        """
+        raise NotImplementedError
+
+    def get_connectivity_service_status(self, service_uuid, conn_info=None):
+        """Monitor the status of the connectivity service established
+
+        Arguments:
+            service_uuid (str): UUID of the connectivity service
+            conn_info (dict or None): Information returned by the connector
+                during the service creation/edition and subsequently stored in
+                the database.
+
+        Returns:
+            dict: JSON/YAML-serializable dict that contains a mandatory key
+                ``sdn_status`` associated with one of the following values::
+
+                    {'sdn_status': 'ACTIVE'}
+                        # The service is up and running.
+
+                    {'sdn_status': 'INACTIVE'}
+                        # The service was created, but the connector
+                        # cannot determine yet if connectivity exists
+                        # (ideally, the caller needs to wait and check again).
+
+                    {'sdn_status': 'DOWN'}
+                        # Connection was previously established,
+                        # but an error/failure was detected.
+
+                    {'sdn_status': 'ERROR'}
+                        # An error occurred when trying to create the service/
+                        # establish the connectivity.
+
+                    {'sdn_status': 'BUILD'}
+                        # Still trying to create the service, the caller
+                        # needs to wait and check again.
+
+                Additionally, ``error_msg`` (**str**) and ``sdn_info`` (**dict**)
+                keys can be used to provide additional status explanation or
+                new information available for the connectivity service.
+        """
+        raise NotImplementedError
+
+    def create_connectivity_service(self, service_type, connection_points, **kwargs):
+        """
+        Establish SDN/WAN connectivity between the endpoints
+        :param service_type: (str): ``ELINE`` (L2), ``ELAN`` (L2), ``ETREE`` (L2), ``L3``.
+        :param connection_points:  (list): each point corresponds to
+            an entry point to be connected. For WIM: from the DC to the transport network.
+            For SDN: Compute/PCI to the transport network. One
+            connection point serves to identify the specific access and
+            some other service parameters, such as encapsulation type.
+            Each item of the list is a dict with:
+                "service_endpoint_id": (str)(uuid)  Same meaning as for 'service_endpoint_mapping' (see __init__)
+                    In case the config attribute mapping_not_needed is True, this value is not relevant. In this case
+                    it will contain the string "device_id:device_interface_id"
+                "service_endpoint_encapsulation_type": None, "dot1q", ...
+                "service_endpoint_encapsulation_info": (dict) with:
+                    "vlan": ..., (int, present if encapsulation is dot1q)
+                    "vni": ... (int, present if encapsulation is vxlan),
+                    "peers": [(ipv4_1), (ipv4_2)] (present if encapsulation is vxlan)
+                    "mac": ...
+                    "device_id": ..., same meaning as for 'service_endpoint_mapping' (see __init__)
+                    "device_interface_id": same meaning as for 'service_endpoint_mapping' (see __init__)
+                    "switch_dpid": ..., present if mapping has been found for this device_id,device_interface_id
+                    "switch_port": ... present if mapping has been found for this device_id,device_interface_id
+                    "service_mapping_info": present if mapping has been found for this device_id,device_interface_id
+        :param kwargs: For future versions:
+            bandwidth (int): value in kilobytes
+            latency (int): value in milliseconds
+            Other QoS might be passed as keyword arguments.
+        :return: tuple: ``(service_uuid, conn_info)`` containing:
+            - *service_uuid* (str): UUID of the established connectivity service
+            - *conn_info* (dict or None): Information to be stored at the database (or ``None``).
+                This information will be provided to :meth:`~.edit_connectivity_service` and :meth:`~.delete_connectivity_service`.
+                **MUST** be JSON/YAML-serializable (plain data structures).
+        :raises: SdnConnectorError: In case of error. Nothing should be created in this case.
+            Provide the parameter http_code.
+        """
+        raise NotImplementedError
+
+    def delete_connectivity_service(self, service_uuid, conn_info=None):
+        """
+        Disconnect multi-site endpoints previously connected
+
+        :param service_uuid: The one returned by create_connectivity_service
+        :param conn_info: The one returned by the last call to 'create_connectivity_service' or 'edit_connectivity_service'
+            if they did not return None
+        :return: None
+        :raises: SdnConnectorError: In case of error. The parameter http_code must be filled.
+        """
+        raise NotImplementedError
+
+    def edit_connectivity_service(
+        self, service_uuid, conn_info=None, connection_points=None, **kwargs
+    ):
+        """Change an existing connectivity service.
+
+        This method's arguments and return value follow the same convention as
+        :meth:`~.create_connectivity_service`.
+
+        :param service_uuid: UUID of the connectivity service.
+        :param conn_info: (dict or None): Information previously returned by the last call to create_connectivity_service
+            or edit_connectivity_service
+        :param connection_points: (list): If provided, the old list of connection points will be replaced.
+        :param kwargs: Same meaning as for create_connectivity_service
+        :return: dict or None: Information to be updated and stored at the database.
+                When ``None`` is returned, no information should be changed.
+                When an empty dict is returned, the database record will be deleted.
+                **MUST** be JSON/YAML-serializable (plain data structures).
+        Raises:
+            SdnConnectorError: In case of error.
+        """
+
+    def clear_all_connectivity_services(self):
+        """Delete all WAN Links in a WIM.
+
+        This method is intended for debugging only, and should delete all the
+        connections controlled by the WIM/SDN, not only the connections that
+        a specific RO is aware of.
+
+        Raises:
+            SdnConnectorError: In case of error.
+        """
+        raise NotImplementedError
+
+    def get_all_active_connectivity_services(self):
+        """Provide information about all active connections provisioned by a
+        WIM.
+
+        Raises:
+            SdnConnectorError: In case of error.
+        """
+        raise NotImplementedError
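
To make the abstract interface above more concrete, here is a minimal sketch of what a connector plugin derived from SdnConnectorBase could look like. The class name, import path, payloads, and returned values are illustrative assumptions, not part of the OSM sources.

import uuid

from sdnconn import SdnConnectorBase  # import path depends on where the module lives

class DummySdnConnector(SdnConnectorBase):
    """Toy in-memory connector used only to illustrate the contract above."""

    def check_credentials(self):
        # A real plugin would probe self.wim["wim_url"] using the self.wim_account credentials.
        return None

    def create_connectivity_service(self, service_type, connection_points, **kwargs):
        # connection_points follows the structure documented above, e.g.:
        # [{"service_endpoint_id": "ep-1",
        #   "service_endpoint_encapsulation_type": "dot1q",
        #   "service_endpoint_encapsulation_info": {"vlan": 400}}]
        service_uuid = str(uuid.uuid4())
        conn_info = {"service_type": service_type, "connection_points": connection_points}
        return service_uuid, conn_info

    def get_connectivity_service_status(self, service_uuid, conn_info=None):
        return {"sdn_status": "ACTIVE"}

    def delete_connectivity_service(self, service_uuid, conn_info=None):
        return None
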
diff --git a/src/compute/tests/test_unitary.py b/src/compute/tests/test_unitary.py
index 689e6038251d0a371833b34cd9e5158af2fcc0e6..001999f1b9607f03cd393f5582cc08a504c0e9d2 100644
--- a/src/compute/tests/test_unitary.py
+++ b/src/compute/tests/test_unitary.py
@@ -1,48 +1,54 @@
-import logging, os, pytest, requests, time
-from google.protobuf.json_format import MessageToDict
-from common.type_checkers.Assertions import validate_service_id
-from compute.client.ComputeClient import ComputeClient
-from compute.proto.context_pb2 import Service
-from compute.service.ComputeService import ComputeService
-from compute.Config import (
-    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL)
-from compute.service.rest_server.Server import Server
-from compute.service.rest_server.resources.Compute import Compute
-from service.service.ServiceService import ServiceService
-from service.Config import (
-    GRPC_SERVICE_PORT as SERVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as SERVICE_GRPC_MAX_WORKERS,
-    GRPC_GRACE_PERIOD as SERVICE_GRPC_GRACE_PERIOD)
-
-compute_grpc_port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
-compute_restapi_port = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports
-service_grpc_port = 10000 + SERVICE_GRPC_SERVICE_PORT # avoid privileged ports
-
-os.environ['SERVICESERVICE_SERVICE_HOST'] = '127.0.0.1'
-os.environ['SERVICESERVICE_SERVICE_PORT_GRPC'] = str(service_grpc_port)
+import logging, os, pytest, time
+from compute.Config import RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL
+from compute.service.rest_server.RestServer import RestServer
+from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
+from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server
+from .mock_osm.MockOSM import MockOSM
+from .MockService import MockService
+from .MockServicerImpl_Context import MockServicerImpl_Context
+from .MockServicerImpl_Service import MockServicerImpl_Service
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-@pytest.fixture(scope='session')
-def service_service():
-    _service = ServiceService(
-        port=service_grpc_port, max_workers=SERVICE_GRPC_MAX_WORKERS, grace_period=SERVICE_GRPC_GRACE_PERIOD)
-    _service.start()
-    yield _service
-    _service.stop()
+LOCALHOST = '127.0.0.1'
+MOCKSERVER_GRPC_PORT = 10000
+COMPUTE_RESTAPI_PORT = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports
+
+class MockService_ContextService(MockService):
+    # Mock Server implementing Context and Service to simplify unitary tests of Compute
+
+    def __init__(self, cls_name='MockService_Service'):
+        super().__init__(LOCALHOST, MOCKSERVER_GRPC_PORT, cls_name=cls_name)
+
+    # pylint: disable=attribute-defined-outside-init
+    def install_servicers(self):
+        self.context_servicer = MockServicerImpl_Context()
+        add_ContextServiceServicer_to_server(self.context_servicer, self.server)
+        self.service_servicer = MockServicerImpl_Service()
+        add_ServiceServiceServicer_to_server(self.service_servicer, self.server)
+
+os.environ['CONTEXTSERVICE_SERVICE_HOST'] = LOCALHOST
+os.environ['CONTEXTSERVICE_SERVICE_PORT_GRPC'] = str(MOCKSERVER_GRPC_PORT)
+os.environ['SERVICESERVICE_SERVICE_HOST'] = LOCALHOST
+os.environ['SERVICESERVICE_SERVICE_PORT_GRPC'] = str(MOCKSERVER_GRPC_PORT)
+
+# NBI Plugin IETF L2VPN requires environment variables CONTEXTSERVICE_SERVICE_HOST, CONTEXTSERVICE_SERVICE_PORT_GRPC,
+# SERVICESERVICE_SERVICE_HOST, and SERVICESERVICE_SERVICE_PORT_GRPC to work properly.
+# pylint: disable=wrong-import-position,ungrouped-imports
+from compute.service.rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn
 
 @pytest.fixture(scope='session')
-def compute_service(service_service : ServiceService):
-    _service = ComputeService(port=compute_grpc_port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+def mockservice():
+    _service = MockService_ContextService()
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
-def compute_service_rest():
-    _rest_server = Server(port=compute_restapi_port, base_url=RESTAPI_BASE_URL)
-    _rest_server.add_resource(
-        Compute, '/restconf/config/compute', endpoint='api.compute')
+def compute_service_rest(mockservice):  # pylint: disable=redefined-outer-name
+    _rest_server = RestServer(port=COMPUTE_RESTAPI_PORT, base_url=RESTAPI_BASE_URL)
+    register_ietf_l2vpn(_rest_server)
     _rest_server.start()
     time.sleep(1) # give the server some time to start
     yield _rest_server
@@ -50,27 +56,18 @@ def compute_service_rest():
     _rest_server.join()
 
 @pytest.fixture(scope='session')
-def compute_client(compute_service):
-    _client = ComputeClient(address='127.0.0.1', port=compute_grpc_port)
-    yield _client
-    _client.close()
+def osm_wim(compute_service_rest): # pylint: disable=redefined-outer-name
+    wim_url = 'http://{:s}:{:d}'.format(LOCALHOST, COMPUTE_RESTAPI_PORT)
+    return MockOSM(wim_url)
+
+def test_compute_create_connectivity_service_rest_api(osm_wim : MockOSM): # pylint: disable=redefined-outer-name
+    osm_wim.create_connectivity_service()
+
+def test_compute_get_connectivity_service_status_rest_api(osm_wim : MockOSM): # pylint: disable=redefined-outer-name
+    osm_wim.get_connectivity_service_status()
 
-def test_dummy_create_connectivity_service(compute_client : ComputeClient):
-    # dummy test: should fail with assertion error
-    with pytest.raises(AssertionError):
-        validate_service_id(MessageToDict(
-            compute_client.CreateConnectivityService(Service()),
-            including_default_value_fields=True, preserving_proto_field_name=True,
-            use_integers_for_enums=False))
+def test_compute_edit_connectivity_service_rest_api(osm_wim : MockOSM): # pylint: disable=redefined-outer-name
+    osm_wim.edit_connectivity_service()
 
-def test_dummy_create_connectivity_service_rest_api(compute_service_rest : Server):
-    # should work
-    request_url = 'http://127.0.0.1:{:s}{:s}/restconf/config/compute'
-    request_url = request_url.format(str(compute_restapi_port), RESTAPI_BASE_URL)
-    reply = requests.post(request_url, json={
-        # here add context of POST request body as JSON
-    })
-    json_reply = reply.json()
-    LOGGER.info('json_reply = {:s}'.format(str(json_reply)))
-    assert 'succeeded' in json_reply
-    assert json_reply['succeeded']
+def test_compute_delete_connectivity_service_rest_api(osm_wim : MockOSM): # pylint: disable=redefined-outer-name
+    osm_wim.delete_connectivity_service()
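
The MockOSM calls above exercise the Compute RESTCONF endpoints through the IETF L2VPN connector shown earlier. As a rough, hand-written probe of the same API (not part of the test suite; the base path comes from the connector code above, while the credentials are an assumption), one could query the vpn-services resource directly:

import requests

# Hypothetical manual probe against the REST server started by the fixtures above.
wim_url = 'http://{:s}:{:d}'.format(LOCALHOST, COMPUTE_RESTAPI_PORT)
url = wim_url + '/restconf/data/ietf-l2vpn-svc:l2vpn-svc/vpn-services'
reply = requests.get(url, auth=('admin', 'admin'))  # credentials are a placeholder
print(reply.status_code, reply.text)
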
diff --git a/src/context/.gitlab-ci.yml b/src/context/.gitlab-ci.yml
index 8803cc9c2802ec5991f8ce330c2d86a9e5c763eb..a77be1af77766c9dbe6501b28db9a2c206e2864b 100644
--- a/src/context/.gitlab-ci.yml
+++ b/src/context/.gitlab-ci.yml
@@ -1,8 +1,7 @@
-# Build, tag, and push the Docker images to the GitLab Docker registry
+# Build, tag and push the Docker image to the GitLab registry
 build context:
   variables:
     IMAGE_NAME: 'context' # name of the microservice
-    IMAGE_NAME_TEST: 'context-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,52 +10,83 @@ build context:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
-# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test context:
+# Run the unit tests of the component
+unit test context:
   variables:
     IMAGE_NAME: 'context' # name of the microservice
-    IMAGE_NAME_TEST: 'context-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
     - build context
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep redis; then docker rm -f redis; else echo "redis image is not in the system"; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker pull "redis:6.2"
-    - docker run -d --name $IMAGE_NAME-redis --network=teraflowbridge redis:6.2
-    - docker run -d --name $IMAGE_NAME --network=teraflowbridge --env "LOG_LEVEL=INFO" --env "DB_BACKEND=redis" --env "REDIS_SERVICE_HOST=$IMAGE_NAME-redis" --env "REDIS_SERVICE_PORT=6379" --env "REDIS_DATABASE_ID=0" "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
-    - sleep 5
+    - docker run --name redis -d --network=teraflowbridge redis:6.2
+    - docker run --name $IMAGE_NAME -d -p 1010:1010 --env "DB_BACKEND=redis" --env "REDIS_SERVICE_HOST=redis" --env "REDIS_SERVICE_PORT=6379" --env "REDIS_DATABASE_ID=0" -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - sleep 10
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
-    - docker stop $IMAGE_NAME $IMAGE_NAME-redis
-    - docker rm $IMAGE_NAME $IMAGE_NAME-redis
+    - docker rm -f $IMAGE_NAME
+    - docker rm -f redis
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
+  artifacts:
+      when: always
+      reports:
+        junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+        cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
 
 # Deployment of the service in Kubernetes Cluster
 deploy context:
+  variables:
+    IMAGE_NAME: 'context' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build context
-    - unit_test context
-    - dependencies all
-    - integ_test execute
+    - unit test context
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/contextservice.yaml"
-    - kubectl delete pods --selector app=contextservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py
index c3e3c76fc43fbdc9a215703fdcdc55cb488d0171..84e3031c2e9f26ab64ebd7a92a02c84f2d7cb9f6 100644
--- a/src/context/client/ContextClient.py
+++ b/src/context/client/ContextClient.py
@@ -1,13 +1,11 @@
 from typing import Iterator
 import grpc, logging
 from common.tools.client.RetryDecorator import retry, delay_exponential
-from context.proto.context_pb2 import \
-    Context,  ContextEvent,  ContextId,  ContextIdList,  ContextList,  \
-    Device,   DeviceEvent,   DeviceId,   DeviceIdList,   DeviceList,   \
-    Empty,                                                             \
-    Link,     LinkEvent,     LinkId,     LinkIdList,     LinkList,     \
-    Service,  ServiceEvent,  ServiceId,  ServiceIdList,  ServiceList,  \
-    Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList
+from context.proto.context_pb2 import (
+    Connection, ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList, Context, ContextEvent, ContextId,
+    ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList, Empty, Link, LinkEvent,
+    LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList, Topology,
+    TopologyEvent, TopologyId, TopologyIdList, TopologyList)
 from context.proto.context_pb2_grpc import ContextServiceStub
 
 LOGGER = logging.getLogger(__name__)
@@ -241,3 +239,45 @@ class ContextClient:
         response = self.stub.GetServiceEvents(request)
         LOGGER.debug('GetServiceEvents result: {:s}'.format(str(response)))
         return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListConnectionIds(self, request: ServiceId) -> ConnectionIdList:
+        LOGGER.debug('ListConnectionIds request: {:s}'.format(str(request)))
+        response = self.stub.ListConnectionIds(request)
+        LOGGER.debug('ListConnectionIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListConnections(self, request: ServiceId) -> ConnectionList:
+        LOGGER.debug('ListConnections request: {:s}'.format(str(request)))
+        response = self.stub.ListConnections(request)
+        LOGGER.debug('ListConnections result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetConnection(self, request: ConnectionId) -> Connection:
+        LOGGER.debug('GetConnection request: {:s}'.format(str(request)))
+        response = self.stub.GetConnection(request)
+        LOGGER.debug('GetConnection result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetConnection(self, request: Connection) -> ConnectionId:
+        LOGGER.debug('SetConnection request: {:s}'.format(str(request)))
+        response = self.stub.SetConnection(request)
+        LOGGER.debug('SetConnection result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def RemoveConnection(self, request: ConnectionId) -> Empty:
+        LOGGER.debug('RemoveConnection request: {:s}'.format(str(request)))
+        response = self.stub.RemoveConnection(request)
+        LOGGER.debug('RemoveConnection result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetConnectionEvents(self, request: Empty) -> Iterator[ConnectionEvent]:
+        LOGGER.debug('GetConnectionEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetConnectionEvents(request)
+        LOGGER.debug('GetConnectionEvents result: {:s}'.format(str(response)))
+        return response
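
The new Connection RPC wrappers follow the same pattern as the existing Context/Service calls. A short usage sketch follows; the address, port, and UUIDs are placeholders, and the constructor arguments are assumed analogous to the other gRPC clients.

from context.client.ContextClient import ContextClient
from context.proto.context_pb2 import ServiceId

# Placeholder endpoint; in-cluster deployments resolve host/port from environment variables.
client = ContextClient(address='127.0.0.1', port=1010)

service_id = ServiceId()
service_id.context_id.context_uuid.uuid = 'admin'   # hypothetical context UUID
service_id.service_uuid.uuid = 'svc-1234'           # hypothetical service UUID

connection_ids = client.ListConnectionIds(service_id)   # -> ConnectionIdList
connections = client.ListConnections(service_id)        # -> ConnectionList
for connection in connections.connections:
    print(connection.connection_id.connection_uuid.uuid)
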
diff --git a/src/context/proto/context_pb2.py b/src/context/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/context/proto/context_pb2.py
+++ b/src/context/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
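The regenerated context_pb2.py above adds the Connection, ConnectionId, ConnectionIdList, ConnectionList and ConnectionEvent messages, reorders EndPointId/EndPoint/ConfigRule/Constraint after them, gives EndPoint a kpi_sample_types field, and extends the ContextService descriptor with six Connection RPCs. A minimal sketch (not part of the patch) of populating the new Connection message, using only field names visible in the descriptors; the ServiceId sub-fields and every UUID value are illustrative.

# A minimal sketch (not part of the patch): populating the new context.Connection
# message with the fields declared in the regenerated descriptors above. The
# ServiceId sub-fields and every UUID value are illustrative only.
from context.proto.context_pb2 import Connection

connection = Connection()
connection.connection_id.connection_uuid.uuid = 'con-1'        # ConnectionId wraps a single Uuid
connection.service_id.context_id.context_uuid.uuid = 'admin'   # owning service (sub-fields assumed)
connection.service_id.service_uuid.uuid = 'svc-dev1-dev3'

hop = connection.path_hops_endpoint_ids.add()                  # repeated EndPointId
hop.device_id.device_uuid.uuid = 'DEV1'
hop.endpoint_uuid.uuid = 'EP2'

sub = connection.sub_service_ids.add()                         # repeated ServiceId
sub.service_uuid.uuid = 'svc-dev1-dev2'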
 
diff --git a/src/context/proto/context_pb2_grpc.py b/src/context/proto/context_pb2_grpc.py
index c344c8b539324140fddc411324959368da3c1b7b..14397184e3a46f0f8be7fd9bf3fb7210520afe74 100644
--- a/src/context/proto/context_pb2_grpc.py
+++ b/src/context/proto/context_pb2_grpc.py
@@ -6,9 +6,7 @@ from . import context_pb2 as context__pb2
 
 
 class ContextServiceStub(object):
-    """import "kpi_sample_types.proto";
-
-    """
+    """Missing associated documentation comment in .proto file."""
 
     def __init__(self, channel):
         """Constructor.
@@ -166,12 +164,40 @@ class ContextServiceStub(object):
                 request_serializer=context__pb2.Empty.SerializeToString,
                 response_deserializer=context__pb2.ServiceEvent.FromString,
                 )
+        self.ListConnectionIds = channel.unary_unary(
+                '/context.ContextService/ListConnectionIds',
+                request_serializer=context__pb2.ServiceId.SerializeToString,
+                response_deserializer=context__pb2.ConnectionIdList.FromString,
+                )
+        self.ListConnections = channel.unary_unary(
+                '/context.ContextService/ListConnections',
+                request_serializer=context__pb2.ServiceId.SerializeToString,
+                response_deserializer=context__pb2.ConnectionList.FromString,
+                )
+        self.GetConnection = channel.unary_unary(
+                '/context.ContextService/GetConnection',
+                request_serializer=context__pb2.ConnectionId.SerializeToString,
+                response_deserializer=context__pb2.Connection.FromString,
+                )
+        self.SetConnection = channel.unary_unary(
+                '/context.ContextService/SetConnection',
+                request_serializer=context__pb2.Connection.SerializeToString,
+                response_deserializer=context__pb2.ConnectionId.FromString,
+                )
+        self.RemoveConnection = channel.unary_unary(
+                '/context.ContextService/RemoveConnection',
+                request_serializer=context__pb2.ConnectionId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetConnectionEvents = channel.unary_stream(
+                '/context.ContextService/GetConnectionEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.ConnectionEvent.FromString,
+                )
 
 
 class ContextServiceServicer(object):
-    """import "kpi_sample_types.proto";
-
-    """
+    """Missing associated documentation comment in .proto file."""
 
     def ListContextIds(self, request, context):
         """Missing associated documentation comment in .proto file."""
@@ -353,6 +379,42 @@ class ContextServiceServicer(object):
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
+    def ListConnectionIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListConnections(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetConnection(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SetConnection(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveConnection(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetConnectionEvents(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
 
 def add_ContextServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
@@ -506,6 +568,36 @@ def add_ContextServiceServicer_to_server(servicer, server):
                     request_deserializer=context__pb2.Empty.FromString,
                     response_serializer=context__pb2.ServiceEvent.SerializeToString,
             ),
+            'ListConnectionIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListConnectionIds,
+                    request_deserializer=context__pb2.ServiceId.FromString,
+                    response_serializer=context__pb2.ConnectionIdList.SerializeToString,
+            ),
+            'ListConnections': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListConnections,
+                    request_deserializer=context__pb2.ServiceId.FromString,
+                    response_serializer=context__pb2.ConnectionList.SerializeToString,
+            ),
+            'GetConnection': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetConnection,
+                    request_deserializer=context__pb2.ConnectionId.FromString,
+                    response_serializer=context__pb2.Connection.SerializeToString,
+            ),
+            'SetConnection': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetConnection,
+                    request_deserializer=context__pb2.Connection.FromString,
+                    response_serializer=context__pb2.ConnectionId.SerializeToString,
+            ),
+            'RemoveConnection': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveConnection,
+                    request_deserializer=context__pb2.ConnectionId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetConnectionEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetConnectionEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.ConnectionEvent.SerializeToString,
+            ),
     }
     generic_handler = grpc.method_handlers_generic_handler(
             'context.ContextService', rpc_method_handlers)
@@ -514,9 +606,7 @@ def add_ContextServiceServicer_to_server(servicer, server):
 
  # This class is part of an EXPERIMENTAL API.
 class ContextService(object):
-    """import "kpi_sample_types.proto";
-
-    """
+    """Missing associated documentation comment in .proto file."""
 
     @staticmethod
     def ListContextIds(request,
@@ -1027,3 +1117,105 @@ class ContextService(object):
             context__pb2.ServiceEvent.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListConnectionIds(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListConnectionIds',
+            context__pb2.ServiceId.SerializeToString,
+            context__pb2.ConnectionIdList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListConnections(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListConnections',
+            context__pb2.ServiceId.SerializeToString,
+            context__pb2.ConnectionList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetConnection(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetConnection',
+            context__pb2.ConnectionId.SerializeToString,
+            context__pb2.Connection.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetConnection(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetConnection',
+            context__pb2.Connection.SerializeToString,
+            context__pb2.ConnectionId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveConnection(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveConnection',
+            context__pb2.ConnectionId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetConnectionEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetConnectionEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.ConnectionEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
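With the stub, servicer and experimental entry points regenerated above, the six new Connection RPCs can be exercised like the existing ones. A client-side sketch (not part of the patch); the address is a placeholder, and the empty request messages only show the call shapes, so a real SetConnection needs a fully populated Connection as in the earlier sketch.

# A client-side sketch (not part of the patch): the six Connection RPCs added to
# ContextServiceStub above.
import grpc
from context.proto.context_pb2 import Connection, Empty, ServiceId
from context.proto.context_pb2_grpc import ContextServiceStub

channel = grpc.insecure_channel('127.0.0.1:1010')    # placeholder Context service endpoint
stub = ContextServiceStub(channel)

connection_id = stub.SetConnection(Connection())     # returns a ConnectionId
connection = stub.GetConnection(connection_id)       # returns the stored Connection

for conn_id in stub.ListConnectionIds(ServiceId()).connection_ids:
    print(conn_id.connection_uuid.uuid)
for conn in stub.ListConnections(ServiceId()).connections:
    print(conn.connection_id.connection_uuid.uuid)

for event in stub.GetConnectionEvents(Empty()):       # server-streaming RPC
    print(event.connection_id.connection_uuid.uuid)
    break

stub.RemoveConnection(connection_id)
channel.close()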
diff --git a/src/context/proto/kpi_sample_types_pb2.py b/src/context/proto/kpi_sample_types_pb2.py
index ad22554ec352d0aeae644fdce00c0f28996ed73b..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490 100644
--- a/src/context/proto/kpi_sample_types_pb2.py
+++ b/src/context/proto/kpi_sample_types_pb2.py
@@ -2,6 +2,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: kpi_sample_types.proto
 """Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -15,15 +16,62 @@ _sym_db = _symbol_database.Default()
 
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='kpi_sample_types.proto',
-  package='',
+  package='kpi_sample_types',
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x16kpi_sample_types.protob\x06proto3'
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
 )
 
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
 
 
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
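kpi_sample_types_pb2.py now carries a real KpiSampleType enum wrapped in an EnumTypeWrapper, so the standard Name()/Value() helpers and the module-level constants shown above become available. A quick sketch (not part of the patch):

# A quick sketch (not part of the patch): the regenerated enum behaves like any
# other protobuf enum wrapper, with Name()/Value() helpers and module constants.
from context.proto.kpi_sample_types_pb2 import KpiSampleType, KPISAMPLETYPE_BYTES_RECEIVED

assert KpiSampleType.Value('KPISAMPLETYPE_PACKETS_TRANSMITTED') == 101
assert KpiSampleType.Name(202) == 'KPISAMPLETYPE_BYTES_RECEIVED'
assert KPISAMPLETYPE_BYTES_RECEIVED == 202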
 
 
diff --git a/src/context/requirements.in b/src/context/requirements.in
index e80c645fc94eb240a3290e762db22557e94cdae0..86097934b48d8465bc9092881f9320ec9ebe54f4 100644
--- a/src/context/requirements.in
+++ b/src/context/requirements.in
@@ -6,3 +6,4 @@ pytest
 pytest-benchmark
 redis
 requests
+coverage
diff --git a/src/context/service/Populate.py b/src/context/service/Populate.py
index eea593d90a608b57db0bd1fa801eef11abee5b03..91460b7ce52ae84e76d2781672f636dc86187f99 100644
--- a/src/context/service/Populate.py
+++ b/src/context/service/Populate.py
@@ -1,9 +1,8 @@
 import copy
 from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import Context, Device, Link, Service, Topology
+from context.proto.context_pb2 import Connection, Context, Device, Link, Service, Topology
 from context.tests.example_objects import (
-    CONTEXT, TOPOLOGY,
-    DEVICE1, DEVICE1_ID, DEVICE2, DEVICE2_ID, DEVICE3, DEVICE3_ID,
+    CONNECTION_DEV1_DEV3, CONTEXT, TOPOLOGY, DEVICE1, DEVICE1_ID, DEVICE2, DEVICE2_ID, DEVICE3, DEVICE3_ID,
     LINK_DEV1_DEV2, LINK_DEV1_DEV2_ID, LINK_DEV1_DEV3, LINK_DEV1_DEV3_ID, LINK_DEV2_DEV3, LINK_DEV2_DEV3_ID,
     SERVICE_DEV1_DEV2, SERVICE_DEV1_DEV3, SERVICE_DEV2_DEV3)
 
@@ -30,5 +29,7 @@ def populate(address, port):
     client.SetTopology(Topology(**TOPOLOGY_WITH_DEVICES_AND_LINKS))
 
     client.SetService(Service(**SERVICE_DEV1_DEV2))
-    client.SetService(Service(**SERVICE_DEV1_DEV3))
     client.SetService(Service(**SERVICE_DEV2_DEV3))
+
+    client.SetService(Service(**SERVICE_DEV1_DEV3))
+    client.SetConnection(Connection(**CONNECTION_DEV1_DEV3))
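Populate.py now also stores a Connection built from CONNECTION_DEV1_DEV3. That example object lives in context/tests/example_objects.py, which this patch does not show; a hypothetical dict of the right general shape, matching the Connection fields added above, is sketched below with purely illustrative identifiers.

# A hypothetical sketch (not part of the patch): CONNECTION_DEV1_DEV3 is defined
# in context/tests/example_objects.py, not shown here. A dict of this general
# shape unpacks into Connection(**...) given the message fields added above;
# every identifier and UUID below is illustrative only.
from context.proto.context_pb2 import Connection

CONNECTION_DEV1_DEV3_SKETCH = {
    'connection_id': {'connection_uuid': {'uuid': 'CON:DEV1-DEV3'}},
    'service_id': {
        'context_id': {'context_uuid': {'uuid': 'admin'}},       # ServiceId sub-fields assumed
        'service_uuid': {'uuid': 'SVC:DEV1-DEV3'},
    },
    'path_hops_endpoint_ids': [
        {'device_id': {'device_uuid': {'uuid': 'DEV1'}}, 'endpoint_uuid': {'uuid': 'EP2'}},
        {'device_id': {'device_uuid': {'uuid': 'DEV2'}}, 'endpoint_uuid': {'uuid': 'EP1'}},
        {'device_id': {'device_uuid': {'uuid': 'DEV3'}}, 'endpoint_uuid': {'uuid': 'EP2'}},
    ],
    'sub_service_ids': [],
}

connection = Connection(**CONNECTION_DEV1_DEV3_SKETCH)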
diff --git a/src/context/service/database/ConnectionModel.py b/src/context/service/database/ConnectionModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..61033841c02bd5ad4614602605db9fd144564cc8
--- /dev/null
+++ b/src/context/service/database/ConnectionModel.py
@@ -0,0 +1,122 @@
+import logging, operator
+from typing import Dict, List, Optional, Set, Tuple, Union
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_object, get_or_create_object, get_related_objects, update_or_create_object
+from context.proto.context_pb2 import EndPointId
+from .EndPointModel import EndPointModel
+from .ServiceModel import ServiceModel
+from .Tools import remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class PathModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_path_hop_pks = self.references(PathHopModel)
+        path_hops = [PathHopModel(self.database, pk).dump(include_position=True) for pk,_ in db_path_hop_pks]
+        path_hops = sorted(path_hops, key=operator.itemgetter('position'))
+        return [remove_dict_key(path_hop, 'position') for path_hop in path_hops]
+
+class PathHopModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    path_fk = ForeignKeyField(PathModel)
+    position = IntegerField(min_value=0, required=True)
+    endpoint_fk = ForeignKeyField(EndPointModel)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        db_endpoint : EndPointModel = EndPointModel(self.database, self.endpoint_fk)
+        result = db_endpoint.dump_id()
+        if include_position: result['position'] = self.position
+        return result
+
+class ConnectionModel(Model):
+    pk = PrimaryKeyField()
+    connection_uuid = StringField(required=True, allow_empty=False)
+    service_fk = ForeignKeyField(ServiceModel, required=False)
+    path_fk = ForeignKeyField(PathModel, required=True)
+
+    def dump_id(self) -> Dict:
+        return {
+            'connection_uuid': {'uuid': self.connection_uuid},
+        }
+
+    def dump_path_hops_endpoint_ids(self) -> List[Dict]:
+        return PathModel(self.database, self.path_fk).dump()
+
+    def dump_sub_service_ids(self) -> List[Dict]:
+        from .RelationModels import ConnectionSubServiceModel # pylint: disable=import-outside-toplevel
+        db_sub_services = get_related_objects(self, ConnectionSubServiceModel, 'sub_service_fk')
+        return [db_sub_service.dump_id() for db_sub_service in sorted(db_sub_services, key=operator.attrgetter('pk'))]
+
+    def dump(self, include_path=True, include_sub_service_ids=True) -> Dict: # pylint: disable=arguments-differ
+        result = {'connection_id': self.dump_id()}
+        if self.service_fk is not None:
+            result['service_id'] = ServiceModel(self.database, self.service_fk).dump_id()
+        if include_path: result['path_hops_endpoint_ids'] = self.dump_path_hops_endpoint_ids()
+        if include_sub_service_ids: result['sub_service_ids'] = self.dump_sub_service_ids()
+        return result
+
+def set_path_hop(
+        database : Database, db_path : PathModel, position : int, db_endpoint : EndPointModel
+    ) -> Tuple[PathHopModel, bool]:
+
+    str_path_hop_key = key_to_str([db_path.pk, db_endpoint.pk], separator=':')
+    result : Tuple[PathHopModel, bool] = update_or_create_object(database, PathHopModel, str_path_hop_key, {
+        'path_fk': db_path, 'position': position, 'endpoint_fk': db_endpoint})
+    db_path_hop, updated = result
+    return db_path_hop, updated
+
+def delete_path_hop(
+        database : Database, db_path : PathModel, db_path_hop_pk : str
+    ) -> None:
+
+    db_path_hop : Optional[PathHopModel] = get_object(database, PathHopModel, db_path_hop_pk, raise_if_not_found=False)
+    if db_path_hop is None: return
+    db_path_hop.delete()
+
+def delete_all_path_hops(
+        database : Database, db_path : PathModel
+    ) -> None:
+
+    db_path_hop_pks = db_path.references(PathHopModel)
+    for pk,_ in db_path_hop_pks: PathHopModel(database, pk).delete()
+
+def set_path(
+        database : Database, connection_uuid : str, raw_endpoint_ids : List[EndPointId], path_name : str = ''
+    ) -> List[Union[PathModel, PathHopModel]]:
+
+    str_path_key = connection_uuid if len(path_name) == 0 else key_to_str([connection_uuid, path_name], separator=':')
+    result : Tuple[PathModel, bool] = get_or_create_object(database, PathModel, str_path_key)
+    db_path, created = result
+
+    db_path_hop_pks : Set[str] = set(map(operator.itemgetter(0), db_path.references(PathHopModel)))
+    db_objects : List[Union[PathModel, PathHopModel]] = [db_path]
+
+    for position,endpoint_id in enumerate(raw_endpoint_ids):
+        endpoint_uuid                  = endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid           = endpoint_id.device_id.device_uuid.uuid
+        endpoint_topology_uuid         = endpoint_id.topology_id.topology_uuid.uuid
+        endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+        str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+        if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+            str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+            str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+
+        db_endpoint : EndPointModel = get_object(database, EndPointModel, str_endpoint_key)
+
+        result : Tuple[PathHopModel, bool] = set_path_hop(database, db_path, position, db_endpoint)
+        db_path_hop, updated = result
+        db_objects.append(db_path_hop)
+        db_path_hop_pks.discard(db_path_hop.instance_key)
+
+    for db_path_hop_pk in db_path_hop_pks: delete_path_hop(database, db_path, db_path_hop_pk)
+
+    return db_objects
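The new ConnectionModel.py stores a connection as a ConnectionModel row plus a PathModel whose position-ordered PathHopModel rows are reconciled by set_path() against the EndPointIds received over gRPC. A minimal sketch (not part of the patch) of how a SetConnection-style handler could drive it; the object key layout is an assumption modelled on the code above, and the actual handler lives in ContextServiceServicerImpl.py further down.

# A minimal sketch (not part of the patch): driving set_path() from a
# SetConnection-style handler. The object key layout is assumed; the real
# handler is implemented in ContextServiceServicerImpl.py.
from common.orm.HighLevel import update_or_create_object
from context.service.database.ConnectionModel import ConnectionModel, set_path

def store_connection(database, request):     # request: context.Connection
    connection_uuid = request.connection_id.connection_uuid.uuid

    # Reconcile the stored path hops with the EndPointIds carried by the request.
    db_objects = set_path(database, connection_uuid, request.path_hops_endpoint_ids)
    db_path = db_objects[0]                   # set_path() returns the PathModel first

    # Store/refresh the connection row pointing at that path.
    db_connection, _updated = update_or_create_object(database, ConnectionModel, connection_uuid, {
        'connection_uuid': connection_uuid, 'path_fk': db_path})
    return db_connection.dump()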
diff --git a/src/context/service/database/EndPointModel.py b/src/context/service/database/EndPointModel.py
index 38b87d6f37c4e99dd3790f4d8802acd03873f77d..f1239ac124df42a74575e15d247c98b6dd2a88bb 100644
--- a/src/context/service/database/EndPointModel.py
+++ b/src/context/service/database/EndPointModel.py
@@ -1,10 +1,14 @@
 import logging
-from typing import Dict
+from typing import Dict, List
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
 from common.orm.fields.ForeignKeyField import ForeignKeyField
 from common.orm.fields.PrimaryKeyField import PrimaryKeyField
 from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
 from .DeviceModel import DeviceModel
+from .KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type
 from .TopologyModel import TopologyModel
 
 LOGGER = logging.getLogger(__name__)
@@ -26,8 +30,34 @@ class EndPointModel(Model):
             result['topology_id'] = TopologyModel(self.database, self.topology_fk).dump_id()
         return result
 
-    def dump(self) -> Dict:
-        return {
+    def dump_kpi_sample_types(self) -> List[int]:
+        db_kpi_sample_type_pks = self.references(KpiSampleTypeModel)
+        return [KpiSampleTypeModel(self.database, pk).dump() for pk,_ in db_kpi_sample_type_pks]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_kpi_sample_types=True
+        ) -> Dict:
+        result = {
             'endpoint_id': self.dump_id(),
             'endpoint_type': self.endpoint_type,
         }
+        if include_kpi_sample_types: result['kpi_sample_types'] = self.dump_kpi_sample_types()
+        return result
+
+class KpiSampleTypeModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    endpoint_fk = ForeignKeyField(EndPointModel)
+    kpi_sample_type = EnumeratedField(ORM_KpiSampleTypeEnum, required=True)
+
+    def dump(self) -> int:
+        return self.kpi_sample_type.value
+
+def set_kpi_sample_types(database : Database, db_endpoint : EndPointModel, grpc_endpoint_kpi_sample_types):
+    db_endpoint_pk = db_endpoint.pk
+    for kpi_sample_type in grpc_endpoint_kpi_sample_types:
+        orm_kpi_sample_type = grpc_to_enum__kpi_sample_type(kpi_sample_type)
+        str_endpoint_kpi_sample_type_key = key_to_str([db_endpoint_pk, orm_kpi_sample_type.name])
+        db_endpoint_kpi_sample_type = KpiSampleTypeModel(database, str_endpoint_kpi_sample_type_key)
+        db_endpoint_kpi_sample_type.endpoint_fk = db_endpoint
+        db_endpoint_kpi_sample_type.kpi_sample_type = orm_kpi_sample_type
+        db_endpoint_kpi_sample_type.save()
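EndPointModel now keeps one KpiSampleTypeModel row per sample type advertised by an endpoint, and dump() can include them. A minimal sketch (not part of the patch) of how set_kpi_sample_types() is meant to be called; db_endpoint and grpc_endpoint are assumed to come from the surrounding servicer code.

# A minimal sketch (not part of the patch): attaching KPI sample types to an
# endpoint that is already stored. db_endpoint is an EndPointModel row and
# grpc_endpoint is a context.EndPoint message; both are assumed to come from
# the calling servicer code.
from context.service.database.EndPointModel import set_kpi_sample_types

def store_endpoint_kpi_sample_types(database, db_endpoint, grpc_endpoint):
    # One KpiSampleTypeModel row is written per enum value carried in the
    # message, keyed by the endpoint pk plus the enum member name.
    set_kpi_sample_types(database, db_endpoint, grpc_endpoint.kpi_sample_types)
    return db_endpoint.dump(include_kpi_sample_types=True)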
diff --git a/src/context/service/database/KpiSampleType.py b/src/context/service/database/KpiSampleType.py
new file mode 100644
index 0000000000000000000000000000000000000000..50cbcd8a6641f1a598b2153cac840d6259462f96
--- /dev/null
+++ b/src/context/service/database/KpiSampleType.py
@@ -0,0 +1,14 @@
+import functools
+from enum import Enum
+from context.proto.kpi_sample_types_pb2 import KpiSampleType
+from .Tools import grpc_to_enum
+
+class ORM_KpiSampleTypeEnum(Enum):
+    UNKNOWN             = KpiSampleType.KPISAMPLETYPE_UNKNOWN
+    PACKETS_TRANSMITTED = KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED
+    PACKETS_RECEIVED    = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
+    BYTES_TRANSMITTED   = KpiSampleType.KPISAMPLETYPE_BYTES_TRANSMITTED
+    BYTES_RECEIVED      = KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED
+
+grpc_to_enum__kpi_sample_type = functools.partial(
+    grpc_to_enum, KpiSampleType, ORM_KpiSampleTypeEnum)
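KpiSampleType.py mirrors the gRPC enum into an ORM-friendly Enum and binds it to grpc_to_enum() from Tools.py, which this patch does not show. A sketch (not part of the patch), assuming grpc_to_enum resolves members by numeric value:

# A sketch (not part of the patch): grpc_to_enum() lives in
# context/service/database/Tools.py, not shown here; the behaviour assumed is a
# lookup of the ORM member whose value matches the gRPC enum value.
from context.proto.kpi_sample_types_pb2 import KpiSampleType
from context.service.database.KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type

orm_member = grpc_to_enum__kpi_sample_type(KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED)
print(orm_member)                                    # expected: ORM_KpiSampleTypeEnum.BYTES_RECEIVED
print(ORM_KpiSampleTypeEnum.BYTES_RECEIVED.value)    # 202, straight from the proto enum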
diff --git a/src/context/service/database/RelationModels.py b/src/context/service/database/RelationModels.py
index 4531e0594f3f213f4a00b1fe70dfb2d8dc0a0f5e..8da0862ee744c4015a27a3eb3bdafad2a8e79a26 100644
--- a/src/context/service/database/RelationModels.py
+++ b/src/context/service/database/RelationModels.py
@@ -2,6 +2,7 @@ import logging
 from common.orm.fields.ForeignKeyField import ForeignKeyField
 from common.orm.fields.PrimaryKeyField import PrimaryKeyField
 from common.orm.model.Model import Model
+from .ConnectionModel import ConnectionModel
 from .DeviceModel import DeviceModel
 from .EndPointModel import EndPointModel
 from .LinkModel import LinkModel
@@ -10,6 +11,11 @@ from .TopologyModel import TopologyModel
 
 LOGGER = logging.getLogger(__name__)
 
+class ConnectionSubServiceModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    connection_fk = ForeignKeyField(ConnectionModel)
+    sub_service_fk = ForeignKeyField(ServiceModel)
+
 class LinkEndPointModel(Model): # pylint: disable=abstract-method
     pk = PrimaryKeyField()
     link_fk = ForeignKeyField(LinkModel)
diff --git a/src/context/service/grpc_server/Constants.py b/src/context/service/grpc_server/Constants.py
index 80ff198de86644e53d4d8cc6a693efae237f450f..aff7711e221bdfae70e720d0040f8e5bedcbeec7 100644
--- a/src/context/service/grpc_server/Constants.py
+++ b/src/context/service/grpc_server/Constants.py
@@ -1,9 +1,10 @@
-TOPIC_CONTEXT  = 'context'
-TOPIC_TOPOLOGY = 'topology'
-TOPIC_DEVICE   = 'device'
-TOPIC_LINK     = 'link'
-TOPIC_SERVICE  = 'service'
+TOPIC_CONTEXT    = 'context'
+TOPIC_TOPOLOGY   = 'topology'
+TOPIC_DEVICE     = 'device'
+TOPIC_LINK       = 'link'
+TOPIC_SERVICE    = 'service'
+TOPIC_CONNECTION = 'connection'
 
-TOPICS = {TOPIC_CONTEXT, TOPIC_TOPOLOGY, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE}
+TOPICS = {TOPIC_CONTEXT, TOPIC_TOPOLOGY, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_CONNECTION}
 
 CONSUME_TIMEOUT = 0.5 # seconds
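
The new topic plugs into the same publish/consume pattern already used for the other entities; a rough sketch, assuming a MessageBroker instance is available and that notify_event and consume behave as used in ContextServiceServicerImpl.py below:

    import json
    from context.proto.context_pb2 import ConnectionEvent, EventTypeEnum
    from context.service.database.Events import notify_event
    from context.service.grpc_server.Constants import CONSUME_TIMEOUT, TOPIC_CONNECTION

    def emit_and_consume_connection_events(messagebroker, dict_connection_id):
        # Publish a create event for the connection on the new topic...
        notify_event(
            messagebroker, TOPIC_CONNECTION, EventTypeEnum.EVENTTYPE_CREATE,
            {'connection_id': dict_connection_id})
        # ...and rebuild ConnectionEvent messages from the consumed JSON payloads.
        for message in messagebroker.consume({TOPIC_CONNECTION}, consume_timeout=CONSUME_TIMEOUT):
            yield ConnectionEvent(**json.loads(message.content))
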
diff --git a/src/context/service/grpc_server/ContextServiceServicerImpl.py b/src/context/service/grpc_server/ContextServiceServicerImpl.py
index e76c399cd4e17578a01ac7bf88cb0fc3f7017b8e..73c61f355e01eaa9a2e3abd1dd9a2d1b779b22da 100644
--- a/src/context/service/grpc_server/ContextServiceServicerImpl.py
+++ b/src/context/service/grpc_server/ContextServiceServicerImpl.py
@@ -8,35 +8,38 @@ from common.orm.backend.Tools import key_to_str
 from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
 from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
 from context.proto.context_pb2 import (
-    Context, ContextEvent, ContextId, ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList,
-    DeviceList, Empty, EventTypeEnum, Link, LinkEvent, LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId,
-    ServiceIdList, ServiceList, Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList)
+    Connection, ConnectionEvent, ConnectionId, ConnectionIdList, ConnectionList, Context, ContextEvent, ContextId,
+    ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList, DeviceList, Empty, EventTypeEnum, Link,
+    LinkEvent, LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId, ServiceIdList, ServiceList, Topology,
+    TopologyEvent, TopologyId, TopologyIdList, TopologyList)
 from context.proto.context_pb2_grpc import ContextServiceServicer
 from context.service.database.ConfigModel import ConfigModel, ConfigRuleModel, grpc_config_rules_to_raw, update_config
+from context.service.database.ConnectionModel import ConnectionModel, PathHopModel, PathModel, set_path
 from context.service.database.ConstraintModel import ConstraintModel, ConstraintsModel, set_constraints
 from context.service.database.ContextModel import ContextModel
 from context.service.database.DeviceModel import (
     DeviceModel, DriverModel, grpc_to_enum__device_operational_status, set_drivers)
-from context.service.database.EndPointModel import EndPointModel
+from context.service.database.EndPointModel import EndPointModel, KpiSampleTypeModel, set_kpi_sample_types
 from context.service.database.Events import notify_event
 from context.service.database.LinkModel import LinkModel
 from context.service.database.RelationModels import (
-    LinkEndPointModel, ServiceEndPointModel, TopologyDeviceModel, TopologyLinkModel)
+    ConnectionSubServiceModel, LinkEndPointModel, ServiceEndPointModel, TopologyDeviceModel, TopologyLinkModel)
 from context.service.database.ServiceModel import (
     ServiceModel, grpc_to_enum__service_status, grpc_to_enum__service_type)
 from context.service.database.TopologyModel import TopologyModel
 from context.service.grpc_server.Constants import (
-    CONSUME_TIMEOUT, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_TOPOLOGY)
+    CONSUME_TIMEOUT, TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_TOPOLOGY)
 
 LOGGER = logging.getLogger(__name__)
 
 SERVICE_NAME = 'Context'
 METHOD_NAMES = [
-    'ListContextIds',  'ListContexts',   'GetContext',  'SetContext',  'RemoveContext',  'GetContextEvents',
-    'ListTopologyIds', 'ListTopologies', 'GetTopology', 'SetTopology', 'RemoveTopology', 'GetTopologyEvents',
-    'ListDeviceIds',   'ListDevices',    'GetDevice',   'SetDevice',   'RemoveDevice',   'GetDeviceEvents',
-    'ListLinkIds',     'ListLinks',      'GetLink',     'SetLink',     'RemoveLink',     'GetLinkEvents',
-    'ListServiceIds',  'ListServices',   'GetService',  'SetService',  'RemoveService',  'GetServiceEvents',
+    'ListConnectionIds', 'ListConnections', 'GetConnection', 'SetConnection', 'RemoveConnection', 'GetConnectionEvents',
+    'ListContextIds',    'ListContexts',    'GetContext',    'SetContext',    'RemoveContext',    'GetContextEvents',
+    'ListTopologyIds',   'ListTopologies',  'GetTopology',   'SetTopology',   'RemoveTopology',   'GetTopologyEvents',
+    'ListDeviceIds',     'ListDevices',     'GetDevice',     'SetDevice',     'RemoveDevice',     'GetDeviceEvents',
+    'ListLinkIds',       'ListLinks',       'GetLink',       'SetLink',       'RemoveLink',       'GetLinkEvents',
+    'ListServiceIds',    'ListServices',    'GetService',    'SetService',    'RemoveService',    'GetServiceEvents',
 ]
 METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
 
@@ -279,7 +282,9 @@ class ContextServiceServicerImpl(ContextServiceServicer):
 
             result : Tuple[EndPointModel, bool] = update_or_create_object(
                 self.database, EndPointModel, str_endpoint_key, endpoint_attributes)
-            #db_endpoint, updated = result
+            db_endpoint, endpoint_updated = result
+
+            set_kpi_sample_types(self.database, db_endpoint, endpoint.kpi_sample_types)
 
         event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
         dict_device_id = db_device.dump_id()
@@ -296,7 +301,10 @@ class ContextServiceServicerImpl(ContextServiceServicer):
         dict_device_id = db_device.dump_id()
 
         for db_endpoint_pk,_ in db_device.references(EndPointModel):
-            EndPointModel(self.database, db_endpoint_pk).delete()
+            db_endpoint = EndPointModel(self.database, db_endpoint_pk)
+            for db_kpi_sample_type_pk,_ in db_endpoint.references(KpiSampleTypeModel):
+                KpiSampleTypeModel(self.database, db_kpi_sample_type_pk).delete()
+            db_endpoint.delete()
 
         for db_topology_device_pk,_ in db_device.references(TopologyDeviceModel):
             TopologyDeviceModel(self.database, db_topology_device_pk).delete()
@@ -524,3 +532,96 @@ class ContextServiceServicerImpl(ContextServiceServicer):
     def GetServiceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ServiceEvent]:
         for message in self.messagebroker.consume({TOPIC_SERVICE}, consume_timeout=CONSUME_TIMEOUT):
             yield ServiceEvent(**json.loads(message.content))
+
+
+    # ----- Connection -------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListConnectionIds(self, request: ServiceId, context : grpc.ServicerContext) -> ConnectionIdList:
+        str_key = key_to_str([request.context_id.context_uuid.uuid, request.service_uuid.uuid])
+        db_service : ServiceModel = get_object(self.database, ServiceModel, str_key)
+        db_connections : Set[ConnectionModel] = get_related_objects(db_service, ConnectionModel)
+        db_connections = sorted(db_connections, key=operator.attrgetter('pk'))
+        return ConnectionIdList(connection_ids=[db_connection.dump_id() for db_connection in db_connections])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListConnections(self, request: ServiceId, context : grpc.ServicerContext) -> ConnectionList:
+        str_key = key_to_str([request.context_id.context_uuid.uuid, request.service_uuid.uuid])
+        db_service : ServiceModel = get_object(self.database, ServiceModel, str_key)
+        db_connections : Set[ConnectionModel] = get_related_objects(db_service, ConnectionModel)
+        db_connections = sorted(db_connections, key=operator.attrgetter('pk'))
+        return ConnectionList(connections=[db_connection.dump() for db_connection in db_connections])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetConnection(self, request: ConnectionId, context : grpc.ServicerContext) -> Connection:
+        db_connection : ConnectionModel = get_object(self.database, ConnectionModel, request.connection_uuid.uuid)
+        return Connection(**db_connection.dump(include_path=True, include_sub_service_ids=True))
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetConnection(self, request: Connection, context : grpc.ServicerContext) -> ConnectionId:
+        connection_uuid = request.connection_id.connection_uuid.uuid
+
+        connection_attributes = {'connection_uuid': connection_uuid}
+
+        service_context_uuid = request.service_id.context_id.context_uuid.uuid
+        service_uuid = request.service_id.service_uuid.uuid
+        if len(service_context_uuid) > 0 and len(service_uuid) > 0:
+            str_service_key = key_to_str([service_context_uuid, service_uuid])
+            db_service : ServiceModel = get_object(self.database, ServiceModel, str_service_key)
+            connection_attributes['service_fk'] = db_service
+
+        path_hops_result = set_path(self.database, connection_uuid, request.path_hops_endpoint_ids, path_name = '')
+        db_path = path_hops_result[0]
+        connection_attributes['path_fk'] = db_path
+
+        result : Tuple[ConnectionModel, bool] = update_or_create_object(
+            self.database, ConnectionModel, connection_uuid, connection_attributes)
+        db_connection, updated = result
+
+        for sub_service_id in request.sub_service_ids:
+            sub_service_uuid         = sub_service_id.service_uuid.uuid
+            sub_service_context_uuid = sub_service_id.context_id.context_uuid.uuid
+            str_sub_service_key = key_to_str([sub_service_context_uuid, sub_service_uuid])
+            db_service : ServiceModel = get_object(self.database, ServiceModel, str_sub_service_key)
+
+            str_connection_sub_service_key = key_to_str([connection_uuid, str_sub_service_key], separator='--')
+            result : Tuple[ConnectionSubServiceModel, bool] = get_or_create_object(
+                self.database, ConnectionSubServiceModel, str_connection_sub_service_key, {
+                    'connection_fk': db_connection, 'sub_service_fk': db_service})
+            #db_connection_sub_service, connection_sub_service_created = result
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_connection_id = db_connection.dump_id()
+        notify_event(self.messagebroker, TOPIC_CONNECTION, event_type, {'connection_id': dict_connection_id})
+        return ConnectionId(**dict_connection_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveConnection(self, request: ConnectionId, context : grpc.ServicerContext) -> Empty:
+        db_connection = ConnectionModel(self.database, request.connection_uuid.uuid, auto_load=False)
+        found = db_connection.load()
+        if not found: return Empty()
+
+        dict_connection_id = db_connection.dump_id()
+
+        db_path = PathModel(self.database, db_connection.path_fk)
+        for db_path_hop_pk,_ in db_path.references(PathHopModel):
+            PathHopModel(self.database, db_path_hop_pk).delete()
+
+        # Do not remove sub-services automatically. They are backed by real services, so the Service component
+        # should handle the proper removal workflow to deconfigure the underlying devices.
+        for db_connection_sub_service_pk,_ in db_connection.references(ConnectionSubServiceModel):
+            db_connection_sub_service : ConnectionSubServiceModel = get_object(
+                self.database, ConnectionSubServiceModel, db_connection_sub_service_pk)
+            db_connection_sub_service.delete()
+
+        db_connection.delete()
+        db_path.delete()
+
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_CONNECTION, event_type, {'connection_id': dict_connection_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetConnectionEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ConnectionEvent]:
+        for message in self.messagebroker.consume({TOPIC_CONNECTION}, consume_timeout=CONSUME_TIMEOUT):
+            yield ConnectionEvent(**json.loads(message.content))
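
A sketch of how a caller is expected to exercise the new Connection RPCs end to end, assuming ContextClient mirrors the servicer methods (as it does for the other entities in test_unitary.py) and using dicts shaped like the test example objects:

    from context.client.ContextClient import ContextClient
    from context.proto.context_pb2 import Connection, ConnectionId

    def roundtrip_connection(client : ContextClient, connection_dict : dict, connection_id_dict : dict):
        created_id = client.SetConnection(Connection(**connection_dict))        # create or update
        fetched    = client.GetConnection(ConnectionId(**connection_id_dict))   # includes path and sub-service ids
        client.RemoveConnection(ConnectionId(**connection_id_dict))             # drops path hops, keeps sub-services
        return created_id, fetched
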
diff --git a/src/context/service/rest_server/Resources.py b/src/context/service/rest_server/Resources.py
index 54a21ed3984ced399d82f72b7ca7f77e447f0459..9abf60d4855f2b5f996b85378c358d1e8460729c 100644
--- a/src/context/service/rest_server/Resources.py
+++ b/src/context/service/rest_server/Resources.py
@@ -1,26 +1,23 @@
 from flask.json import jsonify
 from flask_restful import Resource
 from google.protobuf.json_format import MessageToDict
-from common.message_broker.Factory import LOGGER
 from common.orm.Database import Database
-from context.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, ServiceId, TopologyId
+from context.proto.context_pb2 import ConnectionId, ContextId, DeviceId, Empty, LinkId, ServiceId, TopologyId
 from context.service.grpc_server.ContextServiceServicerImpl import ContextServiceServicerImpl
 
-def grpc_context_id(context_uuid):
-    return ContextId(**{
-        'context_uuid': {'uuid': context_uuid}
-    })
+def format_grpc_to_json(grpc_reply):
+    return jsonify(MessageToDict(
+        grpc_reply, including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
 
-def grpc_topology_id(context_uuid, topology_uuid):
-    return TopologyId(**{
-        'context_id': {'context_uuid': {'uuid': context_uuid}},
-        'topology_uuid': {'uuid': topology_uuid}
+def grpc_connection_id(connection_uuid):
+    return ConnectionId(**{
+        'connection_uuid': {'uuid': connection_uuid}
     })
 
-def grpc_service_id(context_uuid, service_uuid):
-    return ServiceId(**{
-        'context_id': {'context_uuid': {'uuid': context_uuid}},
-        'service_uuid': {'uuid': service_uuid}
+def grpc_context_id(context_uuid):
+    return ContextId(**{
+        'context_uuid': {'uuid': context_uuid}
     })
 
 def grpc_device_id(device_uuid):
@@ -33,10 +30,17 @@ def grpc_link_id(link_uuid):
         'link_uuid': {'uuid': link_uuid}
     })
 
-def format_grpc_to_json(grpc_reply):
-    return jsonify(MessageToDict(
-        grpc_reply, including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
+def grpc_service_id(context_uuid, service_uuid):
+    return ServiceId(**{
+        'context_id': {'context_uuid': {'uuid': context_uuid}},
+        'service_uuid': {'uuid': service_uuid}
+    })
+
+def grpc_topology_id(context_uuid, topology_uuid):
+    return TopologyId(**{
+        'context_id': {'context_uuid': {'uuid': context_uuid}},
+        'topology_uuid': {'uuid': topology_uuid}
+    })
 
 class _Resource(Resource):
     def __init__(self, database : Database) -> None:
@@ -104,27 +108,44 @@ class Link(_Resource):
     def get(self, link_uuid : str):
         return format_grpc_to_json(self.servicer.GetLink(grpc_link_id(link_uuid), None))
 
+class ConnectionIds(_Resource):
+    def get(self, context_uuid : str, service_uuid : str):
+        return format_grpc_to_json(self.servicer.ListConnectionIds(grpc_service_id(context_uuid, service_uuid), None))
+
+class Connections(_Resource):
+    def get(self, context_uuid : str, service_uuid : str):
+        return format_grpc_to_json(self.servicer.ListConnections(grpc_service_id(context_uuid, service_uuid), None))
+
+class Connection(_Resource):
+    def get(self, connection_uuid : str):
+        return format_grpc_to_json(self.servicer.GetConnection(grpc_connection_id(connection_uuid), None))
+
+
-# Use 'path' type in Service and Sink because service_uuid and link_uuid might contain char '/' and Flask is unable to
-# recognize them in 'string' type.
+# Use 'path' type in Service, Link and Connection because service_uuid, link_uuid and connection_uuid might contain
+# char '/' and Flask is unable to recognize them in 'string' type.
 RESOURCES = [
     # (endpoint_name, resource_class, resource_url)
-    ('api.context_ids',  ContextIds,  '/context_ids'),
-    ('api.contexts',     Contexts,    '/contexts'),
-    ('api.context',      Context,     '/context/<string:context_uuid>'),
+    ('api.context_ids',    ContextIds,    '/context_ids'),
+    ('api.contexts',       Contexts,      '/contexts'),
+    ('api.context',        Context,       '/context/<string:context_uuid>'),
+
+    ('api.topology_ids',   TopologyIds,   '/context/<string:context_uuid>/topology_ids'),
+    ('api.topologies',     Topologies,    '/context/<string:context_uuid>/topologies'),
+    ('api.topology',       Topology,      '/context/<string:context_uuid>/topology/<string:topology_uuid>'),
 
-    ('api.topology_ids', TopologyIds, '/context/<string:context_uuid>/topology_ids'),
-    ('api.topologies',   Topologies,  '/context/<string:context_uuid>/topologies'),
-    ('api.topology',     Topology,    '/context/<string:context_uuid>/topology/<string:topology_uuid>'),
+    ('api.service_ids',    ServiceIds,    '/context/<string:context_uuid>/service_ids'),
+    ('api.services',       Services,      '/context/<string:context_uuid>/services'),
+    ('api.service',        Service,       '/context/<string:context_uuid>/service/<path:service_uuid>'),
 
-    ('api.service_ids',  ServiceIds,  '/context/<string:context_uuid>/service_ids'),
-    ('api.services',     Services,    '/context/<string:context_uuid>/services'),
-    ('api.service',      Service,     '/context/<string:context_uuid>/service/<path:service_uuid>'),
+    ('api.device_ids',     DeviceIds,     '/device_ids'),
+    ('api.devices',        Devices,       '/devices'),
+    ('api.device',         Device,        '/device/<string:device_uuid>'),
 
-    ('api.device_ids',   DeviceIds,   '/device_ids'),
-    ('api.devices',      Devices,     '/devices'),
-    ('api.device',       Device,      '/device/<string:device_uuid>'),
+    ('api.link_ids',       LinkIds,       '/link_ids'),
+    ('api.links',          Links,         '/links'),
+    ('api.link',           Link,          '/link/<path:link_uuid>'),
 
-    ('api.link_ids',     LinkIds,     '/link_ids'),
-    ('api.links',        Links,       '/links'),
-    ('api.link',         Link,        '/link/<path:link_uuid>'),
+    ('api.connection_ids', ConnectionIds, '/context/<string:context_uuid>/service/<path:service_uuid>/connection_ids'),
+    ('api.connections',    Connections,   '/context/<string:context_uuid>/service/<path:service_uuid>/connections'),
+    ('api.connection',     Connection,    '/connection/<path:connection_uuid>'),
 ]
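
The new routes can be smoke-tested over HTTP once the REST server is running; an illustrative sketch, with port and base URL taken from context.Config as in test_unitary.py (adjust host/port to your deployment):

    import requests
    from context.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT

    BASE_URL = 'http://127.0.0.1:{:s}{:s}'.format(str(RESTAPI_SERVICE_PORT), str(RESTAPI_BASE_URL))

    def get_connection_ids(context_uuid : str, service_uuid : str):
        # service_uuid may contain '/', which the <path:...> converter accepts as-is.
        url = '{:s}/context/{:s}/service/{:s}/connection_ids'.format(BASE_URL, context_uuid, service_uuid)
        reply = requests.get(url)
        reply.raise_for_status()
        return reply.json()
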
diff --git a/src/context/service/rest_server/Server.py b/src/context/service/rest_server/Server.py
index 3095d77c48e756dd0c5d655b06a2b0625bcc89d5..5ff40017abdafe44a2fb6569e661457021bef9e5 100644
--- a/src/context/service/rest_server/Server.py
+++ b/src/context/service/rest_server/Server.py
@@ -28,8 +28,8 @@ class Server(threading.Thread):
         self.ctx = self.app.app_context()
         self.ctx.push()
 
-        endpoint = 'http://{}:{}{}'.format(self.host, self.port, self.base_url)
-        LOGGER.info('Listening on {}...'.format(endpoint))
+        endpoint = 'http://{:s}:{:s}{:s}'.format(str(self.host), str(self.port), str(self.base_url))
+        LOGGER.info('Listening on {:s}...'.format(str(endpoint)))
         self.srv.serve_forever()
 
     def shutdown(self):
diff --git a/src/context/tests/Tools.py b/src/context/tests/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..b4d1d8953126fb74b8ab17a71fbba3b19acba733
--- /dev/null
+++ b/src/context/tests/Tools.py
@@ -0,0 +1,27 @@
+import json
+from copy import deepcopy
+from typing import Any, Dict, Union
+from context.proto.context_pb2 import ConfigActionEnum
+
+def config_rule(action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    if not isinstance(resource_value, str): resource_value = json.dumps(resource_value, sort_keys=True)
+    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
+
+def config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_SET, resource_key, resource_value)
+
+def config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value)
+
+def endpoint_id(device_id, endpoint_uuid, topology_id=None):
+    result = {'device_id': deepcopy(device_id), 'endpoint_uuid': {'uuid': endpoint_uuid}}
+    if topology_id is not None: result['topology_id'] = deepcopy(topology_id)
+    return result
+
+def endpoint(device_id, endpoint_uuid, endpoint_type, topology_id=None, kpi_sample_types=None):
+    result = {
+        'endpoint_id': endpoint_id(device_id, endpoint_uuid, topology_id=topology_id),
+        'endpoint_type': endpoint_type,
+    }
+    if kpi_sample_types: result['kpi_sample_types'] = deepcopy(kpi_sample_types)
+    return result
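
The helpers are pure dict builders; the following checks follow directly from the code above (imports assume src/ is on PYTHONPATH, as in the unit tests):

    from context.proto.context_pb2 import ConfigActionEnum
    from context.tests.Tools import config_rule_set, endpoint_id

    DEVICE_ID = {'device_uuid': {'uuid': 'DEV1'}}
    assert endpoint_id(DEVICE_ID, 'EP1') == {                 # no topology_id key when it is not given
        'device_id': {'device_uuid': {'uuid': 'DEV1'}},
        'endpoint_uuid': {'uuid': 'EP1'},
    }
    assert config_rule_set('dev/rsrc1/value', {'a': 1}) == {  # dict values are serialized with sorted keys
        'action': ConfigActionEnum.CONFIGACTION_SET,
        'resource_key': 'dev/rsrc1/value',
        'resource_value': '{"a": 1}',
    }
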
diff --git a/src/context/tests/example_objects.py b/src/context/tests/example_objects.py
index 81339c04e1fe77667bd41179f3fa0813c5fc69df..ef2117bc60dff52e90bca5fd1191064086bdcc08 100644
--- a/src/context/tests/example_objects.py
+++ b/src/context/tests/example_objects.py
@@ -1,21 +1,12 @@
 from copy import deepcopy
 from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
 from context.proto.context_pb2 import (
-    ConfigActionEnum, DeviceDriverEnum, DeviceOperationalStatusEnum, ServiceStatusEnum, ServiceTypeEnum)
+    DeviceDriverEnum, DeviceOperationalStatusEnum, ServiceStatusEnum, ServiceTypeEnum)
+from context.proto.kpi_sample_types_pb2 import KpiSampleType
+from .Tools import config_rule_set, endpoint, endpoint_id
 
 # Some example objects to be used by the tests
 
-# Helper methods
-def config_rule(action, resource_key, resource_value):
-    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
-
-def endpoint_id(topology_id, device_id, endpoint_uuid):
-    return {'topology_id': deepcopy(topology_id), 'device_id': deepcopy(device_id),
-            'endpoint_uuid': {'uuid': endpoint_uuid}}
-
-def endpoint(topology_id, device_id, endpoint_uuid, endpoint_type):
-    return {'endpoint_id': endpoint_id(topology_id, device_id, endpoint_uuid), 'endpoint_type': endpoint_type}
-
 ## use "deepcopy" to prevent propagating forced changes during tests
 CONTEXT_ID = {'context_uuid': {'uuid': DEFAULT_CONTEXT_UUID}}
 CONTEXT = {
@@ -34,22 +25,36 @@ TOPOLOGY = {
     'link_ids': [],
 }
 
+PACKET_PORT_SAMPLE_TYPES = [
+    KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED,
+    KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED,
+    KpiSampleType.KPISAMPLETYPE_BYTES_TRANSMITTED,
+    KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED,
+]
+
+def _endpoint_id(device_id, endpoint_uuid):
+    return endpoint_id(device_id, endpoint_uuid, topology_id=TOPOLOGY_ID)
+
+def _endpoint(device_id, endpoint_uuid, endpoint_type):
+    return endpoint(
+        device_id, endpoint_uuid, endpoint_type, topology_id=TOPOLOGY_ID, kpi_sample_types=PACKET_PORT_SAMPLE_TYPES)
+
 DEVICE1_UUID = 'DEV1'
 DEVICE1_ID = {'device_uuid': {'uuid': DEVICE1_UUID}}
 DEVICE1 = {
     'device_id': deepcopy(DEVICE1_ID),
     'device_type': 'packet-router',
     'device_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value1'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value2'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value3'),
+        config_rule_set('dev/rsrc1/value', 'value1'),
+        config_rule_set('dev/rsrc2/value', 'value2'),
+        config_rule_set('dev/rsrc3/value', 'value3'),
     ]},
     'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
     'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
     'device_endpoints': [
-        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP2', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP3', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP100', 'port-packet-10G'),
+        _endpoint(DEVICE1_ID, 'EP2',   'port-packet-100G'),
+        _endpoint(DEVICE1_ID, 'EP3',   'port-packet-100G'),
+        _endpoint(DEVICE1_ID, 'EP100', 'port-packet-10G' ),
     ],
 }
 
@@ -59,16 +64,16 @@ DEVICE2 = {
     'device_id': deepcopy(DEVICE2_ID),
     'device_type': 'packet-router',
     'device_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+        config_rule_set('dev/rsrc1/value', 'value4'),
+        config_rule_set('dev/rsrc2/value', 'value5'),
+        config_rule_set('dev/rsrc3/value', 'value6'),
     ]},
     'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
     'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
     'device_endpoints': [
-        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP1', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP3', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP100', 'port-packet-10G'),
+        _endpoint(DEVICE2_ID, 'EP1',   'port-packet-100G'),
+        _endpoint(DEVICE2_ID, 'EP3',   'port-packet-100G'),
+        _endpoint(DEVICE2_ID, 'EP100', 'port-packet-10G' ),
     ],
 }
 
@@ -78,16 +83,16 @@ DEVICE3 = {
     'device_id': deepcopy(DEVICE3_ID),
     'device_type': 'packet-router',
     'device_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+        config_rule_set('dev/rsrc1/value', 'value4'),
+        config_rule_set('dev/rsrc2/value', 'value5'),
+        config_rule_set('dev/rsrc3/value', 'value6'),
     ]},
     'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
     'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
     'device_endpoints': [
-        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP1', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP2', 'port-packet-100G'),
-        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP100', 'port-packet-10G'),
+        _endpoint(DEVICE3_ID, 'EP1',   'port-packet-100G'),
+        _endpoint(DEVICE3_ID, 'EP2',   'port-packet-100G'),
+        _endpoint(DEVICE3_ID, 'EP100', 'port-packet-10G' ),
     ],
 }
 
@@ -96,8 +101,8 @@ LINK_DEV1_DEV2_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV2_UUID}}
 LINK_DEV1_DEV2 = {
     'link_id': deepcopy(LINK_DEV1_DEV2_ID),
     'link_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP2'),
-        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP1'),
+        _endpoint_id(DEVICE1_ID, 'EP2'),
+        _endpoint_id(DEVICE2_ID, 'EP1'),
     ]
 }
 
@@ -106,8 +111,8 @@ LINK_DEV2_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV2_DEV3_UUID}}
 LINK_DEV2_DEV3 = {
     'link_id': deepcopy(LINK_DEV2_DEV3_ID),
     'link_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP3'),
-        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP2'),
+        _endpoint_id(DEVICE2_ID, 'EP3'),
+        _endpoint_id(DEVICE3_ID, 'EP2'),
     ]
 }
 
@@ -116,8 +121,8 @@ LINK_DEV1_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV3_UUID}}
 LINK_DEV1_DEV3 = {
     'link_id': deepcopy(LINK_DEV1_DEV3_ID),
     'link_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP3'),
-        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP1'),
+        _endpoint_id(DEVICE1_ID, 'EP3'),
+        _endpoint_id(DEVICE3_ID, 'EP1'),
     ]
 }
 
@@ -130,8 +135,8 @@ SERVICE_DEV1_DEV2 = {
     'service_id': deepcopy(SERVICE_DEV1_DEV2_ID),
     'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
     'service_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
-        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
+        _endpoint_id(DEVICE1_ID, 'EP100'),
+        _endpoint_id(DEVICE2_ID, 'EP100'),
     ],
     'service_constraints': [
         {'constraint_type': 'latency_ms', 'constraint_value': '15.2'},
@@ -139,9 +144,9 @@ SERVICE_DEV1_DEV2 = {
     ],
     'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
     'service_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+        config_rule_set('svc/rsrc1/value', 'value7'),
+        config_rule_set('svc/rsrc2/value', 'value8'),
+        config_rule_set('svc/rsrc3/value', 'value9'),
     ]},
 }
 
@@ -154,8 +159,8 @@ SERVICE_DEV1_DEV3 = {
     'service_id': deepcopy(SERVICE_DEV1_DEV3_ID),
     'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
     'service_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
-        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+        _endpoint_id(DEVICE1_ID, 'EP100'),
+        _endpoint_id(DEVICE3_ID, 'EP100'),
     ],
     'service_constraints': [
         {'constraint_type': 'latency_ms', 'constraint_value': '5.8'},
@@ -163,9 +168,9 @@ SERVICE_DEV1_DEV3 = {
     ],
     'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
     'service_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+        config_rule_set('svc/rsrc1/value', 'value7'),
+        config_rule_set('svc/rsrc2/value', 'value8'),
+        config_rule_set('svc/rsrc3/value', 'value9'),
     ]},
 }
 
@@ -178,8 +183,8 @@ SERVICE_DEV2_DEV3 = {
     'service_id': deepcopy(SERVICE_DEV2_DEV3_ID),
     'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
     'service_endpoint_ids' : [
-        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
-        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+        _endpoint_id(DEVICE2_ID, 'EP100'),
+        _endpoint_id(DEVICE3_ID, 'EP100'),
     ],
     'service_constraints': [
         {'constraint_type': 'latency_ms', 'constraint_value': '23.1'},
@@ -187,8 +192,29 @@ SERVICE_DEV2_DEV3 = {
     ],
     'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
     'service_config': {'config_rules': [
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
-        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+        config_rule_set('svc/rsrc1/value', 'value7'),
+        config_rule_set('svc/rsrc2/value', 'value8'),
+        config_rule_set('svc/rsrc3/value', 'value9'),
     ]},
 }
+
+CONNECTION_DEV1_DEV3_UUID = 'CON:DEV1/EP100-DEV3/EP100'
+CONNECTION_DEV1_DEV3_ID = {
+    'connection_uuid': {'uuid': CONNECTION_DEV1_DEV3_UUID},
+}
+CONNECTION_DEV1_DEV3 = {
+    'connection_id': deepcopy(CONNECTION_DEV1_DEV3_ID),
+    'service_id': deepcopy(SERVICE_DEV1_DEV3_ID),
+    'path_hops_endpoint_ids' : [
+        _endpoint_id(DEVICE1_ID, 'EP100'),
+        _endpoint_id(DEVICE1_ID, 'EP2'),
+        _endpoint_id(DEVICE2_ID, 'EP1'),
+        _endpoint_id(DEVICE2_ID, 'EP3'),
+        _endpoint_id(DEVICE3_ID, 'EP2'),
+        _endpoint_id(DEVICE3_ID, 'EP100'),
+    ],
+    'sub_service_ids': [
+        deepcopy(SERVICE_DEV1_DEV2_ID),
+        deepcopy(SERVICE_DEV2_DEV3_ID),
+    ],
+}
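
As a quick sanity check, the new connection example composes into a valid Connection message whose path crosses DEV1, DEV2 and DEV3 (sketch; assumes the example objects are importable as a package):

    from context.proto.context_pb2 import Connection
    from context.tests.example_objects import CONNECTION_DEV1_DEV3

    connection = Connection(**CONNECTION_DEV1_DEV3)
    assert len(connection.path_hops_endpoint_ids) == 6   # ingress/egress endpoint per traversed device
    assert len(connection.sub_service_ids) == 2          # DEV1-DEV2 and DEV2-DEV3 supporting services
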
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
index 02343f458b06808e33955e43a93bf1a7f9308b34..7d3cd9b965fbc0e4df0acc2ec092a1448095d392 100644
--- a/src/context/tests/test_unitary.py
+++ b/src/context/tests/test_unitary.py
@@ -1,5 +1,6 @@
+# pylint: disable=too-many-lines
 import copy, grpc, logging, os, pytest, requests, threading, time, urllib
-from queue import Queue
+from queue import Queue, Empty as QueueEmpty
 from typing import Tuple
 from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
 from common.orm.Database import Database
@@ -7,16 +8,17 @@ from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBack
 from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
 from common.message_broker.MessageBroker import MessageBroker
 from common.type_checkers.Assertions import (
-    validate_context, validate_context_ids, validate_contexts, validate_device, validate_device_ids, validate_devices,
-    validate_link, validate_link_ids, validate_links, validate_service, validate_service_ids, validate_services,
-    validate_topologies, validate_topology, validate_topology_ids)
+    validate_connection, validate_connection_ids, validate_connections, validate_context, validate_context_ids,
+    validate_contexts, validate_device, validate_device_ids, validate_devices, validate_link, validate_link_ids,
+    validate_links, validate_service, validate_service_ids, validate_services, validate_topologies, validate_topology,
+    validate_topology_ids)
 from context.Config import (
     GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL)
 from context.client.ContextClient import ContextClient
 from context.proto.context_pb2 import (
-    Context, ContextEvent, ContextId, Device, DeviceEvent, DeviceId, DeviceOperationalStatusEnum, Empty,
-    EventTypeEnum, Link, LinkEvent, LinkId, Service, ServiceEvent, ServiceId, ServiceStatusEnum, ServiceTypeEnum,
-    Topology, TopologyEvent, TopologyId)
+    Connection, ConnectionEvent, ConnectionId, Context, ContextEvent, ContextId, Device, DeviceEvent, DeviceId,
+    DeviceOperationalStatusEnum, Empty, EventTypeEnum, Link, LinkEvent, LinkId, Service, ServiceEvent, ServiceId,
+    ServiceStatusEnum, ServiceTypeEnum, Topology, TopologyEvent, TopologyId)
 from context.service.database.Tools import (
     FASTHASHER_DATA_ACCEPTED_FORMAT, FASTHASHER_ITEM_ACCEPTED_FORMAT, fast_hasher)
 from context.service.grpc_server.ContextService import ContextService
@@ -24,9 +26,11 @@ from context.service.Populate import populate
 from context.service.rest_server.Server import Server as RestServer
 from context.service.rest_server.Resources import RESOURCES
 from .example_objects import (
-    CONTEXT, CONTEXT_ID, DEVICE1, DEVICE1_ID, DEVICE1_UUID, DEVICE2, DEVICE2_ID, DEVICE2_UUID, LINK_DEV1_DEV2,
-    LINK_DEV1_DEV2_ID, LINK_DEV1_DEV2_UUID, SERVICE_DEV1_DEV2, SERVICE_DEV1_DEV2_ID, SERVICE_DEV1_DEV2_UUID, TOPOLOGY,
-    TOPOLOGY_ID)
+    CONNECTION_DEV1_DEV3, CONNECTION_DEV1_DEV3_ID, CONNECTION_DEV1_DEV3_UUID, CONTEXT, CONTEXT_ID, DEVICE1, DEVICE1_ID,
+    DEVICE1_UUID, DEVICE2, DEVICE2_ID, DEVICE2_UUID, DEVICE3, DEVICE3_ID, DEVICE3_UUID, LINK_DEV1_DEV2,
+    LINK_DEV1_DEV2_ID, LINK_DEV1_DEV2_UUID, SERVICE_DEV1_DEV2, SERVICE_DEV1_DEV2_ID, SERVICE_DEV1_DEV2_UUID,
+    SERVICE_DEV1_DEV3, SERVICE_DEV1_DEV3_ID, SERVICE_DEV1_DEV3_UUID, SERVICE_DEV2_DEV3, SERVICE_DEV2_DEV3_ID,
+    SERVICE_DEV2_DEV3_UUID, TOPOLOGY, TOPOLOGY_ID)
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
@@ -98,17 +102,19 @@ class EventsCollector:
     def __init__(self, context_client_grpc : ContextClient) -> None: # pylint: disable=redefined-outer-name
         self._events_queue = Queue()
 
-        self._context_stream  = context_client_grpc.GetContextEvents(Empty())
-        self._topology_stream = context_client_grpc.GetTopologyEvents(Empty())
-        self._device_stream   = context_client_grpc.GetDeviceEvents(Empty())
-        self._link_stream     = context_client_grpc.GetLinkEvents(Empty())
-        self._service_stream  = context_client_grpc.GetServiceEvents(Empty())
+        self._context_stream    = context_client_grpc.GetContextEvents(Empty())
+        self._topology_stream   = context_client_grpc.GetTopologyEvents(Empty())
+        self._device_stream     = context_client_grpc.GetDeviceEvents(Empty())
+        self._link_stream       = context_client_grpc.GetLinkEvents(Empty())
+        self._service_stream    = context_client_grpc.GetServiceEvents(Empty())
+        self._connection_stream = context_client_grpc.GetConnectionEvents(Empty())
 
-        self._context_thread  = threading.Thread(target=self._collect, args=(self._context_stream ,), daemon=False)
-        self._topology_thread = threading.Thread(target=self._collect, args=(self._topology_stream,), daemon=False)
-        self._device_thread   = threading.Thread(target=self._collect, args=(self._device_stream  ,), daemon=False)
-        self._link_thread     = threading.Thread(target=self._collect, args=(self._link_stream    ,), daemon=False)
-        self._service_thread  = threading.Thread(target=self._collect, args=(self._service_stream ,), daemon=False)
+        self._context_thread    = threading.Thread(target=self._collect, args=(self._context_stream   ,), daemon=False)
+        self._topology_thread   = threading.Thread(target=self._collect, args=(self._topology_stream  ,), daemon=False)
+        self._device_thread     = threading.Thread(target=self._collect, args=(self._device_stream    ,), daemon=False)
+        self._link_thread       = threading.Thread(target=self._collect, args=(self._link_stream      ,), daemon=False)
+        self._service_thread    = threading.Thread(target=self._collect, args=(self._service_stream   ,), daemon=False)
+        self._connection_thread = threading.Thread(target=self._collect, args=(self._connection_stream,), daemon=False)
 
     def _collect(self, events_stream) -> None:
         try:
@@ -124,28 +130,47 @@ class EventsCollector:
         self._device_thread.start()
         self._link_thread.start()
         self._service_thread.start()
+        self._connection_thread.start()
 
     def get_event(self, block : bool = True, timeout : float = 0.1):
         return self._events_queue.get(block=block, timeout=timeout)
 
+    def get_events(self, block : bool = True, timeout : float = 0.1, count : int = None):
+        events = []
+        if count is None:
+            while True:
+                try:
+                    events.append(self.get_event(block=block, timeout=timeout))
+                except QueueEmpty:
+                    break
+        else:
+            for _ in range(count):
+                try:
+                    events.append(self.get_event(block=block, timeout=timeout))
+                except QueueEmpty:
+                    pass
+        return sorted(events, key=lambda e: e.event.timestamp)
+
     def stop(self):
         self._context_stream.cancel()
         self._topology_stream.cancel()
         self._device_stream.cancel()
         self._link_stream.cancel()
         self._service_stream.cancel()
+        self._connection_stream.cancel()
 
         self._context_thread.join()
         self._topology_thread.join()
         self._device_thread.join()
         self._link_thread.join()
         self._service_thread.join()
+        self._connection_thread.join()
 
 
 # ----- Test gRPC methods ----------------------------------------------------------------------------------------------
 
 def test_grpc_context(
-    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
     context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
     context_database = context_db_mb[0]
 
@@ -266,7 +291,7 @@ def test_grpc_context(
 
 
 def test_grpc_topology(
-    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
     context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
     context_database = context_db_mb[0]
 
@@ -318,16 +343,16 @@ def test_grpc_topology(
     assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Check create event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    events = events_collector.get_events(block=True, count=2)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(events[0], TopologyEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[0].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[1], ContextEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert events[1].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Update the object ------------------------------------------------------------------------------------------
     response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
@@ -374,16 +399,16 @@ def test_grpc_topology(
     context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    events = events_collector.get_events(block=True, count=2)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(events[0], TopologyEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[0].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[0].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[1], ContextEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[1].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
     events_collector.stop()
@@ -398,7 +423,7 @@ def test_grpc_topology(
 
 
 def test_grpc_device(
-    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
     context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
     context_database = context_db_mb[0]
 
@@ -417,16 +442,16 @@ def test_grpc_device(
     assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
     assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    events = events_collector.get_events(block=True, count=2)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(events[0], ContextEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    assert isinstance(events[1], TopologyEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[1].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
 
     # ----- Get when the object does not exist -------------------------------------------------------------------------
     with pytest.raises(grpc.RpcError) as e:
@@ -484,7 +509,7 @@ def test_grpc_device(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 25
+    assert len(db_entries) == 41
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client_grpc.GetDevice(DeviceId(**DEVICE1_ID))
@@ -537,7 +562,7 @@ def test_grpc_device(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 25
+    assert len(db_entries) == 41
 
     # ----- Remove the object ------------------------------------------------------------------------------------------
     context_client_grpc.RemoveDevice(DeviceId(**DEVICE1_ID))
@@ -545,21 +570,20 @@ def test_grpc_device(
     context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+    events = events_collector.get_events(block=True, count=3)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(events[0], DeviceEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[0].device_id.device_uuid.uuid == DEVICE1_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(events[1], TopologyEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[1].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[2], ContextEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[2].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
     events_collector.stop()
@@ -574,7 +598,7 @@ def test_grpc_device(
 
 
 def test_grpc_link(
-    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
     context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
     context_database = context_db_mb[0]
 
@@ -599,26 +623,24 @@ def test_grpc_link(
     response = context_client_grpc.SetDevice(Device(**DEVICE2))
     assert response.device_uuid.uuid == DEVICE2_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    events = events_collector.get_events(block=True, count=4)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(events[0], ContextEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+    assert isinstance(events[1], TopologyEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[1].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+    assert isinstance(events[2], DeviceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[2].device_id.device_uuid.uuid == DEVICE1_UUID
+
+    assert isinstance(events[3], DeviceEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[3].device_id.device_uuid.uuid == DEVICE2_UUID
 
     # ----- Get when the object does not exist -------------------------------------------------------------------------
     with pytest.raises(grpc.RpcError) as e:
@@ -639,7 +661,7 @@ def test_grpc_link(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 38
+    assert len(db_entries) == 69
 
     # ----- Create the object ------------------------------------------------------------------------------------------
     response = context_client_grpc.SetLink(Link(**LINK_DEV1_DEV2))
@@ -667,7 +689,7 @@ def test_grpc_link(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 48
+    assert len(db_entries) == 77
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client_grpc.GetLink(LinkId(**LINK_DEV1_DEV2_ID))
@@ -713,7 +735,7 @@ def test_grpc_link(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 48
+    assert len(db_entries) == 77
 
     # ----- Remove the object ------------------------------------------------------------------------------------------
     context_client_grpc.RemoveLink(LinkId(**LINK_DEV1_DEV2_ID))
@@ -723,31 +745,28 @@ def test_grpc_link(
     context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, LinkEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+    events = events_collector.get_events(block=True, count=5)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+    assert isinstance(events[0], LinkEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[0].link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+    assert isinstance(events[1], DeviceEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[1].device_id.device_uuid.uuid == DEVICE1_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(events[2], DeviceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[2].device_id.device_uuid.uuid == DEVICE2_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(events[3], TopologyEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[3].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[3].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[4], ContextEvent)
+    assert events[4].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[4].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
     events_collector.stop()
@@ -762,7 +781,7 @@ def test_grpc_link(
 
 
 def test_grpc_service(
-    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
     context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
     context_database = context_db_mb[0]
 
@@ -787,26 +806,24 @@ def test_grpc_service(
     response = context_client_grpc.SetDevice(Device(**DEVICE2))
     assert response.device_uuid.uuid == DEVICE2_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    events = events_collector.get_events(block=True, count=4)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(events[0], ContextEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+    assert isinstance(events[1], TopologyEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[1].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+    assert isinstance(events[2], DeviceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[2].device_id.device_uuid.uuid == DEVICE1_UUID
+
+    assert isinstance(events[3], DeviceEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[3].device_id.device_uuid.uuid == DEVICE2_UUID
 
     # ----- Get when the object does not exist -------------------------------------------------------------------------
     with pytest.raises(grpc.RpcError) as e:
@@ -827,7 +844,7 @@ def test_grpc_service(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 38
+    assert len(db_entries) == 69
 
     # ----- Create the object ------------------------------------------------------------------------------------------
     with pytest.raises(grpc.RpcError) as e:
@@ -850,16 +867,16 @@ def test_grpc_service(
     assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Check create event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ServiceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
-    assert event.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+    events = events_collector.get_events(block=True, count=2)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(events[0], ServiceEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[0].service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    assert isinstance(events[1], ContextEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert events[1].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Update the object ------------------------------------------------------------------------------------------
     response = context_client_grpc.SetService(Service(**SERVICE_DEV1_DEV2))
@@ -879,7 +896,7 @@ def test_grpc_service(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 57
+    assert len(db_entries) == 86
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client_grpc.GetService(ServiceId(**SERVICE_DEV1_DEV2_ID))
@@ -915,31 +932,279 @@ def test_grpc_service(
     context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
 
     # ----- Check remove event -----------------------------------------------------------------------------------------
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, ServiceEvent)
-    assert event.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+    events = events_collector.get_events(block=True, count=5)
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+    assert isinstance(events[0], ServiceEvent)
+    assert events[0].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[0].service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
 
-    event = events_collector.get_event(block=True)
-    assert isinstance(event, DeviceEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+    assert isinstance(events[1], DeviceEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[1].device_id.device_uuid.uuid == DEVICE1_UUID
+
+    assert isinstance(events[2], DeviceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[2].device_id.device_uuid.uuid == DEVICE2_UUID
+
+    assert isinstance(events[3], TopologyEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[3].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[3].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
 
+    assert isinstance(events[4], ContextEvent)
+    assert events[4].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[4].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+def test_grpc_connection(
+    context_client_grpc : ContextClient,                # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
+
+    # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE2))
+    assert response.device_uuid.uuid == DEVICE2_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE3))
+    assert response.device_uuid.uuid == DEVICE3_UUID
+
+    response = context_client_grpc.SetService(Service(**SERVICE_DEV1_DEV2))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    CONTEXT_WITH_SERVICE = copy.deepcopy(CONTEXT)
+    CONTEXT_WITH_SERVICE['service_ids'].append(SERVICE_DEV1_DEV2_ID)
+    response = context_client_grpc.SetContext(Context(**CONTEXT_WITH_SERVICE))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetService(Service(**SERVICE_DEV2_DEV3))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_uuid.uuid == SERVICE_DEV2_DEV3_UUID
+
+    CONTEXT_WITH_SERVICE = copy.deepcopy(CONTEXT)
+    CONTEXT_WITH_SERVICE['service_ids'].append(SERVICE_DEV2_DEV3_ID)
+    response = context_client_grpc.SetContext(Context(**CONTEXT_WITH_SERVICE))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetService(Service(**SERVICE_DEV1_DEV3))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_uuid.uuid == SERVICE_DEV1_DEV3_UUID
+
+    CONTEXT_WITH_SERVICE = copy.deepcopy(CONTEXT)
+    CONTEXT_WITH_SERVICE['service_ids'].append(SERVICE_DEV1_DEV3_ID)
+    response = context_client_grpc.SetContext(Context(**CONTEXT_WITH_SERVICE))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
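+    # Eleven events are expected: context, topology, three devices, and one service-create/context-update pair per service.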
+    events = events_collector.get_events(block=True, count=11)
+
+    assert isinstance(events[0], ContextEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    assert isinstance(events[1], TopologyEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[1].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[2], DeviceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[2].device_id.device_uuid.uuid == DEVICE1_UUID
+
+    assert isinstance(events[3], DeviceEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[3].device_id.device_uuid.uuid == DEVICE2_UUID
+
+    assert isinstance(events[4], DeviceEvent)
+    assert events[4].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[4].device_id.device_uuid.uuid == DEVICE3_UUID
+
+    assert isinstance(events[5], ServiceEvent)
+    assert events[5].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[5].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[5].service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    assert isinstance(events[6], ContextEvent)
+    assert events[6].event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert events[6].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    assert isinstance(events[7], ServiceEvent)
+    assert events[7].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[7].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[7].service_id.service_uuid.uuid == SERVICE_DEV2_DEV3_UUID
+
+    assert isinstance(events[8], ContextEvent)
+    assert events[8].event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert events[8].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    assert isinstance(events[9], ServiceEvent)
+    assert events[9].event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert events[9].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[9].service_id.service_uuid.uuid == SERVICE_DEV1_DEV3_UUID
+
+    assert isinstance(events[10], ContextEvent)
+    assert events[10].event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert events[10].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetConnection(ConnectionId(**CONNECTION_DEV1_DEV3_ID))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    assert e.value.details() == 'Connection({:s}) not found'.format(CONNECTION_DEV1_DEV3_UUID)
+
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListConnectionIds(ServiceId(**SERVICE_DEV1_DEV3_ID))
+    assert len(response.connection_ids) == 0
+
+    response = context_client_grpc.ListConnections(ServiceId(**SERVICE_DEV1_DEV3_ID))
+    assert len(response.connections) == 0
+
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 140
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        WRONG_CONNECTION = copy.deepcopy(CONNECTION_DEV1_DEV3)
+        WRONG_CONNECTION['path_hops_endpoint_ids'][0]\
+            ['topology_id']['context_id']['context_uuid']['uuid'] = 'wrong-context-uuid'
+        context_client_grpc.SetConnection(Connection(**WRONG_CONNECTION))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    # TODO: should we check that all endpoints belong to same topology?
+    # TODO: should we check that endpoints form links over the topology?
+    msg = 'EndPoint(DEV1/EP100:wrong-context-uuid/admin) not found'
+    assert e.value.details() == msg
+
+    response = context_client_grpc.SetConnection(Connection(**CONNECTION_DEV1_DEV3))
+    assert response.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
     event = events_collector.get_event(block=True)
-    assert isinstance(event, TopologyEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
-    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert isinstance(event, ConnectionEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.connection_id.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
 
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetConnection(Connection(**CONNECTION_DEV1_DEV3))
+    assert response.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
     event = events_collector.get_event(block=True)
-    assert isinstance(event, ContextEvent)
-    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
-    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert isinstance(event, ConnectionEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.connection_id.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 156
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetConnection(ConnectionId(**CONNECTION_DEV1_DEV3_ID))
+    assert response.connection_id.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+    assert response.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_id.service_uuid.uuid == SERVICE_DEV1_DEV3_UUID
+    assert len(response.path_hops_endpoint_ids) == 6
+    assert len(response.sub_service_ids) == 2
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListConnectionIds(ServiceId(**SERVICE_DEV1_DEV3_ID))
+    assert len(response.connection_ids) == 1
+    assert response.connection_ids[0].connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+
+    response = context_client_grpc.ListConnections(ServiceId(**SERVICE_DEV1_DEV3_ID))
+    assert len(response.connections) == 1
+    assert response.connections[0].connection_id.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+    assert len(response.connections[0].path_hops_endpoint_ids) == 6
+    assert len(response.connections[0].sub_service_ids) == 2
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveConnection(ConnectionId(**CONNECTION_DEV1_DEV3_ID))
+    context_client_grpc.RemoveService(ServiceId(**SERVICE_DEV1_DEV3_ID))
+    context_client_grpc.RemoveService(ServiceId(**SERVICE_DEV2_DEV3_ID))
+    context_client_grpc.RemoveService(ServiceId(**SERVICE_DEV1_DEV2_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE1_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE2_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE3_ID))
+    context_client_grpc.RemoveTopology(TopologyId(**TOPOLOGY_ID))
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    events = events_collector.get_events(block=True, count=9)
+
+    assert isinstance(events[0], ConnectionEvent)
+    assert events[0].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[0].connection_id.connection_uuid.uuid == CONNECTION_DEV1_DEV3_UUID
+
+    assert isinstance(events[1], ServiceEvent)
+    assert events[1].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[1].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[1].service_id.service_uuid.uuid == SERVICE_DEV1_DEV3_UUID
+
+    assert isinstance(events[2], ServiceEvent)
+    assert events[2].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[2].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[2].service_id.service_uuid.uuid == SERVICE_DEV2_DEV3_UUID
+
+    assert isinstance(events[3], ServiceEvent)
+    assert events[3].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[3].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[3].service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    assert isinstance(events[4], DeviceEvent)
+    assert events[4].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[4].device_id.device_uuid.uuid == DEVICE1_UUID
+
+    assert isinstance(events[5], DeviceEvent)
+    assert events[5].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[5].device_id.device_uuid.uuid == DEVICE2_UUID
+
+    assert isinstance(events[6], DeviceEvent)
+    assert events[6].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[6].device_id.device_uuid.uuid == DEVICE3_UUID
+
+    assert isinstance(events[7], TopologyEvent)
+    assert events[7].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[7].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert events[7].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    assert isinstance(events[8], ContextEvent)
+    assert events[8].event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert events[8].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
     # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
     events_collector.stop()
@@ -963,7 +1228,6 @@ def test_rest_populate_database(
     database.clear_all()
     populate('127.0.0.1', GRPC_PORT)
 
-
 def test_rest_get_context_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
     reply = do_rest_request('/context_ids')
     validate_context_ids(reply)
@@ -973,39 +1237,39 @@ def test_rest_get_contexts(context_service_rest : RestServer): # pylint: disable
     validate_contexts(reply)
 
 def test_rest_get_context(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
     reply = do_rest_request('/context/{:s}'.format(context_uuid))
     validate_context(reply)
 
 def test_rest_get_topology_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
     reply = do_rest_request('/context/{:s}/topology_ids'.format(context_uuid))
     validate_topology_ids(reply)
 
 def test_rest_get_topologies(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
     reply = do_rest_request('/context/{:s}/topologies'.format(context_uuid))
     validate_topologies(reply)
 
 def test_rest_get_topology(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
-    topology_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+    topology_uuid = urllib.parse.quote(DEFAULT_TOPOLOGY_UUID)
     reply = do_rest_request('/context/{:s}/topology/{:s}'.format(context_uuid, topology_uuid))
     validate_topology(reply, num_devices=3, num_links=3)
 
 def test_rest_get_service_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
     reply = do_rest_request('/context/{:s}/service_ids'.format(context_uuid))
     validate_service_ids(reply)
 
 def test_rest_get_services(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
     reply = do_rest_request('/context/{:s}/services'.format(context_uuid))
     validate_services(reply)
 
 def test_rest_get_service(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    context_uuid = urllib.parse.quote('admin')
-    service_uuid = urllib.parse.quote('SVC:DEV1/EP100-DEV2/EP100', safe='')
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+    service_uuid = urllib.parse.quote(SERVICE_DEV1_DEV2_UUID, safe='')
     reply = do_rest_request('/context/{:s}/service/{:s}'.format(context_uuid, service_uuid))
     validate_service(reply)
 
@@ -1018,7 +1282,7 @@ def test_rest_get_devices(context_service_rest : RestServer): # pylint: disable=
     validate_devices(reply)
 
 def test_rest_get_device(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    device_uuid = urllib.parse.quote('DEV1', safe='')
+    device_uuid = urllib.parse.quote(DEVICE1_UUID, safe='')
     reply = do_rest_request('/device/{:s}'.format(device_uuid))
     validate_device(reply)
 
@@ -1031,10 +1295,27 @@ def test_rest_get_links(context_service_rest : RestServer): # pylint: disable=re
     validate_links(reply)
 
 def test_rest_get_link(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
-    link_uuid = urllib.parse.quote('DEV1/EP2 ==> DEV2/EP1', safe='')
+    link_uuid = urllib.parse.quote(LINK_DEV1_DEV2_UUID, safe='')
     reply = do_rest_request('/link/{:s}'.format(link_uuid))
     validate_link(reply)
 
+def test_rest_get_connection_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+    service_uuid = urllib.parse.quote(SERVICE_DEV1_DEV3_UUID, safe='')
+    reply = do_rest_request('/context/{:s}/service/{:s}/connection_ids'.format(context_uuid, service_uuid))
+    validate_connection_ids(reply)
+
+def test_rest_get_connections(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    context_uuid = urllib.parse.quote(DEFAULT_CONTEXT_UUID)
+    service_uuid = urllib.parse.quote(SERVICE_DEV1_DEV3_UUID, safe='')
+    reply = do_rest_request('/context/{:s}/service/{:s}/connections'.format(context_uuid, service_uuid))
+    validate_connections(reply)
+
+def test_rest_get_connection(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    connection_uuid = urllib.parse.quote(CONNECTION_DEV1_DEV3_UUID, safe='')
+    reply = do_rest_request('/connection/{:s}'.format(connection_uuid))
+    validate_connection(reply)
+
 
 # ----- Test misc. Context internal tools ------------------------------------------------------------------------------
 
diff --git a/src/dbscanserving/.gitlab-ci.yml b/src/dbscanserving/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..de704cf861f82e8e3a976461db6658d7ad2e888a
--- /dev/null
+++ b/src/dbscanserving/.gitlab-ci.yml
@@ -0,0 +1,83 @@
+# build, tag and push the Docker image to the gitlab registry
+build dbscanserving:
+  variables:
+    IMAGE_NAME: 'dbscanserving' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  # after_script:
+  #   - docker rmi $(docker images --quiet --filter=dangling=true)
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+# apply unit test to the dbscanserving component
+unit test dbscanserving:
+  variables:
+    IMAGE_NAME: 'dbscanserving' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: unit_test
+  needs:
+    - build dbscanserving
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker run --name $IMAGE_NAME -d -p 10006:10006 --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
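+    # give the gRPC server inside the container a few seconds to start before running the tests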
+    - sleep 5
+    - docker ps -a
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+  after_script:
+    #- docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+
+# Deployment of the dbscanserving service in Kubernetes Cluster
+deploy dbscanserving:
+  variables:
+    IMAGE_NAME: 'dbscanserving' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: deploy
+  needs:
+    - unit test dbscanserving
+    # - integ_test execute
+  script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+    - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
\ No newline at end of file
diff --git a/src/centralizedattackdetector/Config.py b/src/dbscanserving/Config.py
similarity index 72%
rename from src/centralizedattackdetector/Config.py
rename to src/dbscanserving/Config.py
index 3a122c7590bdda97f7be1cc4ec331cb86b5f8dd4..ec89bbb0f8b37539e06f9cc101c0f461a3bc2a82 100644
--- a/src/centralizedattackdetector/Config.py
+++ b/src/dbscanserving/Config.py
@@ -1,10 +1,10 @@
 import logging
 
 # General settings
-LOG_LEVEL = logging.WARNING
+LOG_LEVEL = logging.DEBUG
 
 # gRPC settings
-GRPC_SERVICE_PORT = 10000
+GRPC_SERVICE_PORT = 10006
 GRPC_MAX_WORKERS  = 10
 GRPC_GRACE_PERIOD = 60
 
diff --git a/src/dbscanserving/Dockerfile b/src/dbscanserving/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..aa62cbe93cb008085762abbe0ddaae5a31f0fb73
--- /dev/null
+++ b/src/dbscanserving/Dockerfile
@@ -0,0 +1,34 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=1
+
+# Download the gRPC health probe
+RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
+    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
+    chmod +x /bin/grpc_health_probe
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/dbscanserving
+
+# Get Python packages per module
+COPY dbscanserving/requirements.in dbscanserving/requirements.in
+RUN pip-compile --output-file=dbscanserving/requirements.txt dbscanserving/requirements.in
+RUN python3 -m pip install -r dbscanserving/requirements.txt
+
+COPY common/. common
+COPY dbscanserving/. dbscanserving
+
+# Start dbscanserving service
+ENTRYPOINT ["python", "-m", "dbscanserving.service"]
diff --git a/src/compute/service/rest_server/resources/__init__.py b/src/dbscanserving/__init__.py
similarity index 100%
rename from src/compute/service/rest_server/resources/__init__.py
rename to src/dbscanserving/__init__.py
diff --git a/src/dbscanserving/client/DbscanServingClient.py b/src/dbscanserving/client/DbscanServingClient.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d6614b5dc44c2b916fb298217fef4a8f1b8e72c
--- /dev/null
+++ b/src/dbscanserving/client/DbscanServingClient.py
@@ -0,0 +1,33 @@
+import grpc, logging
+from common.tools.client.RetryDecorator import retry, delay_exponential
+from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
+from dbscanserving.proto.dbscanserving_pb2_grpc import DetectorStub
+
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+
+class DbscanServingClient:
+    def __init__(self, address, port):
+        self.endpoint = '{:s}:{:s}'.format(str(address), str(port))
+        LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
+        self.channel = None
+        self.stub = None
+        self.connect()
+        LOGGER.debug('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = DetectorStub(self.channel)
+
+    def close(self):
+        if(self.channel is not None): self.channel.close()
+        self.channel = None
+        self.stub = None
+
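+    # On failure, the retry decorator re-creates the channel via connect() and waits with exponential backoff between attempts.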
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def Detect(self, request : DetectionRequest) -> DetectionResponse:
+        LOGGER.debug('Detect request: {:s}'.format(str(request)))
+        response = self.stub.Detect(request)
+        LOGGER.debug('Detect result: {:s}'.format(str(response)))
+        return response
diff --git a/src/dbscanserving/client/__init__.py b/src/dbscanserving/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/dbscanserving/genproto.sh b/src/dbscanserving/genproto.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d44156c2f327fe2344cb17d59fa51b3ead9bc43b
--- /dev/null
+++ b/src/dbscanserving/genproto.sh
@@ -0,0 +1,30 @@
+#!/bin/bash -eu
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Make folder containing the script the root folder for its execution
+cd $(dirname $0)
+
+rm -rf proto/*.py
+rm -rf proto/__pycache__
+touch proto/__init__.py
+
+# building current service protos
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto dbscanserving.proto
+
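+# rewrite the absolute imports in the generated modules as relative imports so 'proto' can be used as a Python package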
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/dbscanserving_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/dbscanserving_pb2_grpc.py
diff --git a/src/dbscanserving/proto/__init__.py b/src/dbscanserving/proto/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/dbscanserving/proto/dbscanserving_pb2.py b/src/dbscanserving/proto/dbscanserving_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5e464db4db39c2d3f6a5530c5d702e4bf25b8a0
--- /dev/null
+++ b/src/dbscanserving/proto/dbscanserving_pb2.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: dbscanserving.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='dbscanserving.proto',
+  package='dbscanserving',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x13\x64\x62scanserving.proto\x12\rdbscanserving\"\x1a\n\x06Sample\x12\x10\n\x08\x66\x65\x61tures\x18\x01 \x03(\x02\"\xd6\x01\n\x10\x44\x65tectionRequest\x12\x0b\n\x03\x65ps\x18\x01 \x01(\x02\x12\x13\n\x0bmin_samples\x18\x02 \x01(\x05\x12%\n\x06metric\x18\x03 \x01(\x0e\x32\x15.dbscanserving.Metric\x12\x13\n\x0bnum_samples\x18\x04 \x01(\x05\x12\x14\n\x0cnum_features\x18\x05 \x01(\x05\x12&\n\x07samples\x18\x06 \x03(\x0b\x32\x15.dbscanserving.Sample\x12\x17\n\nidentifier\x18\x07 \x01(\x05H\x00\x88\x01\x01\x42\r\n\x0b_identifier\",\n\x11\x44\x65tectionResponse\x12\x17\n\x0f\x63luster_indices\x18\x01 \x03(\x05*\x17\n\x06Metric\x12\r\n\tEUCLIDEAN\x10\x00\x32W\n\x08\x44\x65tector\x12K\n\x06\x44\x65tect\x12\x1f.dbscanserving.DetectionRequest\x1a .dbscanserving.DetectionResponseb\x06proto3'
+)
+
+_METRIC = _descriptor.EnumDescriptor(
+  name='Metric',
+  full_name='dbscanserving.Metric',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='EUCLIDEAN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=329,
+  serialized_end=352,
+)
+_sym_db.RegisterEnumDescriptor(_METRIC)
+
+Metric = enum_type_wrapper.EnumTypeWrapper(_METRIC)
+EUCLIDEAN = 0
+
+
+
+_SAMPLE = _descriptor.Descriptor(
+  name='Sample',
+  full_name='dbscanserving.Sample',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='features', full_name='dbscanserving.Sample.features', index=0,
+      number=1, type=2, cpp_type=6, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=38,
+  serialized_end=64,
+)
+
+
+_DETECTIONREQUEST = _descriptor.Descriptor(
+  name='DetectionRequest',
+  full_name='dbscanserving.DetectionRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='eps', full_name='dbscanserving.DetectionRequest.eps', index=0,
+      number=1, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='min_samples', full_name='dbscanserving.DetectionRequest.min_samples', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='metric', full_name='dbscanserving.DetectionRequest.metric', index=2,
+      number=3, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='num_samples', full_name='dbscanserving.DetectionRequest.num_samples', index=3,
+      number=4, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='num_features', full_name='dbscanserving.DetectionRequest.num_features', index=4,
+      number=5, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='samples', full_name='dbscanserving.DetectionRequest.samples', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='identifier', full_name='dbscanserving.DetectionRequest.identifier', index=6,
+      number=7, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+    _descriptor.OneofDescriptor(
+      name='_identifier', full_name='dbscanserving.DetectionRequest._identifier',
+      index=0, containing_type=None,
+      create_key=_descriptor._internal_create_key,
+    fields=[]),
+  ],
+  serialized_start=67,
+  serialized_end=281,
+)
+
+
+_DETECTIONRESPONSE = _descriptor.Descriptor(
+  name='DetectionResponse',
+  full_name='dbscanserving.DetectionResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='cluster_indices', full_name='dbscanserving.DetectionResponse.cluster_indices', index=0,
+      number=1, type=5, cpp_type=1, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=283,
+  serialized_end=327,
+)
+
+_DETECTIONREQUEST.fields_by_name['metric'].enum_type = _METRIC
+_DETECTIONREQUEST.fields_by_name['samples'].message_type = _SAMPLE
+_DETECTIONREQUEST.oneofs_by_name['_identifier'].fields.append(
+  _DETECTIONREQUEST.fields_by_name['identifier'])
+_DETECTIONREQUEST.fields_by_name['identifier'].containing_oneof = _DETECTIONREQUEST.oneofs_by_name['_identifier']
+DESCRIPTOR.message_types_by_name['Sample'] = _SAMPLE
+DESCRIPTOR.message_types_by_name['DetectionRequest'] = _DETECTIONREQUEST
+DESCRIPTOR.message_types_by_name['DetectionResponse'] = _DETECTIONRESPONSE
+DESCRIPTOR.enum_types_by_name['Metric'] = _METRIC
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), {
+  'DESCRIPTOR' : _SAMPLE,
+  '__module__' : 'dbscanserving_pb2'
+  # @@protoc_insertion_point(class_scope:dbscanserving.Sample)
+  })
+_sym_db.RegisterMessage(Sample)
+
+DetectionRequest = _reflection.GeneratedProtocolMessageType('DetectionRequest', (_message.Message,), {
+  'DESCRIPTOR' : _DETECTIONREQUEST,
+  '__module__' : 'dbscanserving_pb2'
+  # @@protoc_insertion_point(class_scope:dbscanserving.DetectionRequest)
+  })
+_sym_db.RegisterMessage(DetectionRequest)
+
+DetectionResponse = _reflection.GeneratedProtocolMessageType('DetectionResponse', (_message.Message,), {
+  'DESCRIPTOR' : _DETECTIONRESPONSE,
+  '__module__' : 'dbscanserving_pb2'
+  # @@protoc_insertion_point(class_scope:dbscanserving.DetectionResponse)
+  })
+_sym_db.RegisterMessage(DetectionResponse)
+
+
+
+_DETECTOR = _descriptor.ServiceDescriptor(
+  name='Detector',
+  full_name='dbscanserving.Detector',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=354,
+  serialized_end=441,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='Detect',
+    full_name='dbscanserving.Detector.Detect',
+    index=0,
+    containing_service=None,
+    input_type=_DETECTIONREQUEST,
+    output_type=_DETECTIONRESPONSE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_DETECTOR)
+
+DESCRIPTOR.services_by_name['Detector'] = _DETECTOR
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/dbscanserving/proto/dbscanserving_pb2_grpc.py b/src/dbscanserving/proto/dbscanserving_pb2_grpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..895ced1484df2101bb055f28b6a6d3631e7e68da
--- /dev/null
+++ b/src/dbscanserving/proto/dbscanserving_pb2_grpc.py
@@ -0,0 +1,66 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from . import dbscanserving_pb2 as dbscanserving__pb2
+
+
+class DetectorStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.Detect = channel.unary_unary(
+                '/dbscanserving.Detector/Detect',
+                request_serializer=dbscanserving__pb2.DetectionRequest.SerializeToString,
+                response_deserializer=dbscanserving__pb2.DetectionResponse.FromString,
+                )
+
+
+class DetectorServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def Detect(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_DetectorServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'Detect': grpc.unary_unary_rpc_method_handler(
+                    servicer.Detect,
+                    request_deserializer=dbscanserving__pb2.DetectionRequest.FromString,
+                    response_serializer=dbscanserving__pb2.DetectionResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'dbscanserving.Detector', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Detector(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def Detect(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/dbscanserving.Detector/Detect',
+            dbscanserving__pb2.DetectionRequest.SerializeToString,
+            dbscanserving__pb2.DetectionResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/centralizedattackdetector/requirements.in b/src/dbscanserving/requirements.in
similarity index 85%
rename from src/centralizedattackdetector/requirements.in
rename to src/dbscanserving/requirements.in
index 25abdad1b5767117956a88b816399635348884c7..b4272e5c4e709f5893c0e750d263fc24b7be4f7d 100644
--- a/src/centralizedattackdetector/requirements.in
+++ b/src/dbscanserving/requirements.in
@@ -1,6 +1,6 @@
 grpcio-health-checking
-grpcio
 prometheus-client
 pytest
 pytest-benchmark
-redis
+grpcio
+scikit-learn
\ No newline at end of file
diff --git a/src/centralizedattackdetector/requirements.txt b/src/dbscanserving/requirements.txt
similarity index 62%
rename from src/centralizedattackdetector/requirements.txt
rename to src/dbscanserving/requirements.txt
index f15fc23535b22452219448d5a4f3525996b58a75..4f9bee9891129ad09e4281180e1aee2858df0c29 100644
--- a/src/centralizedattackdetector/requirements.txt
+++ b/src/dbscanserving/requirements.txt
@@ -2,31 +2,37 @@
 # This file is autogenerated by pip-compile with python 3.9
 # To update, run:
 #
-#    pip-compile
+#    pip-compile --output-file=requirements.txt requirements.in
 #
 attrs==21.2.0
     # via pytest
-grpcio==1.41.0
+grpcio==1.42.0
     # via
     #   -r requirements.in
     #   grpcio-health-checking
-grpcio-health-checking==1.41.0
+grpcio-health-checking==1.42.0
     # via -r requirements.in
 iniconfig==1.1.1
     # via pytest
-packaging==21.0
+joblib==1.1.0
+    # via scikit-learn
+numpy==1.21.4
+    # via
+    #   scikit-learn
+    #   scipy
+packaging==21.3
     # via pytest
 pluggy==1.0.0
     # via pytest
-prometheus-client==0.11.0
+prometheus-client==0.12.0
     # via -r requirements.in
-protobuf==3.18.0
+protobuf==3.19.1
     # via grpcio-health-checking
-py==1.10.0
+py==1.11.0
     # via pytest
 py-cpuinfo==8.0.0
     # via pytest-benchmark
-pyparsing==2.4.7
+pyparsing==3.0.6
     # via packaging
 pytest==6.2.5
     # via
@@ -34,9 +40,13 @@ pytest==6.2.5
     #   pytest-benchmark
 pytest-benchmark==3.4.1
     # via -r requirements.in
-redis==3.5.3
+scikit-learn==1.0.1
     # via -r requirements.in
+scipy==1.7.3
+    # via scikit-learn
 six==1.16.0
     # via grpcio
+threadpoolctl==3.0.0
+    # via scikit-learn
 toml==0.10.2
     # via pytest
diff --git a/src/dbscanserving/service/DbscanService.py b/src/dbscanserving/service/DbscanService.py
new file mode 100644
index 0000000000000000000000000000000000000000..43336ab089b6848971cda4e79e4da7aa26c1ca65
--- /dev/null
+++ b/src/dbscanserving/service/DbscanService.py
@@ -0,0 +1,56 @@
+import grpc
+import logging
+from concurrent import futures
+from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
+from grpc_health.v1.health_pb2 import HealthCheckResponse
+from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from dbscanserving.proto.dbscanserving_pb2_grpc import add_DetectorServicer_to_server
+from dbscanserving.service.DbscanServiceServicerImpl import DbscanServiceServicerImpl
+from dbscanserving.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class DbscanService:
+    def __init__(
+        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        grace_period=GRPC_GRACE_PERIOD):
+
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.dbscan_servicer = None
+        self.health_servicer = None
+        self.pool = None
+        self.server = None
+
+    def start(self):
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
+        LOGGER.debug('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.dbscan_servicer = DbscanServiceServicerImpl()
+        add_DetectorServicer_to_server(self.dbscan_servicer, self.server)
+
+        self.health_servicer = HealthServicer(
+            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
+        add_HealthServicer_to_server(self.health_servicer, self.server)
+
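+        # add_insecure_port returns the port actually bound; refresh the endpoint accordingly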
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        LOGGER.info('Listening on {:s}...'.format(self.endpoint))
+        self.server.start()
+        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
+
+        LOGGER.debug('Service started')
+
+    def stop(self):
+        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
+        self.health_servicer.enter_graceful_shutdown()
+        self.server.stop(self.grace_period)
+        LOGGER.debug('Service stopped')
diff --git a/src/dbscanserving/service/DbscanServiceServicerImpl.py b/src/dbscanserving/service/DbscanServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..251b3eb1ad4c5e51eb80207ce5036351ea65572f
--- /dev/null
+++ b/src/dbscanserving/service/DbscanServiceServicerImpl.py
@@ -0,0 +1,32 @@
+import os, grpc, logging
+from sklearn.cluster import DBSCAN
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
+from dbscanserving.proto.dbscanserving_pb2_grpc import DetectorServicer
+
+LOGGER = logging.getLogger(__name__)
+
+SERVICE_NAME = 'DbscanServing'
+METHOD_NAMES = ['Detect']
+METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+
+class DbscanServiceServicerImpl(DetectorServicer):
+
+    def __init__(self):
+        LOGGER.debug('Creating Servicer...')
+        LOGGER.debug('Servicer Created')
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def Detect(self, request : DetectionRequest, context : grpc.ServicerContext) -> DetectionResponse:
+        if request.num_samples != len(request.samples):
+            context.set_details("The sample dimension declared does not match with the number of samples received.")
+            LOGGER.debug(f"The sample dimension declared does not match with the number of samples received. Declared: {request.num_samples} - Received: {len(request.samples)}")
+            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
+            return DetectionResponse()
+        # TODO: implement the validation of the features dimension
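+        # DBSCAN assigns a cluster index to every sample; samples classified as noise/outliers receive index -1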
+        clusters = DBSCAN(eps=request.eps, min_samples=request.min_samples).fit_predict([[x for x in sample.features] for sample in request.samples])
+        response = DetectionResponse()
+        for cluster in clusters:
+            response.cluster_indices.append(cluster)
+        return response
diff --git a/src/dbscanserving/service/__init__.py b/src/dbscanserving/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/centralizedattackdetector/service/__main__.py b/src/dbscanserving/service/__main__.py
similarity index 70%
rename from src/centralizedattackdetector/service/__main__.py
rename to src/dbscanserving/service/__main__.py
index 5737402c0318fa58123c5fdf00296c237f09aeb8..bc3f1c681fe88d723d36a28bff64c0e6b04fd089 100644
--- a/src/centralizedattackdetector/service/__main__.py
+++ b/src/dbscanserving/service/__main__.py
@@ -1,9 +1,9 @@
-import logging, signal, sys, threading
+import os, logging, signal, sys, time, threading, multiprocessing
 from prometheus_client import start_http_server
 from common.Settings import get_setting
-from centralizedattackdetector.Config import (
+from dbscanserving.Config import (
     GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT)
-from centralizedattackdetector.service.CentralizedAttackDetectorService import CentralizedAttackDetectorService
+from dbscanserving.service.DbscanService import DbscanService
 
 terminate = threading.Event()
 LOGGER = None
@@ -15,11 +15,11 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
 def main():
     global LOGGER # pylint: disable=global-statement
 
-    service_port = get_setting('CENTRALIZEDATTACKDETECTORSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
-    max_workers  = get_setting('MAX_WORKERS',                                        default=GRPC_MAX_WORKERS )
-    grace_period = get_setting('GRACE_PERIOD',                                       default=GRPC_GRACE_PERIOD)
-    log_level    = get_setting('LOG_LEVEL',                                          default=LOG_LEVEL        )
-    metrics_port = get_setting('METRICS_PORT',                                       default=METRICS_PORT     )
+    service_port = get_setting('DBSCANSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
+    max_workers  = get_setting('MAX_WORKERS',                     default=GRPC_MAX_WORKERS )
+    grace_period = get_setting('GRACE_PERIOD',                    default=GRPC_GRACE_PERIOD)
+    log_level    = get_setting('LOG_LEVEL',                       default=LOG_LEVEL        )
+    metrics_port = get_setting('METRICS_PORT',                    default=METRICS_PORT     )
 
     logging.basicConfig(level=log_level)
     LOGGER = logging.getLogger(__name__)
@@ -33,7 +33,7 @@ def main():
     start_http_server(metrics_port)
 
-    # Starting CentralizedCybersecurity service
+    # Starting the DBSCAN Serving gRPC service
-    grpc_service = CentralizedAttackDetectorService(
+    grpc_service = DbscanService(
         port=service_port, max_workers=max_workers, grace_period=grace_period)
     grpc_service.start()
 
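The renamed entry point keeps the usual pattern of these __main__ modules: each setting can be overridden through an environment variable (DBSCANSERVICE_SERVICE_PORT_GRPC is presumably the variable Kubernetes injects for the dbscanservice Service port named grpc) and otherwise falls back to the defaults in Config.py. A rough stand-in for what common.Settings.get_setting does, offered as an assumption rather than the actual implementation:

import os

def get_setting_sketch(name: str, default=None):
    # Hypothetical stand-in: prefer the environment variable when present,
    # otherwise return the supplied default (as used for port, workers, etc.).
    value = os.environ.get(name)
    return default if value is None else value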
diff --git a/src/dbscanserving/tests/__init__.py b/src/dbscanserving/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/dbscanserving/tests/test_unitary.py b/src/dbscanserving/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..061d2f07d2e1a6876318214a909a26140c98197d
--- /dev/null
+++ b/src/dbscanserving/tests/test_unitary.py
@@ -0,0 +1,116 @@
+import random, logging, pytest
+from dbscanserving.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from dbscanserving.client.DbscanServingClient import DbscanServingClient
+from dbscanserving.service.DbscanService import DbscanService
+from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse, Sample
+
+port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+@pytest.fixture(scope='session')
+def dbscanserving_service():
+    _service = DbscanService(
+        port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def dbscanserving_client():
+    _client = DbscanServingClient(address='127.0.0.1', port=port)
+    yield _client
+    _client.close()
+
+def test_detection_correct(dbscanserving_service, dbscanserving_client: DbscanServingClient):
+    request: DetectionRequest = DetectionRequest()
+
+    request.num_samples = 310
+    request.num_features = 100
+    request.eps = 100.5
+    request.min_samples = 50
+
+    for _ in range(200):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(0., 10.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(100):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(50., 60.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(10):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(5000., 6000.))
+        request.samples.append(grpc_sample)
+
+    response: DetectionResponse = dbscanserving_client.Detect(request)
+    assert len(response.cluster_indices) == 310
+
+def test_detection_incorrect(dbscanserving_service, dbscanserving_client: DbscanServingClient):
+    request: DetectionRequest = DetectionRequest()
+
+    request.num_samples = 210
+    request.num_features = 100
+    request.eps = 100.5
+    request.min_samples = 50
+
+    for _ in range(200):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(0., 10.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(100):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(50., 60.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(10):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(5000., 6000.))
+        request.samples.append(grpc_sample)
+
+    with pytest.raises(Exception):
+        dbscanserving_client.Detect(request)
+
+def test_detection_clusters(dbscanserving_service, dbscanserving_client: DbscanServingClient):
+    request: DetectionRequest = DetectionRequest()
+
+    request.num_samples = 310
+    request.num_features = 100
+    request.eps = 100.5
+    request.min_samples = 50
+
+    for _ in range(200):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(0., 10.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(100):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(50., 60.))
+        request.samples.append(grpc_sample)
+
+    for _ in range(10):
+        grpc_sample = Sample()
+        for __ in range(100):
+            grpc_sample.features.append(random.uniform(5000., 6000.))
+        request.samples.append(grpc_sample)
+
+    response: DetectionResponse = dbscanserving_client.Detect(request)
+    for v in response.cluster_indices[:200]:
+        assert v == 0
+    for v in response.cluster_indices[200:300]:
+        assert v == 1
+    for v in response.cluster_indices[300:]:
+        assert v == -1
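The three tests above build the same 310-sample request (two dense groups plus ten outliers) with duplicated loops; a small helper along these lines could remove the repetition. build_request() is hypothetical, not part of the diff; num_samples stays a parameter so the mismatch case of test_detection_incorrect can still be expressed:

import random
from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, Sample

def build_request(num_samples: int, num_features: int = 100, eps: float = 100.5, min_samples: int = 50,
                  groups=((200, 0., 10.), (100, 50., 60.), (10, 5000., 6000.))) -> DetectionRequest:
    request = DetectionRequest()
    request.num_samples = num_samples
    request.num_features = num_features
    request.eps = eps
    request.min_samples = min_samples
    # Each group is (count, low, high): append `count` samples drawn uniformly from [low, high).
    for count, low, high in groups:
        for _ in range(count):
            sample = Sample()
            for _ in range(num_features):
                sample.features.append(random.uniform(low, high))
            request.samples.append(sample)
    return request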
diff --git a/src/device/.gitlab-ci.yml b/src/device/.gitlab-ci.yml
index e6ee81e379f8eeeca6c715bff5dfc6f2cf1c3d23..2cd6cd848e548c32dbd01901b7fc13da3fe071f8 100644
--- a/src/device/.gitlab-ci.yml
+++ b/src/device/.gitlab-ci.yml
@@ -1,8 +1,7 @@
-# Build, tag, and push the Docker images to the GitLab Docker registry
+# Build, tag and push the Docker image to the GitLab registry
 build device:
   variables:
     IMAGE_NAME: 'device' # name of the microservice
-    IMAGE_NAME_TEST: 'device-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,50 +10,79 @@ build device:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
-# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test device:
+# Run the unit tests of the component
+unit test device:
   variables:
     IMAGE_NAME: 'device' # name of the microservice
-    IMAGE_NAME_TEST: 'device-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
     - build device
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not running"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 2020:2020 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 2020:2020 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary_driverapi.py $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
+  artifacts:
+    when: always
+    reports:
+      junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+      cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
 
 # Deployment of the service in Kubernetes Cluster
 deploy device:
+  variables:
+    IMAGE_NAME: 'device' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build device
-    - unit_test device
-    - dependencies all
-    - integ_test execute
+    - unit test device
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/deviceservice.yaml"
-    - kubectl delete pods --selector app=deviceservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
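The coverage: entry above tells GitLab how to extract the total percentage from the output of 'coverage report'. A quick illustrative check of that regular expression against a typical TOTAL line (the sample output below is made up):

import re

sample_output = 'TOTAL                             1234    321    74%'
match = re.search(r'TOTAL\s+\d+\s+\d+\s+(\d+%)', sample_output)
print(match.group(1) if match else 'no match')  # -> 74%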
diff --git a/src/device/client/DeviceClient.py b/src/device/client/DeviceClient.py
index 31f35ccfd7d8c5fab4a3ff7b630ae358fbde4c65..17f0ac715165841dea3c80a3aae881e9cd1a62ec 100644
--- a/src/device/client/DeviceClient.py
+++ b/src/device/client/DeviceClient.py
@@ -1,6 +1,7 @@
 import grpc, logging
 from common.tools.client.RetryDecorator import retry, delay_exponential
-from device.proto.context_pb2 import Device, DeviceId, Empty
+from device.proto.context_pb2 import Device, DeviceConfig, DeviceId, Empty
+from device.proto.device_pb2 import MonitoringSettings
 from device.proto.device_pb2_grpc import DeviceServiceStub
 
 LOGGER = logging.getLogger(__name__)
@@ -45,3 +46,17 @@ class DeviceClient:
         response = self.stub.DeleteDevice(request)
         LOGGER.debug('DeleteDevice result: {:s}'.format(str(response)))
         return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetInitialConfig(self, request : DeviceId) -> DeviceConfig:
+        LOGGER.debug('GetInitialConfig request: {:s}'.format(str(request)))
+        response = self.stub.GetInitialConfig(request)
+        LOGGER.debug('GetInitialConfig result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def MonitorDeviceKpi(self, request : MonitoringSettings) -> Empty:
+        LOGGER.debug('MonitorDeviceKpi request: {:s}'.format(str(request)))
+        response = self.stub.MonitorDeviceKpi(request)
+        LOGGER.debug('MonitorDeviceKpi result: {:s}'.format(str(response)))
+        return response
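The two new client methods wrap the GetInitialConfig and MonitorDeviceKpi RPCs of the Device service stub, reusing the same retry/reconnect decorator as the existing calls. A hypothetical usage sketch, assuming the DeviceClient constructor takes address/port like the other clients in this repository and leaving the MonitoringSettings fields unset as placeholders:

from device.client.DeviceClient import DeviceClient
from device.proto.context_pb2 import DeviceId
from device.proto.device_pb2 import MonitoringSettings

client = DeviceClient(address='127.0.0.1', port=2020)   # placeholder endpoint

device_id = DeviceId()
device_id.device_uuid.uuid = 'DEV1'                     # placeholder device UUID

initial_config = client.GetInitialConfig(device_id)     # returns a DeviceConfig

settings = MonitoringSettings()                         # fields depend on device.proto (not shown here)
client.MonitorDeviceKpi(settings)                       # returns context Empty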
diff --git a/src/device/proto/context_pb2.py b/src/device/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/device/proto/context_pb2.py
+++ b/src/device/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
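The hunks above extend context.proto with connection handling: the Connection message is rebuilt around connection_id, service_id, path_hops_endpoint_ids and sub_service_ids, a ConnectionEvent message is registered, and ContextService gains ListConnectionIds, ListConnections, GetConnection, SetConnection, RemoveConnection and GetConnectionEvents. A minimal client-side sketch of the regenerated API follows; the import paths, the ContextServiceStub name and the Uuid field name ('uuid') are assumptions, not taken from this diff.

import grpc
from context.proto import context_pb2, context_pb2_grpc   # assumed import paths

def set_and_list_connections(channel, service_id):
    # service_id is assumed to be a pre-built context.ServiceId for an existing service.
    stub = context_pb2_grpc.ContextServiceStub(channel)    # stub name assumed

    # Connection now carries connection_id, service_id, path_hops_endpoint_ids and sub_service_ids.
    connection = context_pb2.Connection()
    connection.connection_id.connection_uuid.uuid = 'conn-1'   # Uuid field name assumed
    connection.service_id.CopyFrom(service_id)

    connection_id = stub.SetConnection(connection)     # returns context.ConnectionId
    stub.GetConnection(connection_id)                  # returns context.Connection
    return stub.ListConnections(service_id)            # context.ConnectionList for that service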
diff --git a/src/device/proto/device_pb2.py b/src/device/proto/device_pb2.py
index e351738e6ac1ea9dadf4310897a979ab38db669b..4d4dbb82567256dd79595884f0ed9c2f13498d31 100644
--- a/src/device/proto/device_pb2.py
+++ b/src/device/proto/device_pb2.py
@@ -12,6 +12,7 @@ _sym_db = _symbol_database.Default()
 
 
 from . import context_pb2 as context__pb2
+from . import monitoring_pb2 as monitoring__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,14 +21,77 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x0c\x64\x65vice.proto\x12\x06\x64\x65vice\x1a\rcontext.proto2\xf0\x01\n\rDeviceService\x12\x31\n\tAddDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x37\n\x0f\x43onfigureDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0c\x44\x65leteDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12>\n\x10GetInitialConfig\x12\x11.context.DeviceId\x1a\x15.context.DeviceConfig\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\x0c\x64\x65vice.proto\x12\x06\x64\x65vice\x1a\rcontext.proto\x1a\x10monitoring.proto\"\xa4\x01\n\x12MonitoringSettings\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x31\n\x0ekpi_descriptor\x18\x02 \x01(\x0b\x32\x19.monitoring.KpiDescriptor\x12\x1b\n\x13sampling_duration_s\x18\x03 \x01(\x02\x12\x1b\n\x13sampling_interval_s\x18\x04 \x01(\x02\x32\xb2\x02\n\rDeviceService\x12\x31\n\tAddDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x37\n\x0f\x43onfigureDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0c\x44\x65leteDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12>\n\x10GetInitialConfig\x12\x11.context.DeviceId\x1a\x15.context.DeviceConfig\"\x00\x12@\n\x10MonitorDeviceKpi\x12\x1a.device.MonitoringSettings\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
   ,
-  dependencies=[context__pb2.DESCRIPTOR,])
+  dependencies=[context__pb2.DESCRIPTOR,monitoring__pb2.DESCRIPTOR,])
 
 
 
+
+_MONITORINGSETTINGS = _descriptor.Descriptor(
+  name='MonitoringSettings',
+  full_name='device.MonitoringSettings',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='kpi_id', full_name='device.MonitoringSettings.kpi_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='kpi_descriptor', full_name='device.MonitoringSettings.kpi_descriptor', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sampling_duration_s', full_name='device.MonitoringSettings.sampling_duration_s', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sampling_interval_s', full_name='device.MonitoringSettings.sampling_interval_s', index=3,
+      number=4, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=58,
+  serialized_end=222,
+)
+
+_MONITORINGSETTINGS.fields_by_name['kpi_id'].message_type = monitoring__pb2._KPIID
+_MONITORINGSETTINGS.fields_by_name['kpi_descriptor'].message_type = monitoring__pb2._KPIDESCRIPTOR
+DESCRIPTOR.message_types_by_name['MonitoringSettings'] = _MONITORINGSETTINGS
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
+MonitoringSettings = _reflection.GeneratedProtocolMessageType('MonitoringSettings', (_message.Message,), {
+  'DESCRIPTOR' : _MONITORINGSETTINGS,
+  '__module__' : 'device_pb2'
+  # @@protoc_insertion_point(class_scope:device.MonitoringSettings)
+  })
+_sym_db.RegisterMessage(MonitoringSettings)
+
 
 
 _DEVICESERVICE = _descriptor.ServiceDescriptor(
@@ -37,8 +101,8 @@ _DEVICESERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=40,
-  serialized_end=280,
+  serialized_start=225,
+  serialized_end=531,
   methods=[
   _descriptor.MethodDescriptor(
     name='AddDevice',
@@ -80,6 +144,16 @@ _DEVICESERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='MonitorDeviceKpi',
+    full_name='device.DeviceService.MonitorDeviceKpi',
+    index=4,
+    containing_service=None,
+    input_type=_MONITORINGSETTINGS,
+    output_type=context__pb2._EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_DEVICESERVICE)
 
diff --git a/src/device/proto/device_pb2_grpc.py b/src/device/proto/device_pb2_grpc.py
index 453aa2fcbc9296cf25298c2041433dfbb06b8e28..2b9bfc47da3b33b632ff46a8454496a499305a6c 100644
--- a/src/device/proto/device_pb2_grpc.py
+++ b/src/device/proto/device_pb2_grpc.py
@@ -3,6 +3,7 @@
 import grpc
 
 from . import context_pb2 as context__pb2
+from . import device_pb2 as device__pb2
 
 
 class DeviceServiceStub(object):
@@ -34,6 +35,11 @@ class DeviceServiceStub(object):
                 request_serializer=context__pb2.DeviceId.SerializeToString,
                 response_deserializer=context__pb2.DeviceConfig.FromString,
                 )
+        self.MonitorDeviceKpi = channel.unary_unary(
+                '/device.DeviceService/MonitorDeviceKpi',
+                request_serializer=device__pb2.MonitoringSettings.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
 
 
 class DeviceServiceServicer(object):
@@ -63,6 +69,12 @@ class DeviceServiceServicer(object):
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
+    def MonitorDeviceKpi(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
 
 def add_DeviceServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
@@ -86,6 +98,11 @@ def add_DeviceServiceServicer_to_server(servicer, server):
                     request_deserializer=context__pb2.DeviceId.FromString,
                     response_serializer=context__pb2.DeviceConfig.SerializeToString,
             ),
+            'MonitorDeviceKpi': grpc.unary_unary_rpc_method_handler(
+                    servicer.MonitorDeviceKpi,
+                    request_deserializer=device__pb2.MonitoringSettings.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
     }
     generic_handler = grpc.method_handlers_generic_handler(
             'device.DeviceService', rpc_method_handlers)
@@ -163,3 +180,20 @@ class DeviceService(object):
             context__pb2.DeviceConfig.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def MonitorDeviceKpi(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/device.DeviceService/MonitorDeviceKpi',
+            device__pb2.MonitoringSettings.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
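device.proto now imports monitoring.proto, defines a MonitoringSettings message (kpi_id, kpi_descriptor, sampling_duration_s, sampling_interval_s) and exposes it through a new MonitorDeviceKpi RPC on DeviceService. A hedged sketch of a client call against the regenerated modules under src/device/proto; the endpoint address and the DeviceId/Uuid field names are assumptions.

import grpc
# Assumes src/ is on PYTHONPATH so the generated modules resolve as a package.
from device.proto import device_pb2, device_pb2_grpc, kpi_sample_types_pb2

def monitor_device_kpi(address='localhost:2020'):              # address is an assumption
    channel = grpc.insecure_channel(address)
    stub = device_pb2_grpc.DeviceServiceStub(channel)

    settings = device_pb2.MonitoringSettings()
    settings.kpi_id.kpi_id.uuid = 'kpi-1'                      # Uuid field name assumed
    settings.kpi_descriptor.kpi_description = 'packets received on dev-1/ep-1'
    settings.kpi_descriptor.kpi_sample_type = kpi_sample_types_pb2.KPISAMPLETYPE_PACKETS_RECEIVED
    settings.kpi_descriptor.device_id.device_uuid.uuid = 'dev-1'   # DeviceId field name assumed
    settings.sampling_duration_s = 120.0                       # float fields, as generated above
    settings.sampling_interval_s = 5.0

    return stub.MonitorDeviceKpi(settings)                     # returns context.Empty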
diff --git a/src/device/proto/kpi_sample_types_pb2.py b/src/device/proto/kpi_sample_types_pb2.py
index ad22554ec352d0aeae644fdce00c0f28996ed73b..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490 100644
--- a/src/device/proto/kpi_sample_types_pb2.py
+++ b/src/device/proto/kpi_sample_types_pb2.py
@@ -2,6 +2,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: kpi_sample_types.proto
 """Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -15,15 +16,62 @@ _sym_db = _symbol_database.Default()
 
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='kpi_sample_types.proto',
-  package='',
+  package='kpi_sample_types',
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x16kpi_sample_types.protob\x06proto3'
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
 )
 
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
 
 
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
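kpi_sample_types.proto is promoted to its own kpi_sample_types package and its values gain the KPISAMPLETYPE_ prefix. The generated module exposes both module-level constants and an EnumTypeWrapper, so lookups against the names and numbers shown above can be expressed as:

from device.proto import kpi_sample_types_pb2   # assumed import path; other components carry their own copy

assert kpi_sample_types_pb2.KPISAMPLETYPE_BYTES_RECEIVED == 202
assert kpi_sample_types_pb2.KpiSampleType.Name(101) == 'KPISAMPLETYPE_PACKETS_TRANSMITTED'
assert kpi_sample_types_pb2.KpiSampleType.Value('KPISAMPLETYPE_UNKNOWN') == 0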
diff --git a/src/device/proto/monitoring_pb2.py b/src/device/proto/monitoring_pb2.py
index 7368609d2145f94cc3b746836a5297333151c738..b313ebb68f0da37a540898e8c362fd204a799076 100644
--- a/src/device/proto/monitoring_pb2.py
+++ b/src/device/proto/monitoring_pb2.py
@@ -2,7 +2,6 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: monitoring.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -13,6 +12,7 @@ _sym_db = _symbol_database.Default()
 
 
 from . import context_pb2 as context__pb2
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -21,177 +21,53 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\"\x84\x01\n\x10\x43reateKpiRequest\x12\x16\n\x0ekpiDescription\x18\x01 \x01(\t\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x32\n\x0fkpi_sample_type\x18\x03 \x01(\x0e\x32\x19.monitoring.KpiSampleType\"h\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"i\n\x17MonitorDeviceKpiRequest\x12\x1c\n\x03kpi\x18\x01 \x01(\x0b\x32\x0f.monitoring.Kpi\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"s\n\x11IncludeKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x12\n\ntime_stamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x03 \x01(\x0b\x32\x14.monitoring.KpiValue\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xd6\x01\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\x16\n\x0ekpiDescription\x18\x03 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\x12\x32\n\x0fkpi_sample_type\x18\x05 \x01(\x0e\x32\x19.monitoring.KpiSampleType\x12$\n\tdevice_id\x18\x06 \x01(\x0b\x32\x11.context.DeviceId\"a\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x12\n\x08\x66loatVal\x18\x02 \x01(\x02H\x00\x12\x13\n\tstringVal\x18\x03 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\"+\n\x07KpiList\x12 \n\x07kpiList\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi*x\n\rKpiSampleType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x17\n\x13PACKETS_TRANSMITTED\x10\x65\x12\x14\n\x10PACKETS_RECEIVED\x10\x66\x12\x16\n\x11\x42YTES_TRANSMITTED\x10\xc9\x01\x12\x13\n\x0e\x42YTES_RECEIVED\x10\xca\x01\x32\x8b\x03\n\x11MonitoringService\x12>\n\tCreateKpi\x12\x1c.monitoring.CreateKpiRequest\x1a\x11.monitoring.KpiId\"\x00\x12=\n\nIncludeKpi\x12\x1d.monitoring.IncludeKpiRequest\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12I\n\x10MonitorDeviceKpi\x12#.monitoring.MonitorDeviceKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\x1a\x16kpi_sample_types.proto\"\xda\x01\n\rKpiDescriptor\x12\x17\n\x0fkpi_description\x18\x01 \x01(\t\x12\x38\n\x0fkpi_sample_type\x18\x02 \x01(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\x12$\n\tdevice_id\x18\x03 \x01(\x0b\x32\x11.context.DeviceId\x12(\n\x0b\x65ndpoint_id\x18\x04 \x01(\x0b\x32\x13.context.EndPointId\x12&\n\nservice_id\x18\x05 \x01(\x0b\x32\x12.context.ServiceId\"p\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x1b\n\x13sampling_duration_s\x18\x02 \x01(\x02\x12\x1b\n\x13sampling_interval_s\x18\x03 \x01(\x02\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"d\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\"a\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x12\n\x08\x66loatVal\x18\x02 \x01(\x02H\x00\x12\x13\n\tstringVal\x18\x03 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\",\n\x07KpiList\x12!\n\x08kpi_list\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi2\xf3\x02\n\x11MonitoringService\x12;\n\tCreateKpi\x12\x19.monitoring.KpiDescriptor\x1a\x11.monitoring.KpiId\"\x00\x12\x42\n\x10GetKpiDescriptor\x12\x11.monitoring.KpiId\x1a\x19.monitoring.KpiDescriptor\"\x00\x12/\n\nIncludeKpi\x12\x0f.monitoring.Kpi\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
   ,
-  dependencies=[context__pb2.DESCRIPTOR,])
+  dependencies=[context__pb2.DESCRIPTOR,kpi__sample__types__pb2.DESCRIPTOR,])
 
-_KPISAMPLETYPE = _descriptor.EnumDescriptor(
-  name='KpiSampleType',
-  full_name='monitoring.KpiSampleType',
-  filename=None,
-  file=DESCRIPTOR,
-  create_key=_descriptor._internal_create_key,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_TRANSMITTED', index=1, number=101,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_RECEIVED', index=2, number=102,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_TRANSMITTED', index=3, number=201,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_RECEIVED', index=4, number=202,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=913,
-  serialized_end=1033,
-)
-_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
-
-KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
-UNKNOWN = 0
-PACKETS_TRANSMITTED = 101
-PACKETS_RECEIVED = 102
-BYTES_TRANSMITTED = 201
-BYTES_RECEIVED = 202
 
 
 
-_CREATEKPIREQUEST = _descriptor.Descriptor(
-  name='CreateKpiRequest',
-  full_name='monitoring.CreateKpiRequest',
+_KPIDESCRIPTOR = _descriptor.Descriptor(
+  name='KpiDescriptor',
+  full_name='monitoring.KpiDescriptor',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.CreateKpiRequest.kpiDescription', index=0,
+      name='kpi_description', full_name='monitoring.KpiDescriptor.kpi_description', index=0,
       number=1, type=9, cpp_type=9, label=1,
       has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.CreateKpiRequest.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.CreateKpiRequest.kpi_sample_type', index=2,
-      number=3, type=14, cpp_type=8, label=1,
+      name='kpi_sample_type', full_name='monitoring.KpiDescriptor.kpi_sample_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
       has_default_value=False, default_value=0,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=48,
-  serialized_end=180,
-)
-
-
-_MONITORKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorKpiRequest',
-  full_name='monitoring.MonitorKpiRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
+      name='device_id', full_name='monitoring.KpiDescriptor.device_id', index=2,
+      number=3, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=182,
-  serialized_end=286,
-)
-
-
-_MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorDeviceKpiRequest',
-  full_name='monitoring.MonitorDeviceKpiRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='kpi', full_name='monitoring.MonitorDeviceKpiRequest.kpi', index=0,
-      number=1, type=11, cpp_type=10, label=1,
+      name='endpoint_id', full_name='monitoring.KpiDescriptor.endpoint_id', index=3,
+      number=4, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorDeviceKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorDeviceKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='service_id', full_name='monitoring.KpiDescriptor.service_id', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -207,37 +83,37 @@ _MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=288,
-  serialized_end=393,
+  serialized_start=72,
+  serialized_end=290,
 )
 
 
-_INCLUDEKPIREQUEST = _descriptor.Descriptor(
-  name='IncludeKpiRequest',
-  full_name='monitoring.IncludeKpiRequest',
+_MONITORKPIREQUEST = _descriptor.Descriptor(
+  name='MonitorKpiRequest',
+  full_name='monitoring.MonitorKpiRequest',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.IncludeKpiRequest.kpi_id', index=0,
+      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='time_stamp', full_name='monitoring.IncludeKpiRequest.time_stamp', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='sampling_duration_s', full_name='monitoring.MonitorKpiRequest.sampling_duration_s', index=1,
+      number=2, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.IncludeKpiRequest.kpi_value', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='sampling_interval_s', full_name='monitoring.MonitorKpiRequest.sampling_interval_s', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -253,8 +129,8 @@ _INCLUDEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=395,
-  serialized_end=510,
+  serialized_start=292,
+  serialized_end=404,
 )
 
 
@@ -285,8 +161,8 @@ _KPIID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=512,
-  serialized_end=550,
+  serialized_start=406,
+  serialized_end=444,
 )
 
 
@@ -313,33 +189,12 @@ _KPI = _descriptor.Descriptor(
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.Kpi.kpiDescription', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=3,
+      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=2,
       number=4, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.Kpi.kpi_sample_type', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.Kpi.device_id', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -352,8 +207,8 @@ _KPI = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=553,
-  serialized_end=767,
+  serialized_start=446,
+  serialized_end=546,
 )
 
 
@@ -410,8 +265,8 @@ _KPIVALUE = _descriptor.Descriptor(
       create_key=_descriptor._internal_create_key,
     fields=[]),
   ],
-  serialized_start=769,
-  serialized_end=866,
+  serialized_start=548,
+  serialized_end=645,
 )
 
 
@@ -424,7 +279,7 @@ _KPILIST = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiList', full_name='monitoring.KpiList.kpiList', index=0,
+      name='kpi_list', full_name='monitoring.KpiList.kpi_list', index=0,
       number=1, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -442,21 +297,18 @@ _KPILIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=868,
-  serialized_end=911,
+  serialized_start=647,
+  serialized_end=691,
 )
 
-_CREATEKPIREQUEST.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
-_CREATEKPIREQUEST.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['kpi_sample_type'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
+_KPIDESCRIPTOR.fields_by_name['endpoint_id'].message_type = context__pb2._ENDPOINTID
+_KPIDESCRIPTOR.fields_by_name['service_id'].message_type = context__pb2._SERVICEID
 _MONITORKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_MONITORDEVICEKPIREQUEST.fields_by_name['kpi'].message_type = _KPI
-_INCLUDEKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_INCLUDEKPIREQUEST.fields_by_name['kpi_value'].message_type = _KPIVALUE
 _KPIID.fields_by_name['kpi_id'].message_type = context__pb2._UUID
 _KPI.fields_by_name['kpi_id'].message_type = _KPIID
 _KPI.fields_by_name['kpi_value'].message_type = _KPIVALUE
-_KPI.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
-_KPI.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['intVal'])
 _KPIVALUE.fields_by_name['intVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
@@ -469,24 +321,21 @@ _KPIVALUE.fields_by_name['stringVal'].containing_oneof = _KPIVALUE.oneofs_by_nam
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['boolVal'])
 _KPIVALUE.fields_by_name['boolVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
-_KPILIST.fields_by_name['kpiList'].message_type = _KPI
-DESCRIPTOR.message_types_by_name['CreateKpiRequest'] = _CREATEKPIREQUEST
+_KPILIST.fields_by_name['kpi_list'].message_type = _KPI
+DESCRIPTOR.message_types_by_name['KpiDescriptor'] = _KPIDESCRIPTOR
 DESCRIPTOR.message_types_by_name['MonitorKpiRequest'] = _MONITORKPIREQUEST
-DESCRIPTOR.message_types_by_name['MonitorDeviceKpiRequest'] = _MONITORDEVICEKPIREQUEST
-DESCRIPTOR.message_types_by_name['IncludeKpiRequest'] = _INCLUDEKPIREQUEST
 DESCRIPTOR.message_types_by_name['KpiId'] = _KPIID
 DESCRIPTOR.message_types_by_name['Kpi'] = _KPI
 DESCRIPTOR.message_types_by_name['KpiValue'] = _KPIVALUE
 DESCRIPTOR.message_types_by_name['KpiList'] = _KPILIST
-DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
-CreateKpiRequest = _reflection.GeneratedProtocolMessageType('CreateKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEKPIREQUEST,
+KpiDescriptor = _reflection.GeneratedProtocolMessageType('KpiDescriptor', (_message.Message,), {
+  'DESCRIPTOR' : _KPIDESCRIPTOR,
   '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.CreateKpiRequest)
+  # @@protoc_insertion_point(class_scope:monitoring.KpiDescriptor)
   })
-_sym_db.RegisterMessage(CreateKpiRequest)
+_sym_db.RegisterMessage(KpiDescriptor)
 
 MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest', (_message.Message,), {
   'DESCRIPTOR' : _MONITORKPIREQUEST,
@@ -495,20 +344,6 @@ MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest'
   })
 _sym_db.RegisterMessage(MonitorKpiRequest)
 
-MonitorDeviceKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorDeviceKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _MONITORDEVICEKPIREQUEST,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.MonitorDeviceKpiRequest)
-  })
-_sym_db.RegisterMessage(MonitorDeviceKpiRequest)
-
-IncludeKpiRequest = _reflection.GeneratedProtocolMessageType('IncludeKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _INCLUDEKPIREQUEST,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.IncludeKpiRequest)
-  })
-_sym_db.RegisterMessage(IncludeKpiRequest)
-
 KpiId = _reflection.GeneratedProtocolMessageType('KpiId', (_message.Message,), {
   'DESCRIPTOR' : _KPIID,
   '__module__' : 'monitoring_pb2'
@@ -546,45 +381,45 @@ _MONITORINGSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=1036,
-  serialized_end=1431,
+  serialized_start=694,
+  serialized_end=1065,
   methods=[
   _descriptor.MethodDescriptor(
     name='CreateKpi',
     full_name='monitoring.MonitoringService.CreateKpi',
     index=0,
     containing_service=None,
-    input_type=_CREATEKPIREQUEST,
+    input_type=_KPIDESCRIPTOR,
     output_type=_KPIID,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='IncludeKpi',
-    full_name='monitoring.MonitoringService.IncludeKpi',
+    name='GetKpiDescriptor',
+    full_name='monitoring.MonitoringService.GetKpiDescriptor',
     index=1,
     containing_service=None,
-    input_type=_INCLUDEKPIREQUEST,
-    output_type=context__pb2._EMPTY,
+    input_type=_KPIID,
+    output_type=_KPIDESCRIPTOR,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorKpi',
-    full_name='monitoring.MonitoringService.MonitorKpi',
+    name='IncludeKpi',
+    full_name='monitoring.MonitoringService.IncludeKpi',
     index=2,
     containing_service=None,
-    input_type=_MONITORKPIREQUEST,
+    input_type=_KPI,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorDeviceKpi',
-    full_name='monitoring.MonitoringService.MonitorDeviceKpi',
+    name='MonitorKpi',
+    full_name='monitoring.MonitoringService.MonitorKpi',
     index=3,
     containing_service=None,
-    input_type=_MONITORDEVICEKPIREQUEST,
+    input_type=_MONITORKPIREQUEST,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
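monitoring.proto drops CreateKpiRequest, IncludeKpiRequest and MonitorDeviceKpiRequest in favour of KpiDescriptor, a slimmer Kpi, and a MonitorKpiRequest keyed on sampling_duration_s/sampling_interval_s; CreateKpi now takes a KpiDescriptor, GetKpiDescriptor is added, and IncludeKpi accepts a Kpi directly. A hedged sketch of the resulting client flow; monitoring_pb2_grpc and its MonitoringServiceStub are assumptions (that module is not part of this excerpt), and the import paths and timestamp format are illustrative only.

from monitoring.proto import monitoring_pb2, monitoring_pb2_grpc, kpi_sample_types_pb2  # assumed paths

def create_and_feed_kpi(channel):
    stub = monitoring_pb2_grpc.MonitoringServiceStub(channel)        # stub name assumed

    descriptor = monitoring_pb2.KpiDescriptor(
        kpi_description='bytes received on dev-1/ep-1',
        kpi_sample_type=kpi_sample_types_pb2.KPISAMPLETYPE_BYTES_RECEIVED)
    kpi_id = stub.CreateKpi(descriptor)                              # returns monitoring.KpiId

    stub.MonitorKpi(monitoring_pb2.MonitorKpiRequest(
        kpi_id=kpi_id, sampling_duration_s=60.0, sampling_interval_s=10.0))

    kpi = monitoring_pb2.Kpi(kpi_id=kpi_id, timestamp='2021-01-01T00:00:00Z')
    kpi.kpi_value.floatVal = 12.3                                    # KpiValue oneof: intVal/floatVal/stringVal/boolVal
    stub.IncludeKpi(kpi)                                             # IncludeKpi now takes a Kpi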
diff --git a/src/device/requirements.in b/src/device/requirements.in
index 5c38e92914207bf101ebc00b2cef453a3a85f82a..6a86f5385a313ddc52ebf0d1372246e2ffd4838b 100644
--- a/src/device/requirements.in
+++ b/src/device/requirements.in
@@ -11,4 +11,7 @@ pytest-benchmark
 python-json-logger
 pytz
 redis
+requests
 xmltodict
+p4runtime==1.3.0
+coverage
diff --git a/src/device/service/DeviceService.py b/src/device/service/DeviceService.py
index ae0d5c8396157d5398751587f5e7d808b0d8f484..11cb8d3e39236002ee7ca6471c69112bbcfa059a 100644
--- a/src/device/service/DeviceService.py
+++ b/src/device/service/DeviceService.py
@@ -3,26 +3,29 @@ from concurrent import futures
 from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
 from grpc_health.v1.health_pb2 import HealthCheckResponse
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from common.orm.backend.BackendEnum import BackendEnum
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
 from context.client.ContextClient import ContextClient
 from device.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 from device.proto.device_pb2_grpc import add_DeviceServiceServicer_to_server
-from .DeviceServiceServicerImpl import DeviceServiceServicerImpl
-#from .MonitoringLoops import MonitoringLoops
+from monitoring.client.monitoring_client import MonitoringClient
 from .driver_api.DriverInstanceCache import DriverInstanceCache
+from .DeviceServiceServicerImpl import DeviceServiceServicerImpl
+from .MonitoringLoops import MonitoringLoops
 
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
 class DeviceService:
     def __init__(
-        self, context_client : ContextClient, driver_instance_cache : DriverInstanceCache,
-        #monitoring_loops : MonitoringLoops,
-        address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
-        grace_period=GRPC_GRACE_PERIOD):
+        self, context_client : ContextClient, monitoring_client : MonitoringClient,
+        driver_instance_cache : DriverInstanceCache,
+        address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD):
 
         self.context_client = context_client
+        self.monitoring_client = monitoring_client
         self.driver_instance_cache = driver_instance_cache
-        #self.monitoring_loops = monitoring_loops
         self.address = address
         self.port = port
         self.endpoint = None
@@ -33,18 +36,21 @@ class DeviceService:
         self.pool = None
         self.server = None
 
+        self.database = Database(get_database_backend(backend=BackendEnum.INMEMORY))
+        self.monitoring_loops = MonitoringLoops(monitoring_client, self.database)
+
     def start(self):
         self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
         LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
             str(self.endpoint), str(self.max_workers)))
 
+        self.monitoring_loops.start()
+
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
 
         self.device_servicer = DeviceServiceServicerImpl(
-            self.context_client, self.driver_instance_cache,
-            #self.monitoring_loops
-        )
+            self.context_client, self.database, self.driver_instance_cache, self.monitoring_loops)
         add_DeviceServiceServicer_to_server(self.device_servicer, self.server)
 
         self.health_servicer = HealthServicer(
@@ -63,4 +69,5 @@ class DeviceService:
         LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
         self.health_servicer.enter_graceful_shutdown()
         self.server.stop(self.grace_period)
+        self.monitoring_loops.stop()
         LOGGER.debug('Service stopped')
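DeviceService now takes a MonitoringClient alongside the ContextClient and DriverInstanceCache, builds an in-memory Database plus a MonitoringLoops instance in __init__, and starts/stops the loops around the gRPC server. A hedged wiring sketch under those assumptions; the absolute import path and the pre-built client/cache instances are placeholders, since their construction is deployment-specific and not shown in this diff.

from device.service.DeviceService import DeviceService   # assumed absolute import path

def run_device_service(context_client, monitoring_client, driver_instance_cache):
    # Arguments are assumed to be pre-built ContextClient, MonitoringClient and
    # DriverInstanceCache instances (their constructor arguments are not shown in this diff).
    device_service = DeviceService(context_client, monitoring_client, driver_instance_cache)
    device_service.start()      # MonitoringLoops.start() runs before the gRPC server comes up
    try:
        pass                    # block/serve here until shutdown is requested
    finally:
        device_service.stop()   # stops the gRPC server, then MonitoringLoops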
diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index 7dbed3eb23a0479a8972a6a3547f027ff458c5b2..485cf0a27994b4943df6d8aadf3d54d3e63bfdcf 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -1,28 +1,31 @@
-import grpc, logging
-from prometheus_client import Counter, Histogram
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.api.Database import Database
-from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
-from common.exceptions.ServiceException import ServiceException
-from device.proto.context_pb2 import DeviceId, Device, Empty
+import grpc, json, logging
+from typing import Any, List, Tuple
+from google.protobuf.json_format import MessageToDict
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException, OperationFailedException
+from context.client.ContextClient import ContextClient
+from context.proto.kpi_sample_types_pb2 import KpiSampleType
+from device.proto.context_pb2 import ConfigActionEnum, Device, DeviceConfig, DeviceId, Empty
 from device.proto.device_pb2 import MonitoringSettings
 from device.proto.device_pb2_grpc import DeviceServiceServicer
-#from .MonitoringLoops import MonitoringLoops
+from device.service.database.RelationModels import EndPointMonitorKpiModel
+from .MonitoringLoops import MonitoringLoops
 from .database.ConfigModel import (
     ConfigModel, ConfigRuleModel, ORM_ConfigActionEnum, get_config_rules, grpc_config_rules_to_raw, update_config)
 from .database.DatabaseTools import (
     delete_device_from_context, get_device_driver_filter_fields, sync_device_from_context, sync_device_to_context,
     update_device_in_local_database)
 from .database.DeviceModel import DeviceModel, DriverModel
-from .database.EndPointModel import EndPointModel
-#from .database.KpiModel import KpiModel
-#from .database.KpiSampleType import grpc_to_enum__kpi_sample_type
-from .driver_api._Driver import _Driver, RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES
+from .database.EndPointModel import EndPointModel, EndPointMonitorModel
+from .database.KpiModel import KpiModel
+from .database.KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type
+from .driver_api._Driver import _Driver, RESOURCE_ENDPOINTS #, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES
 from .driver_api.DriverInstanceCache import DriverInstanceCache
 from .driver_api.Tools import (
-    check_delete_errors, check_set_errors,
-    #check_subscribe_errors, check_unsubscribe_errors
-)
+    check_delete_errors, check_set_errors, check_subscribe_errors, check_unsubscribe_errors)
 
 LOGGER = logging.getLogger(__name__)
 
@@ -30,176 +33,361 @@ SERVICE_NAME = 'Device'
 METHOD_NAMES = ['AddDevice', 'ConfigureDevice', 'DeleteDevice', 'GetInitialConfig', 'MonitorDeviceKpi']
 METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
 
-MONITORDEVICEKPI_COUNTER_STARTED    = Counter  ('device_monitordevicekpi_counter_started',
-                                            'Device:MonitorDeviceKpi counter of requests started'  )
-MONITORDEVICEKPI_COUNTER_COMPLETED  = Counter  ('device_monitordevicekpi_counter_completed',
-                                            'Device:MonitorDeviceKpi counter of requests completed')
-MONITORDEVICEKPI_COUNTER_FAILED     = Counter  ('device_monitordevicekpi_counter_failed',
-                                            'Device:MonitorDeviceKpi counter of requests failed'   )
-MONITORDEVICEKPI_HISTOGRAM_DURATION = Histogram('device_monitordevicekpi_histogram_duration',
-                                            'Device:MonitorDeviceKpi histogram of request duration')
-
-
 class DeviceServiceServicerImpl(DeviceServiceServicer):
     def __init__(
-        self, context_client : ContextClient, driver_instance_cache : DriverInstanceCache,
-        #monitoring_loops : MonitoringLoops
-        ):
+        self, context_client : ContextClient, database : Database, driver_instance_cache : DriverInstanceCache,
+        monitoring_loops : MonitoringLoops):
 
         LOGGER.debug('Creating Servicer...')
         self.context_client = context_client
-        self.database = Database(get_database_backend(backend=BackendEnum.INMEMORY))
+        self.database = database
         self.driver_instance_cache = driver_instance_cache
-        #self.monitoring_loops = monitoring_loops
+        self.monitoring_loops = monitoring_loops
         LOGGER.debug('Servicer Created')
 
-    @ADDDEVICE_HISTOGRAM_DURATION.time()
-    def AddDevice(self, request : Device, grpc_context : grpc.ServicerContext) -> DeviceId:
-        ADDDEVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('AddDevice request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            device_id, device_type, device_config, device_opstat, db_endpoints_ports = \
-                check_device_request('AddDevice', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-            db_device = db_topology.device(device_id).create(device_type, device_config, device_opstat)
-            for db_endpoint,port_type in db_endpoints_ports:
-                db_endpoint.create(port_type)
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = DeviceId(**db_device.dump_id())
-            LOGGER.debug('AddDevice reply: {}'.format(str(reply)))
-            ADDDEVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('AddDevice exception')
-            ADDDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('AddDevice exception')
-            ADDDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @CONFIGUREDEVICE_HISTOGRAM_DURATION.time()
-    def ConfigureDevice(self, request : Device, grpc_context : grpc.ServicerContext) -> DeviceId:
-        CONFIGUREDEVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('ConfigureDevice request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            device_id, device_type, device_config, device_opstat, db_endpoints_ports = \
-                check_device_request('UpdateDevice', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-            db_device = db_topology.device(device_id)
-
-            db_device_attributes = db_device.attributes.get(attributes=['device_type'])
-            # should not happen, device creation through Database API ensures all fields are always present
-            if len(db_device_attributes) == 0:                                                  # pragma: no cover
-                msg = 'Attribute device_type for Device({}) does not exist in the database.'    # pragma: no cover
-                msg = msg.format(device_id)                                                     # pragma: no cover
-                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)                # pragma: no cover
-
-            db_device_type = db_device_attributes.get('device_type')
-            # should not happen, device creation through Database API ensures all fields are always present
-            if len(db_device_type) == 0:                                                # pragma: no cover
-                msg = 'Attribute device_type for Device({}) is empty in the database.'  # pragma: no cover
-                msg = msg.format(device_id)                                             # pragma: no cover
-                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)        # pragma: no cover
-
-            if db_device_type != device_type:
-                msg = 'Device({}) has Type({}) in the database. Cannot be changed to Type({}).'
-                msg = msg.format(device_id, db_device_type, device_type)
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-            if len(db_endpoints_ports) > 0:
-                msg = 'Endpoints belonging to Device({}) cannot be modified.'
-                msg = msg.format(device_id)
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-            update_attributes = {}
-
-            if len(device_config) > 0:
-                update_attributes['device_config'] = device_config
-            
-            if device_opstat != OperationalStatus.KEEP_STATE:
-                update_attributes['device_operational_status'] = device_opstat
-
-            if len(update_attributes) == 0:
-                msg = ' '.join([
-                    'Any change has been requested for Device({}).',
-                    'Either specify a new configuration or a new device operational status.',
-                ])
-                msg = msg.format(device_id)
-                raise ServiceException(grpc.StatusCode.ABORTED, msg)
-
-            db_device.update(update_attributes=update_attributes)
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = DeviceId(**db_device.dump_id())
-            LOGGER.debug('ConfigureDevice reply: {}'.format(str(reply)))
-            CONFIGUREDEVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('ConfigureDevice exception')
-            CONFIGUREDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('ConfigureDevice exception')
-            CONFIGUREDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @DELETEDEVICE_HISTOGRAM_DURATION.time()
-    def DeleteDevice(self, request : DeviceId, grpc_context : grpc.ServicerContext) -> Empty:
-        DELETEDEVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('DeleteDevice request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            device_id = check_device_id_request('DeleteDevice', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-            db_topology.device(device_id).delete()
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = Empty()
-            LOGGER.debug('DeleteDevice reply: {}'.format(str(reply)))
-            DELETEDEVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('DeleteDevice exception')
-            DELETEDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('DeleteDevice exception')
-            DELETEDEVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @MONITORDEVICEKPI_HISTOGRAM_DURATION.time()
-    def MonitorDeviceKpi(self, request : MonitoringSettings, grpc_context : grpc.ServicerContext) -> Empty:
-        MONITORDEVICEKPI_COUNTER_STARTED.inc()
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def AddDevice(self, request : Device, context : grpc.ServicerContext) -> DeviceId:
+        device_id = request.device_id
+        device_uuid = device_id.device_uuid.uuid
+
+        connection_config_rules = {}
+        unexpected_config_rules = []
+        for config_rule in request.device_config.config_rules:
+            if (config_rule.action == ConfigActionEnum.CONFIGACTION_SET) and \
+               (config_rule.resource_key.startswith('_connect/')):
+                connection_config_rules[config_rule.resource_key.replace('_connect/', '')] = config_rule.resource_value
+            else:
+                unexpected_config_rules.append(config_rule)
+        if len(unexpected_config_rules) > 0:
+            unexpected_config_rules = MessageToDict(
+                request.device_config, including_default_value_fields=True,
+                preserving_proto_field_name=True, use_integers_for_enums=True)
+            unexpected_config_rules = unexpected_config_rules['config_rules']
+            unexpected_config_rules = list(filter(
+                lambda cr: cr['resource_key'].replace('_connect/', '') not in connection_config_rules,
+                unexpected_config_rules))
+            str_unexpected_config_rules = json.dumps(unexpected_config_rules, sort_keys=True)
+            raise InvalidArgumentException(
+                'device.device_config.config_rules', str_unexpected_config_rules,
+                extra_details='RPC method AddDevice only accepts connection Config Rules (resource_key starting '\
+                              'with the "_connect/" prefix); other rules must be configured after adding the device.')
+
+        if len(request.device_endpoints) > 0:
+            unexpected_endpoints = []
+            for device_endpoint in request.device_endpoints:
+                unexpected_endpoints.append(MessageToDict(
+                    device_endpoint, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
+            str_unexpected_endpoints = json.dumps(unexpected_endpoints, sort_keys=True)
+            raise InvalidArgumentException(
+                'device.device_endpoints', str_unexpected_endpoints,
+                extra_details='RPC method AddDevice does not accept Endpoints. Endpoints are discovered through '\
+                              'interrogation of the physical device.')
+
+        # Remove device configuration
+        json_request = MessageToDict(
+            request, including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=True)
+        json_request['device_config'] = {}
+        request = Device(**json_request)
+
+        sync_device_from_context(device_uuid, self.context_client, self.database)
+        db_device,_ = update_device_in_local_database(self.database, request)
+
+        driver_filter_fields = get_device_driver_filter_fields(db_device)
+
+        #LOGGER.info('[AddDevice] connection_config_rules = {:s}'.format(str(connection_config_rules)))
+        address  = connection_config_rules.pop('address', None)
+        port     = connection_config_rules.pop('port', None)
+        settings = connection_config_rules.pop('settings', '{}')
         try:
-            LOGGER.debug('MonitorDeviceKpi request: {}'.format(str(request)))
-
-            # ---- Implement method ------------------------------------------------------------------------------------
-
-            reply = Empty()
-            LOGGER.debug('MonitorDeviceKpi reply: {}'.format(str(reply)))
-            MONITORDEVICEKPI_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('MonitorDeviceKpi exception')
-            MONITORDEVICEKPI_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:
-            LOGGER.exception('MonitorDeviceKpi exception')
-            MONITORDEVICEKPI_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+            settings = json.loads(settings)
+        except ValueError as e:
+            raise InvalidArgumentException(
+                'device.device_config.config_rules[settings]', settings,
+                extra_details='The "_connect/settings" Config Rule provided cannot be decoded as a JSON dictionary.') from e
+        driver : _Driver = self.driver_instance_cache.get(
+            device_uuid, filter_fields=driver_filter_fields, address=address, port=port, settings=settings)
+        driver.Connect()
+
+        endpoints = driver.GetConfig([RESOURCE_ENDPOINTS])
+        #LOGGER.info('[AddDevice] endpoints = {:s}'.format(str(endpoints)))
+        for resource_key, resource_value in endpoints:
+            endpoint_uuid = resource_value.get('uuid')
+            endpoint_type = resource_value.get('type')
+            str_endpoint_key = key_to_str([device_uuid, endpoint_uuid])
+            db_endpoint, _ = update_or_create_object(
+                self.database, EndPointModel, str_endpoint_key, {
+                'device_fk'    : db_device,
+                'endpoint_uuid': endpoint_uuid,
+                'endpoint_type': endpoint_type,
+                'resource_key' : resource_key,
+            })
+            sample_types = resource_value.get('sample_types', {})
+            for sample_type, monitor_resource_key in sample_types.items():
+                str_endpoint_monitor_key = key_to_str([str_endpoint_key, str(sample_type)])
+                update_or_create_object(self.database, EndPointMonitorModel, str_endpoint_monitor_key, {
+                    'endpoint_fk'    : db_endpoint,
+                    'resource_key'   : monitor_resource_key,
+                    'kpi_sample_type': grpc_to_enum__kpi_sample_type(sample_type),
+                })
+
+        running_config_rules = driver.GetConfig()
+        running_config_rules = [
+            (ORM_ConfigActionEnum.SET, config_rule[0], json.dumps(config_rule[1], sort_keys=True))
+            for config_rule in running_config_rules
+        ]
+        #for running_config_rule in running_config_rules:
+        #    LOGGER.info('[AddDevice] running_config_rule: {:s}'.format(str(running_config_rule)))
+        update_config(self.database, device_uuid, 'running', running_config_rules)
+
+        initial_config_rules = driver.GetInitialConfig()
+        update_config(self.database, device_uuid, 'initial', initial_config_rules)
+
+        #LOGGER.info('[AddDevice] db_device = {:s}'.format(str(db_device.dump(
+        #    include_config_rules=True, include_drivers=True, include_endpoints=True))))
+
+        sync_device_to_context(db_device, self.context_client)
+        return DeviceId(**db_device.dump_id())
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ConfigureDevice(self, request : Device, context : grpc.ServicerContext) -> DeviceId:
+        device_id = request.device_id
+        device_uuid = device_id.device_uuid.uuid
+
+        sync_device_from_context(device_uuid, self.context_client, self.database)
+
+        context_config_rules = get_config_rules(self.database, device_uuid, 'running')
+        context_config_rules = {config_rule[1]: config_rule[2] for config_rule in context_config_rules}
+        #LOGGER.info('[ConfigureDevice] context_config_rules = {:s}'.format(str(context_config_rules)))
+
+        db_device,_ = update_device_in_local_database(self.database, request)
+
+        request_config_rules = grpc_config_rules_to_raw(request.device_config.config_rules)
+        #LOGGER.info('[ConfigureDevice] request_config_rules = {:s}'.format(str(request_config_rules)))
+
+        resources_to_set    : List[Tuple[str, Any]] = [] # key, value
+        resources_to_delete : List[Tuple[str, Any]] = [] # key, value
+
+        for config_rule in request_config_rules:
+            action, key, value = config_rule
+            if action == ORM_ConfigActionEnum.SET:
+                if (key not in context_config_rules) or (context_config_rules[key] != value):
+                    resources_to_set.append((key, value))
+            elif action == ORM_ConfigActionEnum.DELETE:
+                if key in context_config_rules:
+                    resources_to_delete.append((key, value))
+
+        #LOGGER.info('[ConfigureDevice] resources_to_set = {:s}'.format(str(resources_to_set)))
+        #LOGGER.info('[ConfigureDevice] resources_to_delete = {:s}'.format(str(resources_to_delete)))
+
+        # TODO: use of datastores (might be virtual ones) to enable rollbacks
+
+        errors = []
+
+        driver : _Driver = self.driver_instance_cache.get(device_uuid)
+        if driver is None:
+            errors.append('Device({:s}) has not been added to this Device instance'.format(str(device_uuid)))
+
+        if len(errors) == 0:
+            results_setconfig = driver.SetConfig(resources_to_set)
+            errors.extend(check_set_errors(resources_to_set, results_setconfig))
+
+        if len(errors) == 0:
+            results_deleteconfig = driver.DeleteConfig(resources_to_delete)
+            errors.extend(check_delete_errors(resources_to_delete, results_deleteconfig))
+
+        if len(errors) > 0:
+            raise OperationFailedException('ConfigureDevice', extra_details=errors)
+
+        running_config_rules = driver.GetConfig()
+        running_config_rules = [
+            (ORM_ConfigActionEnum.SET, config_rule[0], json.dumps(config_rule[1], sort_keys=True))
+            for config_rule in running_config_rules
+        ]
+        #for running_config_rule in running_config_rules:
+        #    LOGGER.info('[ConfigureDevice] running_config_rule: {:s}'.format(str(running_config_rule)))
+        update_config(self.database, device_uuid, 'running', running_config_rules)
+
+        sync_device_to_context(db_device, self.context_client)
+        return DeviceId(**db_device.dump_id())
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def DeleteDevice(self, request : DeviceId, context : grpc.ServicerContext) -> Empty:
+        device_uuid = request.device_uuid.uuid
+
+        sync_device_from_context(device_uuid, self.context_client, self.database)
+        db_device : DeviceModel = get_object(self.database, DeviceModel, device_uuid, raise_if_not_found=False)
+        if db_device is None: return Empty()
+
+        self.driver_instance_cache.delete(device_uuid)
+        delete_device_from_context(db_device, self.context_client)
+
+        for db_kpi_pk,_ in db_device.references(KpiModel):
+            KpiModel(self.database, db_kpi_pk).delete()
+
+        for db_endpoint_pk,_ in db_device.references(EndPointModel):
+            db_endpoint = EndPointModel(self.database, db_endpoint_pk)
+            for db_endpoint_monitor_pk,_ in db_endpoint.references(EndPointMonitorModel):
+                EndPointMonitorModel(self.database, db_endpoint_monitor_pk).delete()
+            db_endpoint.delete()
+
+        for db_driver_pk,_ in db_device.references(DriverModel):
+            DriverModel(self.database, db_driver_pk).delete()
+
+        db_initial_config = ConfigModel(self.database, db_device.device_initial_config_fk)
+        for db_config_rule_pk,_ in db_initial_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_running_config = ConfigModel(self.database, db_device.device_running_config_fk)
+        for db_config_rule_pk,_ in db_running_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_device.delete()
+        db_initial_config.delete()
+        db_running_config.delete()
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetInitialConfig(self, request : DeviceId, context : grpc.ServicerContext) -> DeviceConfig:
+        device_uuid = request.device_uuid.uuid
+
+        sync_device_from_context(device_uuid, self.context_client, self.database)
+        db_device : DeviceModel = get_object(self.database, DeviceModel, device_uuid, raise_if_not_found=False)
+
+        config_rules = {} if db_device is None else db_device.dump_initial_config()
+        return DeviceConfig(config_rules=config_rules)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def MonitorDeviceKpi(self, request : MonitoringSettings, context : grpc.ServicerContext) -> Empty:
+        kpi_uuid = request.kpi_id.kpi_id.uuid
+
+        subscribe = (request.sampling_duration_s > 0.0) and (request.sampling_interval_s > 0.0)
+        if subscribe:
+            device_uuid = request.kpi_descriptor.device_id.device_uuid.uuid
+
+            db_device : DeviceModel = get_object(self.database, DeviceModel, device_uuid, raise_if_not_found=False)
+            if db_device is None:
+                msg = 'Device({:s}) has not been added to this Device instance.'.format(str(device_uuid))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            endpoint_id = request.kpi_descriptor.endpoint_id
+            endpoint_uuid = endpoint_id.endpoint_uuid.uuid
+            str_endpoint_key = key_to_str([device_uuid, endpoint_uuid])
+            endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+            endpoint_topology_uuid = endpoint_id.topology_id.topology_uuid.uuid
+            if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+                str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+                str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+            db_endpoint : EndPointModel = get_object(
+                self.database, EndPointModel, str_endpoint_key, raise_if_not_found=False)
+            if db_endpoint is None:
+                msg = 'Device({:s})/EndPoint({:s}) not found. EndPointKey({:s})'.format(
+                    str(device_uuid), str(endpoint_uuid), str(str_endpoint_key))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            driver : _Driver = self.driver_instance_cache.get(device_uuid)
+            if driver is None:
+                msg = 'Device({:s}) has not been added to this Device instance'.format(str(device_uuid))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            sample_type = request.kpi_descriptor.kpi_sample_type
+
+            attributes = {
+                'kpi_uuid'         : request.kpi_id.kpi_id.uuid,
+                'kpi_description'  : request.kpi_descriptor.kpi_description,
+                'kpi_sample_type'  : grpc_to_enum__kpi_sample_type(sample_type),
+                'device_fk'        : db_device,
+                'endpoint_fk'      : db_endpoint,
+                'sampling_duration': request.sampling_duration_s,
+                'sampling_interval': request.sampling_interval_s,
+            }
+            result : Tuple[KpiModel, bool] = update_or_create_object(self.database, KpiModel, kpi_uuid, attributes)
+            db_kpi, updated = result
+
+            str_endpoint_monitor_key = key_to_str([str_endpoint_key, str(sample_type)])
+            db_endpoint_monitor : EndPointMonitorModel = get_object(
+                self.database, EndPointMonitorModel, str_endpoint_monitor_key, raise_if_not_found=False)
+            if db_endpoint_monitor is None:
+                msg = 'SampleType({:s}/{:s}) not supported for EndPoint({:s}).'.format(
+                    str(sample_type), str(KpiSampleType.Name(sample_type).upper().replace('KPISAMPLETYPE_', '')),
+                    str(endpoint_uuid))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            str_endpoint_monitor_kpi_key = key_to_str([device_uuid, db_endpoint_monitor.resource_key], separator=':')
+            attributes = {
+                'endpoint_monitor_fk': db_endpoint_monitor,
+                'kpi_fk'             : db_kpi,
+            }
+            result : Tuple[EndPointMonitorKpiModel, bool] = update_or_create_object(
+                self.database, EndPointMonitorKpiModel, str_endpoint_monitor_kpi_key, attributes)
+            db_endpoint_monitor_kpi, updated = result
+
+            resources_to_subscribe : List[Tuple[str, float, float]] = [] # key, sampling_duration, sampling_interval
+            resources_to_subscribe.append(
+                (db_endpoint_monitor.resource_key, db_kpi.sampling_duration, db_kpi.sampling_interval))
+            results_subscribestate = driver.SubscribeState(resources_to_subscribe)
+            errors = check_subscribe_errors(resources_to_subscribe, results_subscribestate)
+            if len(errors) > 0: raise OperationFailedException('MonitorDeviceKpi', extra_details=errors)
+
+            self.monitoring_loops.add(device_uuid, driver)
+
+        else:
+            db_kpi : KpiModel = get_object(
+                self.database, KpiModel, kpi_uuid, raise_if_not_found=False)
+            if db_kpi is None:
+                msg = 'Kpi({:s}) not found'.format(str(kpi_uuid))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            db_device : DeviceModel = get_object(
+                self.database, DeviceModel, db_kpi.device_fk, raise_if_not_found=False)
+            if db_device is None:
+                msg = 'Device({:s}) not found'.format(str(db_kpi.device_fk))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+            device_uuid = db_device.device_uuid
+
+            db_endpoint : EndPointModel = get_object(
+                self.database, EndPointModel, db_kpi.endpoint_fk, raise_if_not_found=False)
+            if db_endpoint is None:
+                msg = 'EndPoint({:s}) not found'.format(str(db_kpi.endpoint_fk))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+            endpoint_uuid = db_endpoint.endpoint_uuid
+            str_endpoint_key = db_endpoint.pk
+
+            kpi_sample_type : ORM_KpiSampleTypeEnum = db_kpi.kpi_sample_type
+            sample_type = kpi_sample_type.value
+            str_endpoint_monitor_key = key_to_str([str_endpoint_key, str(sample_type)])
+            db_endpoint_monitor : EndPointMonitorModel = get_object(
+                self.database, EndPointMonitorModel, str_endpoint_monitor_key, raise_if_not_found=False)
+            if db_endpoint_monitor is None:
+                msg = 'EndPointMonitor({:s}) not found.'.format(str(str_endpoint_monitor_key))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            str_endpoint_monitor_kpi_key = key_to_str([device_uuid, db_endpoint_monitor.resource_key], separator=':')
+            db_endpoint_monitor_kpi : EndPointMonitorKpiModel = get_object(
+                self.database, EndPointMonitorKpiModel, str_endpoint_monitor_kpi_key, raise_if_not_found=False)
+            if db_endpoint_monitor_kpi is None:
+                msg = 'EndPointMonitorKpi({:s}) not found.'.format(str(str_endpoint_monitor_kpi_key))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            resources_to_unsubscribe : List[Tuple[str, float, float]] = [] # key, sampling_duration, sampling_interval
+            resources_to_unsubscribe.append(
+                (db_endpoint_monitor.resource_key, db_kpi.sampling_duration, db_kpi.sampling_interval))
+
+            driver : _Driver = self.driver_instance_cache.get(device_uuid)
+            if driver is None:
+                msg = 'Device({:s}) has not been added to this Device instance'.format(str(device_uuid))
+                raise OperationFailedException('MonitorDeviceKpi', extra_details=msg)
+
+            results_unsubscribestate = driver.UnsubscribeState(resources_to_unsubscribe)
+            errors = check_unsubscribe_errors(resources_to_unsubscribe, results_unsubscribestate)
+            if len(errors) > 0: raise OperationFailedException('MonitorDeviceKpi', extra_details=errors)
+
+            db_endpoint_monitor_kpi.delete()
+            db_kpi.delete()
+
+            # There is one monitoring loop per device; keep them active since they are re-used by different monitoring
+            # requests.
+            #self.monitoring_loops.remove(device_uuid)
+
+        return Empty()
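
For reference, the reworked AddDevice only accepts the driver connection parameters as Config Rules. A minimal, illustrative sketch of their shape follows; the keys inside the settings payload are hypothetical and depend on the selected driver:

    import json

    # Only SET rules whose resource_key starts with '_connect/' are accepted by AddDevice;
    # any other configuration must be applied afterwards through ConfigureDevice.
    connect_rules = [
        {'action': 'CONFIGACTION_SET', 'resource_key': '_connect/address',  'resource_value': '127.0.0.1'},
        {'action': 'CONFIGACTION_SET', 'resource_key': '_connect/port',     'resource_value': '830'},
        {'action': 'CONFIGACTION_SET', 'resource_key': '_connect/settings', 'resource_value': json.dumps({
            'timeout': 120,  # hypothetical driver-specific setting
        })},
    ]
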
diff --git a/src/device/service/MonitoringLoops.py b/src/device/service/MonitoringLoops.py
index 658e1de0a99db161fabb88733182d3386b165cbd..2e96e8df1dfd7a050aae004b5d5a41bae469e438 100644
--- a/src/device/service/MonitoringLoops.py
+++ b/src/device/service/MonitoringLoops.py
@@ -1,83 +1,134 @@
-#import logging, queue, threading
-#from typing import Dict
-#from monitoring.client.monitoring_client import MonitoringClient
-#from monitoring.proto.monitoring_pb2 import Kpi
-#from .driver_api._Driver import _Driver
-#
-#LOGGER = logging.getLogger(__name__)
-#QUEUE_GET_WAIT_TIMEOUT = 0.5
-#
-#class MonitoringLoop:
-#    def __init__(self, driver : _Driver, samples_queue : queue.Queue) -> None:
-#        self._driver = driver
-#        self._samples_queue = samples_queue
-#        self._running = threading.Event()
-#        self._terminate = threading.Event()
-#        self._samples_stream = self._driver.GetState(blocking=True)
-#        self._collector_thread = threading.Thread(target=self._collect, daemon=False)
-#
-#    def _collect(self) -> None:
-#        for sample in self._samples_stream:
-#            if self._terminate.is_set(): break
-#            LOGGER.info('[MonitoringLoop:_collect] sample={:s}'.format(str(sample)))
-#            # TODO: add timestamp (if not present)
-#            self._samples_queue.put_nowait(sample)
-#
-#    def start(self):
-#        self._collector_thread.start()
-#        self._running.set()
-#
-#    @property
-#    def is_running(self): return self._running.is_set()
-#
-#    def stop(self):
-#        self._terminate.set()
-#        self._samples_stream.cancel()
-#        self._collector_thread.join()
-#
-#class MonitoringLoops:
-#    def __init__(self, monitoring_client : MonitoringClient) -> None:
-#        self._monitoring_client = monitoring_client
-#        self._samples_queue = queue.Queue()
-#        self._running = threading.Event()
-#        self._terminate = threading.Event()
-#        self._lock = threading.Lock()
-#        self._device_uuid__to__monitoring_loop : Dict[str, MonitoringLoop] = {}
-#        self._exporter_thread = threading.Thread(target=self._export, daemon=False)
-#
-#    def add(self, device_uuid : str, driver : _Driver) -> None:
-#        with self._lock:
-#            monitoring_loop = self._device_uuid__to__monitoring_loop.get(device_uuid)
-#            if (monitoring_loop is not None) and monitoring_loop.is_running: return
-#            monitoring_loop = MonitoringLoop(driver, self._samples_queue)
-#            self._device_uuid__to__monitoring_loop[device_uuid] = monitoring_loop
-#            monitoring_loop.start()
-#
-#    def remove(self, device_uuid : str) -> None:
-#        with self._lock:
-#            monitoring_loop = self._device_uuid__to__monitoring_loop.get(device_uuid)
-#            if monitoring_loop is None: return
-#            if monitoring_loop.is_running: monitoring_loop.stop()
-#            self._device_uuid__to__monitoring_loop.pop(device_uuid, None)
-#
-#    def start(self):
-#        self._exporter_thread.start()
-#        self._running.set()
-#
-#    @property
-#    def is_running(self): return self._running.is_set()
-#
-#    def stop(self):
-#        self._terminate.set()
-#        self._exporter_thread.join()
-#
-#    def _export(self) -> None:
-#        while not self._terminate.is_set():
-#            try:
-#                sample = self._samples_queue.get(block=True, timeout=QUEUE_GET_WAIT_TIMEOUT)
-#                LOGGER.info('[MonitoringLoops:_export] sample={:s}'.format(str(sample)))
-#            except queue.Empty:
-#                continue
-#            # TODO: find in database the KpiId, format KPI and send to Monitoring
-#            kpi_data = {}
-#            self._monitoring_client.IncludeKpi(Kpi(**kpi_data))
+import logging, queue, threading
+from typing import Dict
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
+from device.service.database.RelationModels import EndPointMonitorKpiModel
+from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.proto.monitoring_pb2 import Kpi
+from .database.KpiModel import KpiModel
+from .driver_api._Driver import _Driver
+
+LOGGER = logging.getLogger(__name__)
+QUEUE_GET_WAIT_TIMEOUT = 0.5
+
+class MonitoringLoop:
+    def __init__(self, device_uuid : str, driver : _Driver, samples_queue : queue.Queue) -> None:
+        self._device_uuid = device_uuid
+        self._driver = driver
+        self._samples_queue = samples_queue
+        self._running = threading.Event()
+        self._terminate = threading.Event()
+        self._samples_stream = self._driver.GetState(blocking=True)
+        self._collector_thread = threading.Thread(target=self._collect, daemon=True)
+
+    def _collect(self) -> None:
+        for sample in self._samples_stream:
+            if self._terminate.is_set(): break
+            sample = (self._device_uuid, *sample)
+            self._samples_queue.put_nowait(sample)
+
+    def start(self):
+        self._collector_thread.start()
+        self._running.set()
+
+    @property
+    def is_running(self): return self._running.is_set()
+
+    def stop(self):
+        self._terminate.set()
+        self._samples_stream.cancel()
+        self._collector_thread.join()
+
+class MonitoringLoops:
+    def __init__(self, monitoring_client : MonitoringClient, database : Database) -> None:
+        self._monitoring_client = monitoring_client
+        self._database = database
+        self._samples_queue = queue.Queue()
+        self._running = threading.Event()
+        self._terminate = threading.Event()
+        self._lock = threading.Lock()
+        self._device_uuid__to__monitoring_loop : Dict[str, MonitoringLoop] = {}
+        self._exporter_thread = threading.Thread(target=self._export, daemon=True)
+
+    def add(self, device_uuid : str, driver : _Driver) -> None:
+        with self._lock:
+            monitoring_loop = self._device_uuid__to__monitoring_loop.get(device_uuid)
+            if (monitoring_loop is not None) and monitoring_loop.is_running: return
+            monitoring_loop = MonitoringLoop(device_uuid, driver, self._samples_queue)
+            self._device_uuid__to__monitoring_loop[device_uuid] = monitoring_loop
+            monitoring_loop.start()
+
+    def remove(self, device_uuid : str) -> None:
+        with self._lock:
+            monitoring_loop = self._device_uuid__to__monitoring_loop.get(device_uuid)
+            if monitoring_loop is None: return
+            if monitoring_loop.is_running: monitoring_loop.stop()
+            self._device_uuid__to__monitoring_loop.pop(device_uuid, None)
+
+    def start(self):
+        self._exporter_thread.start()
+        self._running.set()
+
+    @property
+    def is_running(self): return self._running.is_set()
+
+    def stop(self):
+        self._terminate.set()
+        self._exporter_thread.join()
+
+    def _export(self) -> None:
+        if self._database is None:
+            LOGGER.error('[MonitoringLoops:_export] Database not set. Terminating Exporter.')
+            return
+
+        while not self._terminate.is_set():
+            try:
+                sample = self._samples_queue.get(block=True, timeout=QUEUE_GET_WAIT_TIMEOUT)
+                #LOGGER.debug('[MonitoringLoops:_export] sample={:s}'.format(str(sample)))
+            except queue.Empty:
+                continue
+
+            device_uuid, timestamp, endpoint_monitor_resource_key, value = sample
+            str_endpoint_monitor_kpi_key = key_to_str([device_uuid, endpoint_monitor_resource_key], separator=':')
+
+            #db_entries = self._database.dump()
+            #LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+            #for db_entry in db_entries:
+            #    LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+            #LOGGER.info('-----------------------------------------------------------')
+
+            db_endpoint_monitor_kpi : EndPointMonitorKpiModel = get_object(
+                self._database, EndPointMonitorKpiModel, str_endpoint_monitor_kpi_key, raise_if_not_found=False)
+            if db_endpoint_monitor_kpi is None:
+                LOGGER.warning('EndPointMonitorKpi({:s}) not found'.format(str_endpoint_monitor_kpi_key))
+                continue
+
+            str_kpi_key = db_endpoint_monitor_kpi.kpi_fk
+            db_kpi : KpiModel = get_object(
+                self._database, KpiModel, str_kpi_key, raise_if_not_found=False)
+            if db_kpi is None:
+                LOGGER.warning('Kpi({:s}) not found'.format(str_kpi_key))
+                continue
+
+            if isinstance(value, bool): # must be checked before int: bool is a subclass of int
+                kpi_value_field_name = 'boolVal'
+                kpi_value_field_cast = bool
+            elif isinstance(value, int):
+                kpi_value_field_name = 'intVal'
+                kpi_value_field_cast = int
+            elif isinstance(value, float):
+                kpi_value_field_name = 'floatVal'
+                kpi_value_field_cast = float
+            else:
+                kpi_value_field_name = 'stringVal'
+                kpi_value_field_cast = str
+
+            try:
+                self._monitoring_client.IncludeKpi(Kpi(**{
+                    'kpi_id'   : {'kpi_id': {'uuid': db_kpi.kpi_uuid}},
+                    'timestamp': str(timestamp),
+                    'kpi_value': {kpi_value_field_name: kpi_value_field_cast(value)}
+                }))
+            except: # pylint: disable=bare-except
+                LOGGER.exception('Unable to format/send Kpi')
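
The exporter loop consumes tuples shaped as (device_uuid, timestamp, resource_key, value): drivers emit (timestamp, resource_key, value) and MonitoringLoop prepends the device UUID. A minimal sketch of how such a sample maps onto the EndPointMonitorKpi key (approximating key_to_str with a plain join; values are illustrative):

    import time

    driver_sample = (time.time(), '/endpoints/endpoint[13/1/2]/state/bytes_received', 1234.0)
    queued_sample = ('dev-1', *driver_sample)   # as built by MonitoringLoop._collect

    device_uuid, timestamp, endpoint_monitor_resource_key, value = queued_sample
    str_endpoint_monitor_kpi_key = ':'.join([device_uuid, endpoint_monitor_resource_key])
    # -> 'dev-1:/endpoints/endpoint[13/1/2]/state/bytes_received', looked up as an EndPointMonitorKpiModel
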
diff --git a/src/device/service/__main__.py b/src/device/service/__main__.py
index 77572c51f9064712c2d9e4d9ccc9e943fe0df1c7..86f9b3ec1091883df068d3a7d2d4409dcffc9e90 100644
--- a/src/device/service/__main__.py
+++ b/src/device/service/__main__.py
@@ -5,9 +5,8 @@ from context.client.ContextClient import ContextClient
 from device.Config import (
     CONTEXT_SERVICE_HOST, CONTEXT_SERVICE_PORT, GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL,
     METRICS_PORT, MONITORING_SERVICE_HOST, MONITORING_SERVICE_PORT)
-#from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.client.monitoring_client import MonitoringClient
 from .DeviceService import DeviceService
-#from .MonitoringLoops import MonitoringLoops
 from .driver_api.DriverFactory import DriverFactory
 from .driver_api.DriverInstanceCache import DriverInstanceCache
 from .drivers import DRIVERS
@@ -49,31 +48,26 @@ def main():
             str(context_service_host), str(context_service_port)))
     context_client = ContextClient(context_service_host, context_service_port)
 
-    ## Initialize Monitoring Client
-    #if monitoring_service_host is None or monitoring_service_port is None:
-    #    raise Exception('Wrong address({:s}):port({:s}) of Monitoring component'.format(
-    #        str(monitoring_service_host), str(monitoring_service_port)))
-    #monitoring_client = MonitoringClient(monitoring_service_host, monitoring_service_port)
+    # Initialize Monitoring Client
+    if monitoring_service_host is None or monitoring_service_port is None:
+        raise Exception('Wrong address({:s}):port({:s}) of Monitoring component'.format(
+            str(monitoring_service_host), str(monitoring_service_port)))
+    monitoring_client = MonitoringClient(monitoring_service_host, monitoring_service_port)
 
     # Initialize Driver framework
     driver_factory = DriverFactory(DRIVERS)
     driver_instance_cache = DriverInstanceCache(driver_factory)
-    #monitoring_loops = MonitoringLoops(monitoring_client)
 
     # Starting device service
     grpc_service = DeviceService(
-        context_client, driver_instance_cache,
-        #monitoring_loops,
-        port=grpc_service_port, max_workers=max_workers,
+        context_client, monitoring_client, driver_instance_cache, port=grpc_service_port, max_workers=max_workers,
         grace_period=grace_period)
     grpc_service.start()
-    #monitoring_loops.start()
 
     # Wait for Ctrl+C or termination signal
     while not terminate.wait(timeout=0.1): pass
 
     LOGGER.info('Terminating...')
-    #monitoring_loops.stop()
     grpc_service.stop()
     driver_instance_cache.terminate()
 
diff --git a/src/device/service/database/DatabaseTools.py b/src/device/service/database/DatabaseTools.py
index 5b43aae70af054f5d6da0bd92d9f2e59d152dc84..27a5f89a579451e9e512bc7288ca8690a25de27d 100644
--- a/src/device/service/database/DatabaseTools.py
+++ b/src/device/service/database/DatabaseTools.py
@@ -10,7 +10,7 @@ from device.service.driver_api.FilterFields import FilterFieldEnum
 from .ConfigModel import delete_all_config_rules, grpc_config_rules_to_raw, update_config
 from .ContextModel import ContextModel
 from .DeviceModel import DeviceModel, DriverModel, grpc_to_enum__device_operational_status, set_drivers
-from .EndPointModel import EndPointModel
+from .EndPointModel import EndPointModel, set_endpoint_monitors
 from .TopologyModel import TopologyModel
 
 def update_device_in_local_database(database : Database, device : Device) -> Tuple[DeviceModel, bool]:
@@ -74,7 +74,10 @@ def update_device_in_local_database(database : Database, device : Device) -> Tup
 
         result : Tuple[EndPointModel, bool] = update_or_create_object(
             database, EndPointModel, str_endpoint_key, endpoint_attributes)
-        _, db_endpoint_updated = result
+        db_endpoint, db_endpoint_updated = result
+
+        set_endpoint_monitors(database, db_endpoint, endpoint.kpi_sample_types)
+
         updated = updated or db_endpoint_updated
 
     return db_device, updated
diff --git a/src/device/service/database/EndPointModel.py b/src/device/service/database/EndPointModel.py
index 38b87d6f37c4e99dd3790f4d8802acd03873f77d..7e0832c51f70cabe49cd4b19d7c23bf923bcf98e 100644
--- a/src/device/service/database/EndPointModel.py
+++ b/src/device/service/database/EndPointModel.py
@@ -1,10 +1,14 @@
 import logging
-from typing import Dict
+from typing import Dict, List
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
 from common.orm.fields.ForeignKeyField import ForeignKeyField
 from common.orm.fields.PrimaryKeyField import PrimaryKeyField
 from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
 from .DeviceModel import DeviceModel
+from .KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type
 from .TopologyModel import TopologyModel
 
 LOGGER = logging.getLogger(__name__)
@@ -15,6 +19,7 @@ class EndPointModel(Model):
     device_fk = ForeignKeyField(DeviceModel)
     endpoint_uuid = StringField(required=True, allow_empty=False)
     endpoint_type = StringField()
+    resource_key = StringField(required=True, allow_empty=False)
 
     def dump_id(self) -> Dict:
         device_id = DeviceModel(self.database, self.device_fk).dump_id()
@@ -26,8 +31,36 @@ class EndPointModel(Model):
             result['topology_id'] = TopologyModel(self.database, self.topology_fk).dump_id()
         return result
 
-    def dump(self) -> Dict:
-        return {
+    def dump_kpi_sample_types(self) -> List[int]:
+        db_kpi_sample_type_pks = self.references(EndPointMonitorModel)
+        return [EndPointMonitorModel(self.database, pk).dump() for pk,_ in db_kpi_sample_type_pks]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_kpi_sample_types=True
+        ) -> Dict:
+        result = {
             'endpoint_id': self.dump_id(),
             'endpoint_type': self.endpoint_type,
         }
+        if include_kpi_sample_types: result['kpi_sample_types'] = self.dump_kpi_sample_types()
+        return result
+
+class EndPointMonitorModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    endpoint_fk = ForeignKeyField(EndPointModel)
+    resource_key = StringField(required=True, allow_empty=True)
+    kpi_sample_type = EnumeratedField(ORM_KpiSampleTypeEnum, required=True)
+
+    def dump(self) -> int:
+        return self.kpi_sample_type.value
+
+def set_endpoint_monitors(database : Database, db_endpoint : EndPointModel, grpc_endpoint_kpi_sample_types):
+    db_endpoint_pk = db_endpoint.pk
+    for kpi_sample_type in grpc_endpoint_kpi_sample_types:
+        orm_kpi_sample_type = grpc_to_enum__kpi_sample_type(kpi_sample_type)
+        str_endpoint_kpi_sample_type_key = key_to_str([db_endpoint_pk, orm_kpi_sample_type.name])
+        db_endpoint_kpi_sample_type = EndPointMonitorModel(database, str_endpoint_kpi_sample_type_key)
+        db_endpoint_kpi_sample_type.endpoint_fk = db_endpoint
+        db_endpoint_kpi_sample_type.resource_key = '' # during initialization, allow empty value
+        db_endpoint_kpi_sample_type.kpi_sample_type = orm_kpi_sample_type
+        db_endpoint_kpi_sample_type.save()
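
With set_endpoint_monitors in place, an endpoint record carries one EndPointMonitorModel per advertised sample type, and EndPointModel.dump() now exposes them as a plain list of KpiSampleType values. A rough, illustrative sketch of the resulting dictionary (identifiers and numeric values are placeholders):

    endpoint_dump = {
        'endpoint_id'     : {...},        # device/topology/endpoint identifiers, unchanged
        'endpoint_type'   : 'copper',     # illustrative
        'kpi_sample_types': [101, 102],   # KpiSampleType enum values; numbers illustrative
    }
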
diff --git a/src/device/service/database/KpiModel.py b/src/device/service/database/KpiModel.py
index 1bed9fc7a169665eb459b295a6fc9903513e13f0..3ec78f60f233f5492d4780009e7cbf815f5e5248 100644
--- a/src/device/service/database/KpiModel.py
+++ b/src/device/service/database/KpiModel.py
@@ -8,7 +8,7 @@ from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
 from .DeviceModel import DeviceModel
 from .EndPointModel import EndPointModel
-from .KpiSampleType import ORM_KpiSampleType
+from .KpiSampleType import ORM_KpiSampleTypeEnum
 
 LOGGER = logging.getLogger(__name__)
 
@@ -16,7 +16,7 @@ class KpiModel(Model):
     pk = PrimaryKeyField()
     kpi_uuid = StringField(required=True, allow_empty=False)
     kpi_description = StringField(required=False, allow_empty=True)
-    kpi_sample_type = EnumeratedField(ORM_KpiSampleType, required=True)
+    kpi_sample_type = EnumeratedField(ORM_KpiSampleTypeEnum, required=True)
     device_fk = ForeignKeyField(DeviceModel)
     endpoint_fk = ForeignKeyField(EndPointModel)
     sampling_duration = FloatField(min_value=0, required=True)
diff --git a/src/device/service/database/KpiSampleType.py b/src/device/service/database/KpiSampleType.py
index e5c4c5bbc0407e7dd3650ca4ff2f2e95a1202472..24ac67200e85bb7fe29cf0971de020351b2b45da 100644
--- a/src/device/service/database/KpiSampleType.py
+++ b/src/device/service/database/KpiSampleType.py
@@ -3,12 +3,12 @@ from enum import Enum
 from device.proto.kpi_sample_types_pb2 import KpiSampleType
 from .Tools import grpc_to_enum
 
-class ORM_KpiSampleType(Enum):
-    UNKNOWN             = KpiSampleType.UNKNOWN
-    PACKETS_TRANSMITTED = KpiSampleType.PACKETS_TRANSMITTED
-    PACKETS_RECEIVED    = KpiSampleType.PACKETS_RECEIVED
-    BYTES_TRANSMITTED   = KpiSampleType.BYTES_TRANSMITTED
-    BYTES_RECEIVED      = KpiSampleType.BYTES_RECEIVED
+class ORM_KpiSampleTypeEnum(Enum):
+    UNKNOWN             = KpiSampleType.KPISAMPLETYPE_UNKNOWN
+    PACKETS_TRANSMITTED = KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED
+    PACKETS_RECEIVED    = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
+    BYTES_TRANSMITTED   = KpiSampleType.KPISAMPLETYPE_BYTES_TRANSMITTED
+    BYTES_RECEIVED      = KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED
 
 grpc_to_enum__kpi_sample_type = functools.partial(
-    grpc_to_enum, KpiSampleType, ORM_KpiSampleType)
+    grpc_to_enum, KpiSampleType, ORM_KpiSampleTypeEnum)
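
A brief usage sketch of the renamed enum and converter (import paths as in this patch); it simply maps the prefixed protobuf values onto the short ORM names:

    from device.proto.kpi_sample_types_pb2 import KpiSampleType
    from device.service.database.KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type

    orm_sample_type = grpc_to_enum__kpi_sample_type(KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED)
    assert orm_sample_type == ORM_KpiSampleTypeEnum.BYTES_RECEIVED
    assert orm_sample_type.value == KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED
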
diff --git a/src/device/service/database/RelationModels.py b/src/device/service/database/RelationModels.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d1a9780f3bcd12d4aa3718c94e910b80a40ba18
--- /dev/null
+++ b/src/device/service/database/RelationModels.py
@@ -0,0 +1,13 @@
+import logging
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.model.Model import Model
+from .EndPointModel import EndPointMonitorModel
+from .KpiModel import KpiModel
+
+LOGGER = logging.getLogger(__name__)
+
+class EndPointMonitorKpiModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    endpoint_monitor_fk = ForeignKeyField(EndPointMonitorModel)
+    kpi_fk = ForeignKeyField(KpiModel)
diff --git a/src/device/service/driver_api/AnyTreeTools.py b/src/device/service/driver_api/AnyTreeTools.py
index df61c7e030a13a3d0d758ce51a011aaa95deb49f..3b247eca6af25d1c34ad46fd824f8303c12c74d9 100644
--- a/src/device/service/driver_api/AnyTreeTools.py
+++ b/src/device/service/driver_api/AnyTreeTools.py
@@ -1,5 +1,6 @@
 import anytree
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Union
+from apscheduler.job import Job
 
 class TreeNode(anytree.node.Node):
     def __init__(self, name, parent=None, children=None, **kwargs) -> None:
@@ -27,25 +28,32 @@ class RawStyle(anytree.render.AbstractStyle):
         Node('/root/sub0/sub0A')
         Node('/root/sub1')
         """
-        super(RawStyle, self).__init__(u'', u'', u'')
+        super(RawStyle, self).__init__('', '', '')
 
-def get_subnode(resolver : anytree.Resolver, root : TreeNode, path : List[str], default : Optional[Any] = None):
+def get_subnode(
+    resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], default : Optional[Any] = None):
+
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
             return default
     return node
 
-def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, path : List[str], value : Any):
+def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], value : Any):
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
             node = TreeNode(path_item, parent=node)
-    node.value = value
+    if isinstance(node.value, dict) and isinstance(value, dict):
+        node.value.update(value)
+    else:
+        node.value = value
 
 def dump_subtree(root : TreeNode):
     if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
@@ -56,5 +64,6 @@ def dump_subtree(root : TreeNode):
         if len(path) == 0: continue
         value = node.value
         if value is None: continue
+        if isinstance(value, Job): value = str(value)
         results.append((path, value))
     return results
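
get_subnode and set_subnode_value now accept either a pre-split path or a plain 'a/b/c' string, and setting a dict onto a node that already holds a dict merges the two instead of overwriting. A small usage sketch under those assumptions (module path as in this patch; node names are illustrative):

    import anytree
    from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_subnode_value

    resolver = anytree.Resolver('name')
    root = TreeNode('.')

    set_subnode_value(resolver, root, 'interfaces/eth0/config', {'mtu': 1500})
    set_subnode_value(resolver, root, 'interfaces/eth0/config', {'enabled': True})  # merged, not replaced

    config_node = get_subnode(resolver, root, 'interfaces/eth0/config')
    assert config_node.value == {'mtu': 1500, 'enabled': True}
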
diff --git a/src/device/service/driver_api/FilterFields.py b/src/device/service/driver_api/FilterFields.py
index c7de05f92a743c826d54897930b10013bd09c2b7..892e7f72056cd3342ce04190710d492ec83a02d3 100644
--- a/src/device/service/driver_api/FilterFields.py
+++ b/src/device/service/driver_api/FilterFields.py
@@ -8,6 +8,7 @@ class DeviceTypeFilterFieldEnum(Enum):
     OPTICAL_LINE_SYSTEM = 'optical-line-system'
     PACKET_ROUTER       = 'packet-router'
     PACKET_SWITCH       = 'packet-switch'
+    P4_SWITCH           = 'p4-switch'
 
 class FilterFieldEnum(Enum):
     DEVICE_TYPE   = 'device_type'
diff --git a/src/device/service/drivers/__init__.py b/src/device/service/drivers/__init__.py
index e59bae207e4cd14f238aabcb7e373bb973374005..54c944bab8183d57478a07ea46a9aa55b439f0b1 100644
--- a/src/device/service/drivers/__init__.py
+++ b/src/device/service/drivers/__init__.py
@@ -2,6 +2,7 @@ from ..driver_api.FilterFields import FilterFieldEnum, DeviceTypeFilterFieldEnum
 from .emulated.EmulatedDriver import EmulatedDriver
 from .openconfig.OpenConfigDriver import OpenConfigDriver
 from .transport_api.TransportApiDriver import TransportApiDriver
+from .p4.p4_driver import P4Driver
 
 DRIVERS = [
     (EmulatedDriver, [
@@ -22,4 +23,10 @@ DRIVERS = [
             FilterFieldEnum.DRIVER     : ORM_DeviceDriverEnum.TRANSPORT_API,
         }
     ]),
+    (P4Driver, [
+        {
+            FilterFieldEnum.DEVICE_TYPE: DeviceTypeFilterFieldEnum.P4_SWITCH,
+            FilterFieldEnum.DRIVER     : ORM_DeviceDriverEnum.P4,
+        }
+    ]),
 ]
diff --git a/src/device/service/drivers/emulated/EmulatedDriver.py b/src/device/service/drivers/emulated/EmulatedDriver.py
index 06dc4bd7c36e99d5f081f97e2caa46b8bb5fd2d6..c92554fe30bd86066e3b9e31f09412b1dd82020a 100644
--- a/src/device/service/drivers/emulated/EmulatedDriver.py
+++ b/src/device/service/drivers/emulated/EmulatedDriver.py
@@ -1,16 +1,52 @@
+import json
 import anytree, logging, pytz, queue, random, threading
 from datetime import datetime, timedelta
-from typing import Any, Iterator, List, Optional, Tuple, Union
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
 from apscheduler.executors.pool import ThreadPoolExecutor
 from apscheduler.job import Job
 from apscheduler.jobstores.memory import MemoryJobStore
 from apscheduler.schedulers.background import BackgroundScheduler
 from common.type_checkers.Checkers import chk_float, chk_length, chk_string, chk_type
-from device.service.driver_api._Driver import _Driver
+from device.service.database.KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type
+from device.service.driver_api._Driver import (
+    RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES,
+    _Driver)
 from device.service.driver_api.AnyTreeTools import TreeNode, dump_subtree, get_subnode, set_subnode_value
 
 LOGGER = logging.getLogger(__name__)
 
+SPECIAL_RESOURCE_MAPPINGS = {
+    RESOURCE_ENDPOINTS        : '/endpoints',
+    RESOURCE_INTERFACES       : '/interfaces',
+    RESOURCE_NETWORK_INSTANCES: '/net-instances',
+}
+
+def compose_resource_endpoint(endpoint_data : Dict[str, Any]) -> Tuple[str, Any]:
+    endpoint_uuid = endpoint_data.get('uuid')
+    if endpoint_uuid is None: return None
+    endpoint_resource_path = SPECIAL_RESOURCE_MAPPINGS.get(RESOURCE_ENDPOINTS)
+    endpoint_resource_key = '{:s}/endpoint[{:s}]'.format(endpoint_resource_path, endpoint_uuid)
+
+    endpoint_type = endpoint_data.get('type')
+    if endpoint_type is None: return None
+
+    endpoint_sample_types = endpoint_data.get('sample_types')
+    if endpoint_sample_types is None: return None
+    sample_types = {}
+    for endpoint_sample_type in endpoint_sample_types:
+        try:
+            kpi_sample_type : ORM_KpiSampleTypeEnum = grpc_to_enum__kpi_sample_type(endpoint_sample_type)
+        except: # pylint: disable=bare-except
+            LOGGER.warning('Unknown EndpointSampleType({:s}) for Endpoint({:s}). Ignoring and continuing...'.format(
+                str(endpoint_sample_type), str(endpoint_data)))
+            continue
+        metric_name = kpi_sample_type.name.lower()
+        monitoring_resource_key = '{:s}/state/{:s}'.format(endpoint_resource_key, metric_name)
+        sample_types[endpoint_sample_type] = monitoring_resource_key
+
+    endpoint_resource_value = {'uuid': endpoint_uuid, 'type': endpoint_type, 'sample_types': sample_types}
+    return endpoint_resource_key, endpoint_resource_value
+
 def do_sampling(resource_key : str, out_samples : queue.Queue):
     out_samples.put_nowait((datetime.timestamp(datetime.utcnow()), resource_key, random.random()))
 
@@ -19,6 +55,15 @@ class EmulatedDriver(_Driver):
         self.__lock = threading.Lock()
         self.__initial = TreeNode('.')
         self.__running = TreeNode('.')
+
+        endpoints = settings.get('endpoints', [])
+        endpoint_resources = []
+        for endpoint in endpoints:
+            endpoint_resource = compose_resource_endpoint(endpoint)
+            if endpoint_resource is None: continue
+            endpoint_resources.append(endpoint_resource)
+        self.SetConfig(endpoint_resources)
+
         self.__started = threading.Event()
         self.__terminate = threading.Event()
         self.__scheduler = BackgroundScheduler(daemon=True) # scheduler used to emulate sampling events
@@ -65,6 +110,7 @@ class EmulatedDriver(_Driver):
                 str_resource_name = 'resource_key[#{:d}]'.format(i)
                 try:
                     chk_string(str_resource_name, resource_key, allow_empty=False)
+                    resource_key = SPECIAL_RESOURCE_MAPPINGS.get(resource_key, resource_key)
                     resource_path = resource_key.split('/')
                 except Exception as e: # pylint: disable=broad-except
                     LOGGER.exception('Exception validating {:s}: {:s}'.format(str_resource_name, str(resource_key)))
@@ -77,12 +123,6 @@ class EmulatedDriver(_Driver):
                 results.extend(dump_subtree(resource_node))
             return results
 
-    def GetResource(self, endpoint_uuid : str) -> Optional[str]:
-        chk_string('endpoint_uuid', endpoint_uuid)
-        return {
-            #'key': 'value',
-        }.get(endpoint_uuid)
-
     def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
         chk_type('resources', resources, list)
         if len(resources) == 0: return []
@@ -102,6 +142,11 @@ class EmulatedDriver(_Driver):
                     results.append(e) # if validation fails, store the exception
                     continue
 
+                # if the value is a JSON-encoded string, decode it; otherwise keep it as-is
+                try:
+                    resource_value = json.loads(resource_value)
+                except: # pylint: disable=bare-except
+                    pass
+
                 set_subnode_value(resolver, self.__running, resource_path, resource_value)
                 results.append(True)
         return results
diff --git a/src/device/service/drivers/emulated/QueryFields.py b/src/device/service/drivers/emulated/QueryFields.py
deleted file mode 100644
index 6db43e5b5d4ffe1bbcc652d305981757bd960c3e..0000000000000000000000000000000000000000
--- a/src/device/service/drivers/emulated/QueryFields.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from enum import Enum
-
-VENDOR_CTTC = 'cttc'
-
-DEVICE_MODEL_EMULATED_OPTICAL_ROADM       = 'cttc_emu_opt_rdm'
-DEVICE_MODEL_EMULATED_OPTICAL_TRANDPONDER = 'cttc_emu_opt_tp'
-DEVICE_MODEL_EMULATED_PACKET_ROUTER       = 'cttc_emu_pkt_rtr'
-DEVICE_MODEL_EMULATED_PACKET_SWITCH       = 'cttc_emu_pkt_swt'
diff --git a/src/device/service/drivers/openconfig/templates/EndPoints.py b/src/device/service/drivers/openconfig/templates/EndPoints.py
index 4908c818374ca3b94fb77c5d5cd88d97d3c881b5..a7d5c9be6ce9dd53d3b31998b4d4a269c972eb22 100644
--- a/src/device/service/drivers/openconfig/templates/EndPoints.py
+++ b/src/device/service/drivers/openconfig/templates/EndPoints.py
@@ -1,11 +1,13 @@
 import logging, lxml.etree as ET
 from typing import Any, Dict, List, Tuple
+from device.service.database.KpiSampleType import ORM_KpiSampleTypeEnum
 from .Namespace import NAMESPACES
-from .Tools import add_value_from_tag
+from .Tools import add_value_from_collection, add_value_from_tag
 
 LOGGER = logging.getLogger(__name__)
 
 XPATH_PORTS = "//ocp:components/ocp:component/ocp:state[ocp:type='PORT']/.."
+XPATH_IFACE_COUNTER = "//oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s}"
 
 def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]:
     response = []
@@ -16,12 +18,20 @@ def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]:
 
         component_name = xml_component.find('ocp:name', namespaces=NAMESPACES)
         if component_name is None or component_name.text is None: continue
-        add_value_from_tag(endpoint, 'name', component_name)
+        add_value_from_tag(endpoint, 'uuid', component_name)
 
         component_type = xml_component.find(
             'ocpp:port/ocpp:breakout-mode/ocpp:state/ocpp:channel-speed', namespaces=NAMESPACES)
         add_value_from_tag(endpoint, 'type', component_type)
 
+        sample_types = {
+            ORM_KpiSampleTypeEnum.BYTES_RECEIVED.value     : XPATH_IFACE_COUNTER.format(endpoint['uuid'], 'in-octets' ),
+            ORM_KpiSampleTypeEnum.BYTES_TRANSMITTED.value  : XPATH_IFACE_COUNTER.format(endpoint['uuid'], 'out-octets'),
+            ORM_KpiSampleTypeEnum.PACKETS_RECEIVED.value   : XPATH_IFACE_COUNTER.format(endpoint['uuid'], 'in-pkts'   ),
+            ORM_KpiSampleTypeEnum.PACKETS_TRANSMITTED.value: XPATH_IFACE_COUNTER.format(endpoint['uuid'], 'out-pkts'  ),
+        }
+        add_value_from_collection(endpoint, 'sample_types', sample_types)
+
         if len(endpoint) == 0: continue
-        response.append(('endpoint[{:s}]'.format(endpoint['name']), endpoint))
+        response.append(('endpoint[{:s}]'.format(endpoint['uuid']), endpoint))
     return response
diff --git a/src/device/service/drivers/p4/__init__.py b/src/device/service/drivers/p4/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/device/service/drivers/p4/p4_driver.py b/src/device/service/drivers/p4/p4_driver.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d3abf236f016608ef93e3d63ab04ac86830da7d
--- /dev/null
+++ b/src/device/service/drivers/p4/p4_driver.py
@@ -0,0 +1,246 @@
+"""
+P4 driver plugin for the TeraFlow SDN controller.
+"""
+
+import logging
+import threading
+from typing import Any, Iterator, List, Optional, Tuple, Union
+from .p4_util import P4RuntimeClient,\
+    P4_ATTR_DEV_ID, P4_ATTR_DEV_NAME, P4_ATTR_DEV_VENDOR,\
+    P4_ATTR_DEV_HW_VER, P4_ATTR_DEV_SW_VER, P4_ATTR_DEV_PIPECONF,\
+    P4_VAL_DEF_VENDOR, P4_VAL_DEF_HW_VER, P4_VAL_DEF_SW_VER, P4_VAL_DEF_PIPECONF
+
+try:
+    from _Driver import _Driver
+except ImportError:
+    from device.service.driver_api._Driver import _Driver
+
+LOGGER = logging.getLogger(__name__)
+
+
+class P4Driver(_Driver):
+    """
+    P4Driver class inherits the abstract _Driver class to support P4 devices.
+
+    Attributes
+    ----------
+    address : str
+        IP address of the P4Runtime server running on the P4 device
+    port : int
+        transport port number of the P4Runtime server running on the P4 device
+    **settings : map
+        id : int
+            P4 device ID (Mandatory)
+        name : str
+            P4 device name (Optional)
+        vendor : str
+            P4 device vendor (Optional)
+        hw_ver : str
+            Hardware version of the P4 device (Optional)
+        sw_ver : str
+            Software version of the P4 device (Optional)
+        pipeconf : str
+            P4 device table configuration (Optional)
+    """
+
+    def __init__(self, address: str, port: int, **settings) -> None:
+        # pylint: disable=super-init-not-called
+        self.__client = None
+        self.__address = address
+        self.__port = int(port)
+        self.__settings = settings
+
+        # dict.get() never raises for a missing key, so explicitly check the mandatory device ID
+        self.__dev_id = self.__settings.get(P4_ATTR_DEV_ID)
+        if self.__dev_id is None:
+            LOGGER.error('P4 device ID is a mandatory setting')
+            raise Exception('P4 device ID is a mandatory setting')
+
+        if P4_ATTR_DEV_NAME in self.__settings:
+            self.__dev_name = self.__settings.get(P4_ATTR_DEV_NAME)
+        else:
+            self.__dev_name = str(self.__dev_id)
+            LOGGER.warning(
+                'No device name is provided. Setting default name: %s',
+                self.__dev_name)
+
+        if P4_ATTR_DEV_VENDOR in self.__settings:
+            self.__dev_vendor = self.__settings.get(P4_ATTR_DEV_VENDOR)
+        else:
+            self.__dev_vendor = P4_VAL_DEF_VENDOR
+            LOGGER.warning(
+                'No vendor is provided. Setting default vendor: %s',
+                self.__dev_vendor)
+
+        if P4_ATTR_DEV_HW_VER in self.__settings:
+            self.__dev_hw_version = self.__settings.get(P4_ATTR_DEV_HW_VER)
+        else:
+            self.__dev_hw_version = P4_VAL_DEF_HW_VER
+            LOGGER.warning(
+                'No HW version is provided. Setting default HW version: %s',
+                self.__dev_hw_version)
+
+        if P4_ATTR_DEV_SW_VER in self.__settings:
+            self.__dev_sw_version = self.__settings.get(P4_ATTR_DEV_SW_VER)
+        else:
+            self.__dev_sw_version = P4_VAL_DEF_SW_VER
+            LOGGER.warning(
+                'No SW version is provided. Setting default SW version: %s',
+                self.__dev_sw_version)
+
+        if P4_ATTR_DEV_PIPECONF in self.__settings:
+            self.__dev_pipeconf = self.__settings.get(P4_ATTR_DEV_PIPECONF)
+        else:
+            self.__dev_pipeconf = P4_VAL_DEF_PIPECONF
+            LOGGER.warning(
+                'No P4 pipeconf is provided. Setting default P4 pipeconf: %s',
+                self.__dev_pipeconf)
+
+        self.__lock = threading.Lock()
+        self.__started = threading.Event()
+        self.__terminate = threading.Event()
+
+        LOGGER.info('Initializing P4 device at %s:%d with settings:',
+                    self.__address, self.__port)
+
+        for key, value in settings.items():
+            LOGGER.info('\t%8s = %s', key, value)
+
+    def Connect(self) -> bool:
+        """
+        Establishes a connection between the P4 device driver and a P4 device.
+
+        :return: boolean connection status.
+        """
+        LOGGER.info(
+            'Connecting to P4 device %s:%d ...',
+            self.__address, self.__port)
+
+        with self.__lock:
+            # Skip if already connected
+            if self.__started.is_set():
+                return True
+
+            # Instantiate a gRPC channel with the P4 device
+            grpc_address = f'{self.__address}:{self.__port}'
+            election_id = (1, 0)
+            self.__client = P4RuntimeClient(
+                self.__dev_id, grpc_address, election_id)
+            LOGGER.info('\tConnected!')
+            self.__started.set()
+
+            return True
+
+    def Disconnect(self) -> bool:
+        """
+        Terminates the connection between the P4 device driver and a P4 device.
+
+        :return: boolean disconnection status.
+        """
+        LOGGER.info(
+            'Disconnecting from P4 device %s:%d ...',
+            self.__address, self.__port)
+
+        # If not started, assume it is already disconnected
+        if not self.__started.is_set():
+            return True
+
+        # gRPC client must already be instantiated
+        assert self.__client
+
+        # Trigger termination of loops and processes
+        self.__terminate.set()
+
+        # Trigger connection tear down with the P4Runtime server
+        self.__client.tear_down()
+        self.__client = None
+
+        LOGGER.info('\tDisconnected!')
+
+        return True
+
+    def GetInitialConfig(self) -> List[Tuple[str, Any]]:
+        """
+        Retrieves the initial configuration of a P4 device.
+
+        :return: list of initial configuration items.
+        """
+        LOGGER.info('P4 GetInitialConfig()')
+        return []
+
+    def GetConfig(self, resource_keys : List[str] = [])\
+            -> List[Tuple[str, Union[Any, None, Exception]]]:
+        """
+        Retrieves the current configuration of a P4 device.
+
+        :param resource_keys: configuration parameters to retrieve.
+        :return: list of values associated with the requested resource keys.
+        """
+
+        LOGGER.info('P4 GetConfig()')
+        return []
+
+    def SetConfig(self, resources : List[Tuple[str, Any]])\
+            -> List[Union[bool, Exception]]:
+        """
+        Submits a new configuration to a P4 device.
+
+        :param resources: configuration parameters to set.
+        :return: list of results for resource key changes requested.
+        """
+        LOGGER.info('P4 SetConfig()')
+        return []
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]])\
+            -> List[Union[bool, Exception]]:
+        """
+        Revokes P4 device configuration.
+
+        :param resources: list of tuples with resource keys to be deleted.
+        :return: list of results for resource key deletions requested.
+        """
+        LOGGER.info('P4 DeleteConfig()')
+        return []
+
+    def GetResource(self, endpoint_uuid : str) -> Optional[str]:
+        """
+        Retrieves a certain resource from a P4 device.
+
+        :param endpoint_uuid: target endpoint UUID.
+        :return: The path of the endpoint or None if not found.
+        """
+        LOGGER.info('P4 GetResource()')
+        return ""
+
+    def GetState(self, blocking=False) -> Iterator[Tuple[str, Any]]:
+        """
+        Retrieves the state of a P4 device.
+
+        :param blocking: if False (non-blocking), the driver terminates the
+        loop and returns.
+        :return: sequences of state sample.
+        """
+        LOGGER.info('P4 GetState()')
+        return []
+
+    def SubscribeState(self, subscriptions : List[Tuple[str, float, float]])\
+            -> List[Union[bool, Exception]]:
+        """
+        Subscribes to certain state information.
+
+        :param subscriptions: list of tuples with resources to be subscribed.
+        :return: list of results for resource subscriptions requested.
+        """
+        LOGGER.info('P4 SubscribeState()')
+        return []
+
+    def UnsubscribeState(self, subscriptions : List[Tuple[str, float, float]])\
+            -> List[Union[bool, Exception]]:
+        """
+        Unsubscribes from certain state information.
+
+        :param subscriptions: list of tuples with resources to be unsubscribed.
+        :return: list of results for resource un-subscriptions requested.
+        """
+        LOGGER.info('P4 UnsubscribeState()')
+        return []
diff --git a/src/device/service/drivers/p4/p4_util.py b/src/device/service/drivers/p4/p4_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d6f258ddb0285a5fa4ee5bd11811d02df380345
--- /dev/null
+++ b/src/device/service/drivers/p4/p4_util.py
@@ -0,0 +1,257 @@
+"""
+P4 driver utilities.
+"""
+
+import logging
+import queue
+import sys
+import threading
+from functools import wraps
+import grpc
+import google.protobuf.text_format
+from google.rpc import code_pb2
+
+from p4.v1 import p4runtime_pb2
+from p4.v1 import p4runtime_pb2_grpc
+
+P4_ATTR_DEV_ID = 'id'
+P4_ATTR_DEV_NAME = 'name'
+P4_ATTR_DEV_VENDOR = 'vendor'
+P4_ATTR_DEV_HW_VER = 'hw_ver'
+P4_ATTR_DEV_SW_VER = 'sw_ver'
+P4_ATTR_DEV_PIPECONF = 'pipeconf'
+
+P4_VAL_DEF_VENDOR = 'Unknown'
+P4_VAL_DEF_HW_VER = 'BMv2 simple_switch'
+P4_VAL_DEF_SW_VER = 'Stratum'
+P4_VAL_DEF_PIPECONF = 'org.onosproject.pipelines.fabric'
+
+STREAM_ATTR_ARBITRATION = 'arbitration'
+STREAM_ATTR_PACKET = 'packet'
+STREAM_ATTR_DIGEST = 'digest'
+STREAM_ATTR_UNKNOWN = 'unknown'
+
+LOGGER = logging.getLogger(__name__)
+
+
+class P4RuntimeException(Exception):
+    """
+    P4Runtime exception handler.
+
+    Attributes
+    ----------
+    grpc_error : object
+        gRPC error
+    """
+
+    def __init__(self, grpc_error):
+        super().__init__()
+        self.grpc_error = grpc_error
+
+    def __str__(self):
+        return 'P4Runtime RPC error ({:s}): {:s}'.format(
+            self.grpc_error.code().name, self.grpc_error.details())
+
+
+def parse_p4runtime_error(fun):
+    """
+    Parse P4Runtime error.
+
+    :param fun: function
+    :return: parsed error
+    """
+    @wraps(fun)
+    def handle(*args, **kwargs):
+        try:
+            return fun(*args, **kwargs)
+        except grpc.RpcError as rpc_ex:
+            raise P4RuntimeException(rpc_ex) from None
+        except Exception as ex:
+            raise Exception(ex) from None
+    return handle
+
+
+class P4RuntimeClient:
+    """
+    P4Runtime client.
+
+    Attributes
+    ----------
+    device_id : int
+        P4 device ID
+    grpc_address : str
+        IP address and port
+    election_id : tuple
+        Mastership election ID
+    role_name : str
+        Role name (optional)
+    """
+    def __init__(self, device_id, grpc_address, election_id, role_name=None):
+        self.device_id = device_id
+        self.election_id = election_id
+        self.role_name = role_name
+        self.stream_in_q = None
+        self.stream_out_q = None
+        self.stream = None
+        self.stream_recv_thread = None
+        LOGGER.debug(
+            'Connecting to device %d at %s', device_id, grpc_address)
+        self.channel = grpc.insecure_channel(grpc_address)
+        self.stub = p4runtime_pb2_grpc.P4RuntimeStub(self.channel)
+        try:
+            self.set_up_stream()
+        except P4RuntimeException:
+            LOGGER.critical('Failed to connect to P4Runtime server')
+            sys.exit(1)
+
+    def set_up_stream(self):
+        """
+        Set up a gRPC stream.
+        """
+        self.stream_out_q = queue.Queue()
+        # queues for different messages
+        self.stream_in_q = {
+            STREAM_ATTR_ARBITRATION: queue.Queue(),
+            STREAM_ATTR_PACKET: queue.Queue(),
+            STREAM_ATTR_DIGEST: queue.Queue(),
+            STREAM_ATTR_UNKNOWN: queue.Queue(),
+        }
+
+        def stream_req_iterator():
+            while True:
+                st_p = self.stream_out_q.get()
+                if st_p is None:
+                    break
+                yield st_p
+
+        def stream_recv_wrapper(stream):
+            @parse_p4runtime_error
+            def stream_recv():
+                for st_p in stream:
+                    if st_p.HasField(STREAM_ATTR_ARBITRATION):
+                        self.stream_in_q[STREAM_ATTR_ARBITRATION].put(st_p)
+                    elif st_p.HasField(STREAM_ATTR_PACKET):
+                        self.stream_in_q[STREAM_ATTR_PACKET].put(st_p)
+                    elif st_p.HasField(STREAM_ATTR_DIGEST):
+                        self.stream_in_q[STREAM_ATTR_DIGEST].put(st_p)
+                    else:
+                        self.stream_in_q[STREAM_ATTR_UNKNOWN].put(st_p)
+            try:
+                stream_recv()
+            except P4RuntimeException as ex:
+                LOGGER.critical('StreamChannel error, closing stream')
+                LOGGER.critical(ex)
+                for k in self.stream_in_q:
+                    self.stream_in_q[k].put(None)
+        self.stream = self.stub.StreamChannel(stream_req_iterator())
+        self.stream_recv_thread = threading.Thread(
+            target=stream_recv_wrapper, args=(self.stream,))
+        self.stream_recv_thread.start()
+        self.handshake()
+
+    def handshake(self):
+        """
+        Handshake with gRPC server.
+        """
+
+        req = p4runtime_pb2.StreamMessageRequest()
+        arbitration = req.arbitration
+        arbitration.device_id = self.device_id
+        election_id = arbitration.election_id
+        election_id.high = self.election_id[0]
+        election_id.low = self.election_id[1]
+        if self.role_name is not None:
+            arbitration.role.name = self.role_name
+        self.stream_out_q.put(req)
+
+        rep = self.get_stream_packet(STREAM_ATTR_ARBITRATION, timeout=2)
+        if rep is None:
+            LOGGER.critical('Failed to establish session with server')
+            sys.exit(1)
+        is_primary = (rep.arbitration.status.code == code_pb2.OK)
+        LOGGER.debug('Session established, client is %s',
+                        'primary' if is_primary else 'backup')
+        if not is_primary:
+            LOGGER.warning(
+                'You are not the primary client, '
+                'you only have read access to the server')
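+
+    # Note on P4Runtime mastership: the (high, low) election_id sent in the arbitration message
+    # above determines which client becomes primary; only the client holding the highest
+    # election_id for the device is granted write access, the others are read-only backups.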
+
+    def get_stream_packet(self, type_, timeout=1):
+        """
+        Get a new message from the stream.
+
+        :param type_: stream type.
+        :param timeout: time to wait.
+        :return: message or None
+        """
+        if type_ not in self.stream_in_q:
+            LOGGER.critical('Unknown stream type %s', type_)
+            return None
+        try:
+            msg = self.stream_in_q[type_].get(timeout=timeout)
+            return msg
+        except queue.Empty:  # timeout expired
+            return None
+
+    @parse_p4runtime_error
+    def get_p4info(self):
+        """
+        Retrieve P4Info content.
+
+        :return: P4Info object.
+        """
+
+        LOGGER.debug('Retrieving P4Info file')
+        req = p4runtime_pb2.GetForwardingPipelineConfigRequest()
+        req.device_id = self.device_id
+        req.response_type =\
+            p4runtime_pb2.GetForwardingPipelineConfigRequest.P4INFO_AND_COOKIE
+        rep = self.stub.GetForwardingPipelineConfig(req)
+        return rep.config.p4info
+
+    @parse_p4runtime_error
+    def set_fwd_pipe_config(self, p4info_path, bin_path):
+        """
+        Configure the pipeline.
+
+        :param p4info_path: path to the P4Info file
+        :param bin_path: path to the binary file
+        :return:
+        """
+
+        LOGGER.debug('Setting forwarding pipeline config')
+        req = p4runtime_pb2.SetForwardingPipelineConfigRequest()
+        req.device_id = self.device_id
+        if self.role_name is not None:
+            req.role = self.role_name
+        election_id = req.election_id
+        election_id.high = self.election_id[0]
+        election_id.low = self.election_id[1]
+        req.action =\
+            p4runtime_pb2.SetForwardingPipelineConfigRequest.VERIFY_AND_COMMIT
+        with open(p4info_path, 'r', encoding='utf8') as f_1:
+            with open(bin_path, 'rb') as f_2:  # binary mode takes no encoding argument
+                try:
+                    google.protobuf.text_format.Merge(
+                        f_1.read(), req.config.p4info)
+                except google.protobuf.text_format.ParseError:
+                    LOGGER.error('Error when parsing P4Info')
+                    raise
+                req.config.p4_device_config = f_2.read()
+        return self.stub.SetForwardingPipelineConfig(req)
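+    # Illustrative call (file paths are hypothetical):
+    #   client.set_fwd_pipe_config('pipeline.p4info.txt', 'pipeline.json')
+    # where the first file is the text-format P4Info and the second the target device config.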
+
+    def tear_down(self):
+        """
+        Tear down the connection with the gRPC server.
+        """
+
+        if self.stream_out_q:
+            LOGGER.debug('Cleaning up stream')
+            self.stream_out_q.put(None)
+        if self.stream_in_q:
+            for k in self.stream_in_q:
+                self.stream_in_q[k].put(None)
+        if self.stream_recv_thread:
+            self.stream_recv_thread.join()
+        self.channel.close()
+        del self.channel
diff --git a/src/device/service/drivers/transport_api/Tools.py b/src/device/service/drivers/transport_api/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..c569404bf7884501f9c8ea494f27983ca78ac3eb
--- /dev/null
+++ b/src/device/service/drivers/transport_api/Tools.py
@@ -0,0 +1,100 @@
+import json, logging, requests
+from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
+
+LOGGER = logging.getLogger(__name__)
+
+
+def find_key(resource, key):
+    return json.loads(resource[1])[key]
+
+
+def config_getter(root_url, resource_key, timeout):
+    url = '{:s}/restconf/data/tapi-common:context'.format(root_url)
+    result = []
+    try:
+        response = requests.get(url, timeout=timeout)
+    except requests.exceptions.Timeout:
+        LOGGER.exception('Timeout connecting {:s}'.format(url))
+    except Exception as e:  # pylint: disable=broad-except
+        LOGGER.exception('Exception retrieving {:s}'.format(resource_key))
+        result.append((resource_key, e))
+    else:
+        context = json.loads(response.content)
+
+        if resource_key == RESOURCE_ENDPOINTS:
+            for sip in context['tapi-common:context']['service-interface-point']:
+                result.append(
+                    ('/endpoints/endpoint[{:s}]'.format(sip['uuid']), {'uuid': sip['uuid'], 'type': '10Gbps'}))
+
+    return result
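+
+# Illustrative output: for resource_key == RESOURCE_ENDPOINTS, config_getter() returns entries like
+#   ('/endpoints/endpoint[<sip-uuid>]', {'uuid': '<sip-uuid>', 'type': '10Gbps'})
+# where the SIP UUIDs depend on the TAPI context exposed by the queried server.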
+
+def create_connectivity_service(
+    root_url, timeout, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name,
+    layer_protocol_qualifier):
+
+    url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context'.format(root_url)
+    headers = {'content-type': 'application/json'}
+    data = {
+        'tapi-connectivity:connectivity-service': [
+            {
+                'uuid': uuid,
+                'connectivity-constraint': {
+                    'requested-capacity': {
+                        'total-size': {
+                            'value': capacity_value,
+                            'unit': capacity_unit
+                        }
+                    },
+                    'connectivity-direction': direction
+                },
+                'end-point': [
+                    {
+                        'service-interface-point': {
+                            'service-interface-point-uuid': input_sip
+                        },
+                        'layer-protocol-name': layer_protocol_name,
+                        'layer-protocol-qualifier': layer_protocol_qualifier,
+                        'local-id': input_sip
+                    },
+                    {
+                        'service-interface-point': {
+                            'service-interface-point-uuid': output_sip
+                        },
+                        'layer-protocol-name': layer_protocol_name,
+                        'layer-protocol-qualifier': layer_protocol_qualifier,
+                        'local-id': output_sip
+                    }
+                ]
+            }
+        ]
+    }
+    results = []
+    try:
+        LOGGER.info('Connectivity service {:s}: {:s}'.format(str(uuid), str(data)))
+        response = requests.post(url=url, data=json.dumps(data), timeout=timeout, headers=headers)
+        LOGGER.info('TAPI response: {:s}'.format(str(response)))
+    except Exception as e:  # pylint: disable=broad-except
+        LOGGER.exception('Exception creating ConnectivityService(uuid={:s}, data={:s})'.format(str(uuid), str(data)))
+        results.append(e)
+    else:
+        if response.status_code != 201:
+            msg = 'Could not create ConnectivityService(uuid={:s}, data={:s}). status_code={:s} reply={:s}'
+            LOGGER.error(msg.format(str(uuid), str(data), str(response.status_code), str(response)))
+        results.append(response.status_code == 201)
+    return results
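+
+# Illustrative call (root URL, UUIDs and values are hypothetical placeholders):
+#   create_connectivity_service(
+#       'http://<tapi-server>:<port>', 120, 'cs-1', '<input-sip-uuid>', '<output-sip-uuid>',
+#       '<direction>', 50, '<capacity-unit>', '<layer-protocol-name>', '<layer-protocol-qualifier>')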
+
+def delete_connectivity_service(root_url, timeout, uuid):
+    url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context/connectivity-service={:s}'
+    url = url.format(root_url, uuid)
+    results = []
+    try:
+        response = requests.delete(url=url, timeout=timeout)
+    except Exception as e:  # pylint: disable=broad-except
+        LOGGER.exception('Exception deleting ConnectivityService(uuid={:s})'.format(str(uuid)))
+        results.append(e)
+    else:
+        if response.status_code != 202:
+            msg = 'Could not delete ConnectivityService(uuid={:s}). status_code={:s} reply={:s}'
+            LOGGER.error(msg.format(str(uuid), str(response.status_code), str(response)))
+        results.append(response.status_code == 202)
+    return results
diff --git a/src/device/service/drivers/transport_api/TransportApiDriver.py b/src/device/service/drivers/transport_api/TransportApiDriver.py
index f20173dd022a517d1e5630dc23c3455b0ed3c710..b3e5f4fa33f20836629c06968261fb1ceac8f075 100644
--- a/src/device/service/drivers/transport_api/TransportApiDriver.py
+++ b/src/device/service/drivers/transport_api/TransportApiDriver.py
@@ -1,9 +1,97 @@
-import logging
+import logging, requests, threading
+from typing import Any, Iterator, List, Tuple, Union
+from common.type_checkers.Checkers import chk_string, chk_type
 from device.service.driver_api._Driver import _Driver
+from . import ALL_RESOURCE_KEYS
+from .Tools import create_connectivity_service, find_key, config_getter, delete_connectivity_service
 
 LOGGER = logging.getLogger(__name__)
 
-# TODO: Implement TransportAPI Driver
-
 class TransportApiDriver(_Driver):
-    pass
+    def __init__(self, address: str, port: int, **settings) -> None:    # pylint: disable=super-init-not-called
+        self.__lock = threading.Lock()
+        self.__started = threading.Event()
+        self.__terminate = threading.Event()
+        self.__tapi_root = 'http://' + address + ':' + str(port)
+        self.__timeout = int(settings.get('timeout', 120))
+
+    def Connect(self) -> bool:
+        url = self.__tapi_root + '/restconf/data/tapi-common:context'
+        with self.__lock:
+            if self.__started.is_set(): return True
+            try:
+                requests.get(url, timeout=self.__timeout)
+            except requests.exceptions.Timeout:
+                LOGGER.exception('Timeout connecting {:s}'.format(str(self.__tapi_root)))
+                return False
+            except Exception:  # pylint: disable=broad-except
+                LOGGER.exception('Exception connecting {:s}'.format(str(self.__tapi_root)))
+                return False
+            else:
+                self.__started.set()
+                return True
+
+    def Disconnect(self) -> bool:
+        with self.__lock:
+            self.__terminate.set()
+            return True
+
+    def GetInitialConfig(self) -> List[Tuple[str, Any]]:
+        with self.__lock:
+            return []
+
+    def GetConfig(self, resource_keys : List[str] = []) -> List[Tuple[str, Union[Any, None, Exception]]]:
+        chk_type('resources', resource_keys, list)
+        results = []
+        with self.__lock:
+            if len(resource_keys) == 0: resource_keys = ALL_RESOURCE_KEYS
+            for i, resource_key in enumerate(resource_keys):
+                str_resource_name = 'resource_key[#{:d}]'.format(i)
+                chk_string(str_resource_name, resource_key, allow_empty=False)
+                results.extend(config_getter(self.__tapi_root, resource_key, self.__timeout))
+        return results
+
+    def SetConfig(self, resources: List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        results = []
+        if len(resources) == 0:
+            return results
+        with self.__lock:
+            for resource in resources:
+                LOGGER.info('resource = {:s}'.format(str(resource)))
+
+                input_sip = find_key(resource, 'input_sip')
+                output_sip = find_key(resource, 'output_sip')
+                uuid = find_key(resource, 'uuid')
+                capacity_value = find_key(resource, 'capacity_value')
+                capacity_unit = find_key(resource, 'capacity_unit')
+                layer_protocol_name = find_key(resource, 'layer_protocol_name')
+                layer_protocol_qualifier = find_key(resource, 'layer_protocol_qualifier')
+                direction = find_key(resource, 'direction')
+
+                data = create_connectivity_service(
+                    self.__tapi_root, self.__timeout, uuid, input_sip, output_sip, direction, capacity_value,
+                    capacity_unit, layer_protocol_name, layer_protocol_qualifier)
+                results.extend(data)
+        return results
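+    # Illustrative resource tuple consumed by SetConfig(); find_key() expects the second element
+    # to be a JSON-encoded string (key and values below are hypothetical):
+    #   ('<resource_key>', json.dumps({'uuid': 'cs-1', 'input_sip': '<sip>', 'output_sip': '<sip>',
+    #       'capacity_value': 50, 'capacity_unit': '<unit>', 'layer_protocol_name': '<name>',
+    #       'layer_protocol_qualifier': '<qualifier>', 'direction': '<direction>'}))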
+
+    def DeleteConfig(self, resources: List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        results = []
+        if len(resources) == 0: return results
+        with self.__lock:
+            for resource in resources:
+                LOGGER.info('resource = {:s}'.format(str(resource)))
+                uuid = find_key(resource, 'uuid')
+                results.extend(delete_connectivity_service(self.__tapi_root, self.__timeout, uuid))
+        return results
+
+    def SubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
+        # TODO: TAPI does not support monitoring yet
+        return [False for _ in subscriptions]
+
+    def UnsubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
+        # TODO: TAPI does not support monitoring yet
+        return [False for _ in subscriptions]
+
+    def GetState(self, blocking=False) -> Iterator[Tuple[float, str, Any]]:
+        # TODO: TAPI does not support monitoring yet
+        return []
diff --git a/src/device/service/drivers/transport_api/__init__.py b/src/device/service/drivers/transport_api/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..d2a2d4b1a6c224c370479103553a769ce8a0956f 100644
--- a/src/device/service/drivers/transport_api/__init__.py
+++ b/src/device/service/drivers/transport_api/__init__.py
@@ -0,0 +1,13 @@
+from device.service.driver_api._Driver import RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES
+
+ALL_RESOURCE_KEYS = [
+    RESOURCE_ENDPOINTS,
+    RESOURCE_INTERFACES,
+    RESOURCE_NETWORK_INSTANCES,
+]
+
+RESOURCE_KEY_MAPPINGS = {
+    RESOURCE_ENDPOINTS        : 'component',
+    RESOURCE_INTERFACES       : 'interface',
+    RESOURCE_NETWORK_INSTANCES: 'network_instance',
+}
diff --git a/src/device/tests/.gitignore b/src/device/tests/.gitignore
index 067c7b77db596a97883a03426735b6ede9c6fa48..5cb8b444d357c5e39eb31759d67b92fca7beabb2 100644
--- a/src/device/tests/.gitignore
+++ b/src/device/tests/.gitignore
@@ -1,2 +1,3 @@
 # Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc.
 Device_OpenConfig_Infinera.py
+Device_Transport_Api_CTTC.py
diff --git a/src/device/tests/Device_Emulated.py b/src/device/tests/Device_Emulated.py
index 155383b49fd12d780e901db1aa2614a55d4e5e14..27595dd8afed8b2985c76338ffc88d81fc9cc2d0 100644
--- a/src/device/tests/Device_Emulated.py
+++ b/src/device/tests/Device_Emulated.py
@@ -1,10 +1,12 @@
+import operator
 from copy import deepcopy
 from device.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from device.service.database.KpiSampleType import ORM_KpiSampleTypeEnum
 from .Tools import config_rule_set, config_rule_delete
 
 # use "deepcopy" to prevent propagating forced changes during tests
 
-DEVICE_EMU_UUID           = 'EMULARED'
+DEVICE_EMU_UUID           = 'EMULATED'
 DEVICE_EMU_TYPE           = 'emulated'
 DEVICE_EMU_ADDRESS        = '127.0.0.1'
 DEVICE_EMU_PORT           = '0'
@@ -20,28 +22,80 @@ DEVICE_EMU = {
     'device_endpoints': [],
 }
 
+PACKET_PORT_SAMPLE_TYPES = [
+    ORM_KpiSampleTypeEnum.PACKETS_TRANSMITTED,
+    ORM_KpiSampleTypeEnum.PACKETS_RECEIVED,
+    ORM_KpiSampleTypeEnum.BYTES_TRANSMITTED,
+    ORM_KpiSampleTypeEnum.BYTES_RECEIVED,
+]
+
+ENDPOINT_UUIDS = ['EP1', 'EP2', 'EP3', 'EP4']
+
+DEVICE_EMU_ENDPOINTS = []
+for endpoint_uuid in ENDPOINT_UUIDS:
+    DEVICE_EMU_ENDPOINTS.append((endpoint_uuid, '10Gbps', PACKET_PORT_SAMPLE_TYPES))
+
+RSRC_EP       = '/endpoints/endpoint[{:s}]'
+RSRC_SUBIF    = RSRC_EP    + '/subinterfaces/subinterface[{:d}]'
+RSRC_ADDRIPV4 = RSRC_SUBIF + '/ipv4/address[{:s}]'
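+# e.g. RSRC_ADDRIPV4.format('EP1', 0, '10.1.0.1') ->
+#   '/endpoints/endpoint[EP1]/subinterfaces/subinterface[0]/ipv4/address[10.1.0.1]'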
+
+DEVICE_EMU_ENDPOINTS_COOKED = []
+for endpoint_uuid,endpoint_type,endpoint_sample_types in DEVICE_EMU_ENDPOINTS:
+    endpoint_resource_key = RSRC_EP.format(str(endpoint_uuid))
+    sample_types = {}
+    for endpoint_sample_type in endpoint_sample_types:
+        sample_type_name = endpoint_sample_type.name.lower()
+        sample_types[endpoint_sample_type.value] = '{:s}/state/{:s}'.format(endpoint_resource_key, sample_type_name)
+    endpoint_resource_value = {'uuid': endpoint_uuid, 'type': endpoint_type, 'sample_types': sample_types}
+    DEVICE_EMU_ENDPOINTS_COOKED.append((endpoint_resource_key, endpoint_resource_value))
+
 DEVICE_EMU_CONNECT_RULES = [
     config_rule_set('_connect/address',  DEVICE_EMU_ADDRESS ),
     config_rule_set('_connect/port',     DEVICE_EMU_PORT    ),
+    config_rule_set('_connect/settings', {'endpoints': [
+        {
+            'uuid': endpoint_uuid, 'type': endpoint_type,
+            'sample_types': list(map(operator.attrgetter('value'), endpoint_sample_types)),
+        }
+        for endpoint_uuid,endpoint_type,endpoint_sample_types in DEVICE_EMU_ENDPOINTS
+    ]}),
 ]
 
-DEVICE_EMU_CONFIG_RULES = [
-    config_rule_set('dev/rsrc1/value', 'value1'),
-    config_rule_set('dev/rsrc2/value', 'value2'),
-    config_rule_set('dev/rsrc3/value', 'value3'),
-]
+DEVICE_EMU_CONFIG_ENDPOINTS = []
+for endpoint_uuid in ENDPOINT_UUIDS:
+    DEVICE_EMU_CONFIG_ENDPOINTS.append(config_rule_set(RSRC_EP.format(endpoint_uuid), {'enabled' : True}))
 
-DEVICE_EMU_RECONFIG_RULES = [
-    config_rule_delete('dev/rsrc1/value', ''),
-    config_rule_set   ('dev/rsrc10/value', 'value10'),
-    config_rule_set   ('dev/rsrc11/value', 'value11'),
-    config_rule_set   ('dev/rsrc12/value', 'value12'),
-]
+DEVICE_EMU_CONFIG_ADDRESSES = []
+for endpoint_uuid in ENDPOINT_UUIDS:
+    endpoint_number = int(endpoint_uuid.replace('EP', ''))
+    subinterface_index = 0
+    subinterface_address = '10.{:d}.{:d}.1'.format(endpoint_number, subinterface_index)
+    subinterface_prefix_length = 24
+    DEVICE_EMU_CONFIG_ADDRESSES.extend([
+        config_rule_set(RSRC_SUBIF   .format(endpoint_uuid, subinterface_index), {
+            'index': subinterface_index}),
+        config_rule_set(RSRC_ADDRIPV4.format(endpoint_uuid, subinterface_index, subinterface_address), {
+            'ip': subinterface_address, 'prefix_length': subinterface_prefix_length}),
+    ])
+
+DEVICE_EMU_RECONFIG_ADDRESSES = [
+    config_rule_delete(RSRC_SUBIF   .format('EP2', 0            ), {}),
+    config_rule_delete(RSRC_ADDRIPV4.format('EP2', 0, '10.2.0.1'), {'ip': '10.2.0.1', 'prefix_length': 24}),
 
-DEVICE_EMU_DECONFIG_RULES = [
-    config_rule_delete('dev/rsrc2/value', 'value2'),
-    config_rule_delete('dev/rsrc3/value', 'value3'),
-    config_rule_delete('dev/rsrc10/value', 'value10'),
-    config_rule_delete('dev/rsrc11/value', 'value11'),
-    config_rule_delete('dev/rsrc12/value', 'value12'),
+    config_rule_set   (RSRC_SUBIF   .format('EP2', 1            ), {'index': 1}),
+    config_rule_set   (RSRC_ADDRIPV4.format('EP2', 1, '10.2.1.1'), {'ip': '10.2.1.1', 'prefix_length': 24}),
 ]
+
+DEVICE_EMU_DECONFIG_ADDRESSES = []
+for endpoint_uuid in ENDPOINT_UUIDS:
+    endpoint_number = int(endpoint_uuid.replace('EP', ''))
+    subinterface_index = 1 if endpoint_uuid == 'EP2' else 0
+    subinterface_address = '10.{:d}.{:d}.1'.format(endpoint_number, subinterface_index)
+    DEVICE_EMU_DECONFIG_ADDRESSES.extend([
+        config_rule_delete(RSRC_SUBIF   .format(endpoint_uuid, subinterface_index), {}),
+        config_rule_delete(RSRC_ADDRIPV4.format(endpoint_uuid, subinterface_index, subinterface_address), {}),
+    ])
+
+DEVICE_EMU_DECONFIG_ENDPOINTS = []
+for endpoint_uuid in ENDPOINT_UUIDS:
+    DEVICE_EMU_DECONFIG_ENDPOINTS.append(config_rule_delete(RSRC_EP.format(endpoint_uuid), {}))
diff --git a/src/device/tests/Device_OpenConfig_Template.py b/src/device/tests/Device_OpenConfig_Template.py
index 73b6d4f55e8ffb08eb4c2e0badf8aa8d012c0d2b..5f917c5c4057727a64697251696fdeb0283ab7cd 100644
--- a/src/device/tests/Device_OpenConfig_Template.py
+++ b/src/device/tests/Device_OpenConfig_Template.py
@@ -4,13 +4,14 @@ from .Tools import config_rule_set, config_rule_delete
 
 # use "deepcopy" to prevent propagating forced changes during tests
 
-DEVICE_OC_UUID           = 'DEV2'
-DEVICE_OC_TYPE           = 'packet-router'
-DEVICE_OC_ADDRESS        = '127.0.0.1'  # populate the Netconf Server IP address of the device to test
-DEVICE_OC_PORT           = '830'        # populate the Netconf Server port of the device to test
-DEVICE_OC_USERNAME       = 'username'   # populate the Netconf Server username of the device to test
-DEVICE_OC_PASSWORD       = 'password'   # populate the Netconf Server password of the device to test
-DEVICE_OC_DRIVERS        = [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG]
+DEVICE_OC_UUID     = 'DEV2'
+DEVICE_OC_TYPE     = 'packet-router'
+DEVICE_OC_ADDRESS  = '127.0.0.1'  # populate the Netconf Server IP address of the device to test
+DEVICE_OC_PORT     = '830'        # populate the Netconf Server port of the device to test
+DEVICE_OC_USERNAME = 'username'   # populate the Netconf Server username of the device to test
+DEVICE_OC_PASSWORD = 'password'   # populate the Netconf Server password of the device to test
+DEVICE_OC_TIMEOUT  = 120
+DEVICE_OC_DRIVERS  = [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG]
 
 DEVICE_OC_ID = {'device_uuid': {'uuid': DEVICE_OC_UUID}}
 DEVICE_OC = {
@@ -23,10 +24,13 @@ DEVICE_OC = {
 }
 
 DEVICE_OC_CONNECT_RULES = [
-    config_rule_set('_connect/address',  DEVICE_OC_ADDRESS ),
-    config_rule_set('_connect/port',     DEVICE_OC_PORT    ),
-    config_rule_set('_connect/username', DEVICE_OC_USERNAME),
-    config_rule_set('_connect/password', DEVICE_OC_PASSWORD),
+    config_rule_set('_connect/address', DEVICE_OC_ADDRESS),
+    config_rule_set('_connect/port',    DEVICE_OC_PORT   ),
+    config_rule_set('_connect/settings', {
+        'username': DEVICE_OC_USERNAME,
+        'password': DEVICE_OC_PASSWORD,
+        'timeout' : DEVICE_OC_TIMEOUT,
+    }),
 ]
 
 DEVICE_OC_CONFIG_RULES   = []           # populate your configuration rules to test
diff --git a/src/device/tests/Device_Transport_Api_Template.py b/src/device/tests/Device_Transport_Api_Template.py
new file mode 100644
index 0000000000000000000000000000000000000000..6032f0ff8ba683cd3a39bb6bd3c7a8c905974ce6
--- /dev/null
+++ b/src/device/tests/Device_Transport_Api_Template.py
@@ -0,0 +1,40 @@
+from copy import deepcopy
+from device.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from .Tools import config_rule_set, config_rule_delete
+
+# use "deepcopy" to prevent propagating forced changes during tests
+
+DEVICE_TAPI_UUID         = 'DEVICE-TAPI'
+DEVICE_TAPI_TYPE         = 'optical-line-system'
+DEVICE_TAPI_ADDRESS      = '0.0.0.0'
+DEVICE_TAPI_PORT         = '4900'
+DEVICE_TAPI_TIMEOUT      = '120'
+DEVICE_TAPI_DRIVERS      = [DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API]
+
+DEVICE_TAPI_ID = {'device_uuid': {'uuid': DEVICE_TAPI_UUID}}
+DEVICE_TAPI = {
+    'device_id': deepcopy(DEVICE_TAPI_ID),
+    'device_type': DEVICE_TAPI_TYPE,
+    'device_config': {'config_rules': []},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED,
+    'device_drivers': DEVICE_TAPI_DRIVERS,
+    'device_endpoints': [],
+}
+
+DEVICE_TAPI_CONNECT_RULES = [
+    config_rule_set('_connect/address',  DEVICE_TAPI_ADDRESS),
+    config_rule_set('_connect/port',     DEVICE_TAPI_PORT),
+    config_rule_set('_connect/timeout',  DEVICE_TAPI_TIMEOUT),
+]
+
+DEVICE_TAPI_CONFIG_RULES = [
+    config_rule_set('network_instance[DemoOFC-NetInst]/interface[13/1/3]', {
+        'name': 'DemoOFC-NetInst', 'id': '13/1/3',
+    })
+]
+
+DEVICE_TAPI_DECONFIG_RULES = [
+    config_rule_delete('network_instance[DemoOFC-NetInst]/interface[13/1/3]', {
+        'name': 'DemoOFC-NetInst', 'id': '13/1/3'
+    })
+]
\ No newline at end of file
diff --git a/src/device/tests/MockMonitoringService.py b/src/device/tests/MockMonitoringService.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f51f0d8360f96ed183c11b336eb300fe3695a36
--- /dev/null
+++ b/src/device/tests/MockMonitoringService.py
@@ -0,0 +1,47 @@
+import grpc, logging
+from concurrent import futures
+from queue import Queue
+from monitoring.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from monitoring.proto.monitoring_pb2_grpc import  add_MonitoringServiceServicer_to_server
+from .MockMonitoringServiceServicerImpl import MockMonitoringServiceServicerImpl
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class MockMonitoringService:
+    def __init__(
+        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        grace_period=GRPC_GRACE_PERIOD):
+
+        self.queue_samples = Queue()
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.monitoring_servicer = None
+        self.pool = None
+        self.server = None
+
+    def start(self):
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
+        LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.monitoring_servicer = MockMonitoringServiceServicerImpl(self.queue_samples)
+        add_MonitoringServiceServicer_to_server(self.monitoring_servicer, self.server)
+
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        LOGGER.info('Listening on {:s}...'.format(str(self.endpoint)))
+        self.server.start()
+
+        LOGGER.debug('Service started')
+
+    def stop(self):
+        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
+        self.server.stop(self.grace_period)
+        LOGGER.debug('Service stopped')
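+
+# Typical test usage (sketch): start() the mock, let the component under test push Kpi messages
+# to the IncludeKpi() RPC, read them back from 'queue_samples', then stop() the mock.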
diff --git a/src/device/tests/MockMonitoringServiceServicerImpl.py b/src/device/tests/MockMonitoringServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..1787acdaa68fd80037ab1f4d14d7f86599f9ac14
--- /dev/null
+++ b/src/device/tests/MockMonitoringServiceServicerImpl.py
@@ -0,0 +1,15 @@
+import logging
+from queue import Queue
+from monitoring.proto.context_pb2 import Empty
+from monitoring.proto.monitoring_pb2 import Kpi
+from monitoring.proto.monitoring_pb2_grpc import MonitoringServiceServicer
+
+LOGGER = logging.getLogger(__name__)
+
+class MockMonitoringServiceServicerImpl(MonitoringServiceServicer):
+    def __init__(self, queue_samples : Queue):
+        self.queue_samples = queue_samples
+
+    def IncludeKpi(self, request : Kpi, context) -> Empty:
+        self.queue_samples.put(request)
+        return Empty()
diff --git a/src/device/tests/Tools.py b/src/device/tests/Tools.py
index 94a6d50900eb1865c69064b2e98bca0d6e91643b..2d8e99de30ebfcc4ed257a7bee512f4d416bd64c 100644
--- a/src/device/tests/Tools.py
+++ b/src/device/tests/Tools.py
@@ -13,9 +13,13 @@ def config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, An
 def config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
     return config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value)
 
-def endpoint_id(topology_id, device_id, endpoint_uuid):
-    return {'topology_id': deepcopy(topology_id), 'device_id': deepcopy(device_id),
-            'endpoint_uuid': {'uuid': endpoint_uuid}}
+def endpoint_id(device_id, endpoint_uuid, topology_id=None):
+    result = {'device_id': deepcopy(device_id), 'endpoint_uuid': {'uuid': endpoint_uuid}}
+    if topology_id is not None: result['topology_id'] = deepcopy(topology_id)
+    return result
 
-def endpoint(topology_id, device_id, endpoint_uuid, endpoint_type):
-    return {'endpoint_id': endpoint_id(topology_id, device_id, endpoint_uuid), 'endpoint_type': endpoint_type}
+def endpoint(device_id, endpoint_uuid, endpoint_type, topology_id=None):
+    return {
+        'endpoint_id': endpoint_id(device_id, endpoint_uuid, topology_id=topology_id),
+        'endpoint_type': endpoint_type,
+    }
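+
+# Illustrative usage (identifiers are hypothetical):
+#   endpoint_id({'device_uuid': {'uuid': 'EMULATED'}}, 'EP1')
+#   endpoint({'device_uuid': {'uuid': 'EMULATED'}}, 'EP1', '10Gbps')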
diff --git a/src/device/tests/device_p4.py b/src/device/tests/device_p4.py
new file mode 100644
index 0000000000000000000000000000000000000000..55c1025861ad1a00fff6b692eaeaaadcd639a519
--- /dev/null
+++ b/src/device/tests/device_p4.py
@@ -0,0 +1,49 @@
+"""
+P4 device example configuration.
+"""
+
+from copy import deepcopy
+try:
+    from .context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+except ImportError:
+    from device.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from .Tools import config_rule_set
+
+DEVICE_P4_ID = 0
+DEVICE_P4_NAME = 'device:leaf1'
+DEVICE_P4_TYPE = 'p4-switch'
+DEVICE_P4_ADDRESS = '127.0.0.1'
+DEVICE_P4_PORT = '50101'
+DEVICE_P4_DRIVERS = [DeviceDriverEnum.DEVICEDRIVER_P4]
+DEVICE_P4_VENDOR = 'Open Networking Foundation'
+DEVICE_P4_HW_VER = 'BMv2 simple_switch'
+DEVICE_P4_SW_VER = 'Stratum'
+DEVICE_P4_PIPECONF = 'org.onosproject.pipelines.fabric'
+DEVICE_P4_WORKERS = 2
+DEVICE_P4_GRACE_PERIOD = 60
+
+DEVICE_P4_UUID = {'device_uuid': {'uuid': DEVICE_P4_NAME}}
+DEVICE_P4 = {
+    'device_id': deepcopy(DEVICE_P4_UUID),
+    'device_type': DEVICE_P4_TYPE,
+    'device_config': {'config_rules': []},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED,
+    'device_drivers': DEVICE_P4_DRIVERS,
+    'device_endpoints': [],
+}
+
+DEVICE_P4_CONNECT_RULES = [
+    config_rule_set('_connect/address', DEVICE_P4_ADDRESS),
+    config_rule_set('_connect/port', DEVICE_P4_PORT),
+    config_rule_set('_connect/settings', {
+        'id': int(DEVICE_P4_ID),
+        'name': DEVICE_P4_NAME,
+        'hw_ver': DEVICE_P4_HW_VER,  # keys must match P4_ATTR_DEV_HW_VER / P4_ATTR_DEV_SW_VER
+        'sw_ver': DEVICE_P4_SW_VER,
+        'pipeconf': DEVICE_P4_PIPECONF
+    }),
+]
+
+DEVICE_P4_CONFIG_RULES = [
+    config_rule_set('key1', 'value1'),
+]
diff --git a/src/device/tests/mock_p4runtime_service.py b/src/device/tests/mock_p4runtime_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1cf673e34a1dd604a17589ad39c93f0ca1375b8
--- /dev/null
+++ b/src/device/tests/mock_p4runtime_service.py
@@ -0,0 +1,66 @@
+"""
+A mock P4Runtime server.
+"""
+
+import logging
+from concurrent import futures
+import grpc
+from p4.v1 import p4runtime_pb2_grpc
+
+from .device_p4 import(
+    DEVICE_P4_ADDRESS, DEVICE_P4_PORT,
+    DEVICE_P4_WORKERS, DEVICE_P4_GRACE_PERIOD)
+from .mock_p4runtime_servicer_impl import MockP4RuntimeServicerImpl
+
+LOGGER = logging.getLogger(__name__)
+
+
+class MockP4RuntimeService:
+    """
+    P4Runtime server for testing purposes.
+    """
+
+    def __init__(
+            self, address=DEVICE_P4_ADDRESS, port=DEVICE_P4_PORT,
+            max_workers=DEVICE_P4_WORKERS,
+            grace_period=DEVICE_P4_GRACE_PERIOD):
+        self.address = address
+        self.port = port
+        self.endpoint = f'{self.address}:{self.port}'
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.server = None
+        self.servicer = None
+
+    def start(self):
+        """
+        Start the P4Runtime server.
+        """
+
+        LOGGER.info(
+            'Starting P4Runtime service on %s with max_workers: %s',
+            str(self.endpoint), str(self.max_workers))
+
+        self.server = grpc.server(
+            futures.ThreadPoolExecutor(max_workers=self.max_workers))
+
+        self.servicer = MockP4RuntimeServicerImpl()
+        p4runtime_pb2_grpc.add_P4RuntimeServicer_to_server(
+            self.servicer, self.server)
+
+        _ = self.server.add_insecure_port(self.endpoint)
+        LOGGER.info('Listening on %s...', str(self.endpoint))
+
+        self.server.start()
+        LOGGER.debug('P4Runtime service started')
+
+    def stop(self):
+        """
+        Stop the P4Runtime server.
+        """
+
+        LOGGER.debug(
+            'Stopping P4Runtime service (grace period %d seconds)...',
+            self.grace_period)
+        self.server.stop(self.grace_period)
+        LOGGER.debug('P4Runtime service stopped')
diff --git a/src/device/tests/mock_p4runtime_servicer_impl.py b/src/device/tests/mock_p4runtime_servicer_impl.py
new file mode 100644
index 0000000000000000000000000000000000000000..f29f05ba2d504794a085c4499ebfefba08604811
--- /dev/null
+++ b/src/device/tests/mock_p4runtime_servicer_impl.py
@@ -0,0 +1,55 @@
+"""
+A mock P4Runtime service implementation.
+"""
+
+import queue
+from google.rpc import code_pb2
+from p4.v1 import p4runtime_pb2, p4runtime_pb2_grpc
+from p4.config.v1 import p4info_pb2
+
+try:
+    from p4_util import STREAM_ATTR_ARBITRATION, STREAM_ATTR_PACKET
+except ImportError:
+    from device.service.drivers.p4.p4_util import STREAM_ATTR_ARBITRATION,\
+        STREAM_ATTR_PACKET
+
+class MockP4RuntimeServicerImpl(p4runtime_pb2_grpc.P4RuntimeServicer):
+    """
+    A P4Runtime service implementation for testing purposes.
+    """
+
+    def __init__(self):
+        self.p4info = p4info_pb2.P4Info()
+        self.p4runtime_api_version = "1.3.0"
+        self.stored_packet_out = queue.Queue()
+
+    def GetForwardingPipelineConfig(self, request, context):
+        rep = p4runtime_pb2.GetForwardingPipelineConfigResponse()
+        if self.p4info is not None:
+            rep.config.p4info.CopyFrom(self.p4info)
+        return rep
+
+    def SetForwardingPipelineConfig(self, request, context):
+        self.p4info.CopyFrom(request.config.p4info)
+        return p4runtime_pb2.SetForwardingPipelineConfigResponse()
+
+    def Write(self, request, context):
+        return p4runtime_pb2.WriteResponse()
+
+    def Read(self, request, context):
+        yield p4runtime_pb2.ReadResponse()
+
+    def StreamChannel(self, request_iterator, context):
+        for req in request_iterator:
+            if req.HasField(STREAM_ATTR_ARBITRATION):
+                rep = p4runtime_pb2.StreamMessageResponse()
+                rep.arbitration.CopyFrom(req.arbitration)
+                rep.arbitration.status.code = code_pb2.OK
+                yield rep
+            elif req.HasField(STREAM_ATTR_PACKET):
+                self.stored_packet_out.put(req)
+
+    def Capabilities(self, request, context):
+        rep = p4runtime_pb2.CapabilitiesResponse()
+        rep.p4runtime_api_version = self.p4runtime_api_version
+        return rep
diff --git a/src/device/tests/test_unit_p4.py b/src/device/tests/test_unit_p4.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f2f89f05f7cbee1e5263464a4c59e77ccde2092
--- /dev/null
+++ b/src/device/tests/test_unit_p4.py
@@ -0,0 +1,83 @@
+import pytest
+from device.service.drivers.p4.p4_driver import P4Driver
+from .device_p4 import(
+        DEVICE_P4_ADDRESS, DEVICE_P4_PORT, DEVICE_P4_ID, DEVICE_P4_NAME,
+        DEVICE_P4_VENDOR, DEVICE_P4_HW_VER, DEVICE_P4_SW_VER,
+        DEVICE_P4_PIPECONF, DEVICE_P4_WORKERS, DEVICE_P4_GRACE_PERIOD)
+from .mock_p4runtime_service import MockP4RuntimeService
+
+
+@pytest.fixture(scope='session')
+def p4runtime_service():
+    _service = MockP4RuntimeService(
+        address=DEVICE_P4_ADDRESS, port=DEVICE_P4_PORT,
+        max_workers=DEVICE_P4_WORKERS,
+        grace_period=DEVICE_P4_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+
+@pytest.fixture(scope='session')
+def device_driverapi_p4():
+    _driver = P4Driver(
+        address=DEVICE_P4_ADDRESS,
+        port=DEVICE_P4_PORT,
+        id=DEVICE_P4_ID,
+        name=DEVICE_P4_NAME,
+        vendor=DEVICE_P4_VENDOR,
+        hw_ver=DEVICE_P4_HW_VER,
+        sw_ver=DEVICE_P4_SW_VER,
+        pipeconf=DEVICE_P4_PIPECONF)
+    _driver.Connect()
+    yield _driver
+    _driver.Disconnect()
+
+
+def test_device_driverapi_p4_setconfig(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.SetConfig([])
+    return
+
+
+def test_device_driverapi_p4_getconfig(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.GetConfig()
+    return
+
+
+def test_device_driverapi_p4_getresource(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.GetResource("")
+    return
+
+
+def test_device_driverapi_p4_getstate(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.GetState()
+    return
+
+
+def test_device_driverapi_p4_deleteconfig(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.DeleteConfig([])
+    return
+
+
+def test_device_driverapi_p4_subscribe_state(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.SubscribeState([])
+    return
+
+
+def test_device_driverapi_p4_unsubscribe_state(
+        p4runtime_service: MockP4RuntimeService,
+        device_driverapi_p4: P4Driver):  # pylint: disable=redefined-outer-name
+    device_driverapi_p4.UnsubscribeState([])
+    return
diff --git a/src/device/tests/test_unitary.py b/src/device/tests/test_unitary.py
index fc9175620b7763cabc9da58756054fda9092490d..7eaee6e9fe232998e9f7a56aaaaf1c1ed136420a 100644
--- a/src/device/tests/test_unitary.py
+++ b/src/device/tests/test_unitary.py
@@ -1,5 +1,7 @@
-import copy, grpc, logging, operator, os, pytest
+import copy, grpc, json, logging, operator, os, pytest, time
+from datetime import datetime
 from typing import Tuple
+from queue import Queue, Empty
 from google.protobuf.json_format import MessageToDict
 from common.orm.Database import Database
 from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
@@ -9,24 +11,31 @@ from context.Config import (
     GRPC_SERVICE_PORT as CONTEXT_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as CONTEXT_GRPC_MAX_WORKERS,
     GRPC_GRACE_PERIOD as CONTEXT_GRPC_GRACE_PERIOD)
 from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import DeviceId
+from context.proto.context_pb2 import DeviceId, DeviceOperationalStatusEnum
 from context.service.grpc_server.ContextService import ContextService
 from device.Config import (
     GRPC_SERVICE_PORT as DEVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as DEVICE_GRPC_MAX_WORKERS,
     GRPC_GRACE_PERIOD as DEVICE_GRPC_GRACE_PERIOD)
 from device.client.DeviceClient import DeviceClient
 from device.proto.context_pb2 import ConfigActionEnum, Context, Device, Topology
+from device.proto.device_pb2 import MonitoringSettings
+from device.proto.kpi_sample_types_pb2 import KpiSampleType
 from device.service.DeviceService import DeviceService
-#from device.service.MonitoringLoops import MonitoringLoops
 from device.service.driver_api._Driver import _Driver
 from device.service.driver_api.DriverFactory import DriverFactory
 from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
 from device.service.drivers import DRIVERS
-#from monitoring.client.monitoring_client import MonitoringClient
+from device.tests.MockMonitoringService import MockMonitoringService
+from device.tests.Tools import endpoint, endpoint_id
+from monitoring.Config import (
+    GRPC_SERVICE_PORT as MONITORING_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as MONITORING_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as MONITORING_GRPC_GRACE_PERIOD)
+from monitoring.client.monitoring_client import MonitoringClient
 from .CommonObjects import CONTEXT, TOPOLOGY
 from .Device_Emulated import (
-    DEVICE_EMU, DEVICE_EMU_CONFIG_RULES, DEVICE_EMU_CONNECT_RULES, DEVICE_EMU_DECONFIG_RULES, DEVICE_EMU_ID,
-    DEVICE_EMU_RECONFIG_RULES, DEVICE_EMU_UUID)
+    DEVICE_EMU, DEVICE_EMU_CONFIG_ADDRESSES, DEVICE_EMU_CONFIG_ENDPOINTS, DEVICE_EMU_CONNECT_RULES,
+    DEVICE_EMU_DECONFIG_ADDRESSES, DEVICE_EMU_DECONFIG_ENDPOINTS, DEVICE_EMU_ENDPOINTS, DEVICE_EMU_ENDPOINTS_COOKED,
+    DEVICE_EMU_ID, DEVICE_EMU_RECONFIG_ADDRESSES, DEVICE_EMU_UUID)
 try:
     from .Device_OpenConfig_Infinera import(
         DEVICE_OC, DEVICE_OC_CONFIG_RULES, DEVICE_OC_DECONFIG_RULES, DEVICE_OC_CONNECT_RULES, DEVICE_OC_ID,
@@ -40,11 +49,38 @@ except ImportError:
     #       DEVICE_OC, DEVICE_OC_CONFIG_RULES, DEVICE_OC_DECONFIG_RULES, DEVICE_OC_CONNECT_RULES, DEVICE_OC_ID,
     #       DEVICE_OC_UUID)
 
+try:
+    from .Device_Transport_Api_CTTC import (
+        DEVICE_TAPI, DEVICE_TAPI_CONNECT_RULES, DEVICE_TAPI_UUID, DEVICE_TAPI_ID, DEVICE_TAPI_CONFIG_RULES,
+        DEVICE_TAPI_DECONFIG_RULES)
+    ENABLE_TAPI = True
+except ImportError:
+    ENABLE_TAPI = False
+    # Create a Device_Transport_Api_??.py file with the details for your device to test it and import it as follows in
+    # the try block of this import statement.
+    #   from .Device_Transport_Api_?? import(
+    #       DEVICE_TAPI, DEVICE_TAPI_CONFIG_RULES, DEVICE_TAPI_DECONFIG_RULES, DEVICE_TAPI_CONNECT_RULES,
+    #       DEVICE_TAPI_ID, DEVICE_TAPI_UUID)
+
+#ENABLE_OPENCONFIG = False
+#ENABLE_TAPI       = False
+
+from .mock_p4runtime_service import MockP4RuntimeService
+try:
+    from .device_p4 import(
+        DEVICE_P4, DEVICE_P4_ID, DEVICE_P4_UUID, DEVICE_P4_NAME,
+        DEVICE_P4_ADDRESS, DEVICE_P4_PORT, DEVICE_P4_WORKERS,
+        DEVICE_P4_GRACE_PERIOD, DEVICE_P4_CONNECT_RULES,
+        DEVICE_P4_CONFIG_RULES)
+except ImportError:
+    raise ImportError("Test configuration for P4 devices not found")
+
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
 CONTEXT_GRPC_SERVICE_PORT = 10000 + CONTEXT_GRPC_SERVICE_PORT # avoid privileged ports
 DEVICE_GRPC_SERVICE_PORT = 10000 + DEVICE_GRPC_SERVICE_PORT # avoid privileged ports
+MONITORING_GRPC_SERVICE_PORT = 10000 + MONITORING_GRPC_SERVICE_PORT # avoid privileged ports
 
 DEFAULT_REDIS_SERVICE_HOST = '127.0.0.1'
 DEFAULT_REDIS_SERVICE_PORT = 6379
@@ -87,18 +123,32 @@ def context_client(context_service : ContextService): # pylint: disable=redefine
     _client.close()
 
 @pytest.fixture(scope='session')
-def device_service(context_client : ContextClient):  # pylint: disable=redefined-outer-name
+def monitoring_service():
+    _service = MockMonitoringService(port=MONITORING_GRPC_SERVICE_PORT, max_workers=MONITORING_GRPC_MAX_WORKERS,
+        grace_period=MONITORING_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def monitoring_client(monitoring_service : MockMonitoringService): # pylint: disable=redefined-outer-name
+    _client = MonitoringClient(server='127.0.0.1', port=MONITORING_GRPC_SERVICE_PORT)
+    #yield _client
+    #_client.close()
+    return _client
+
+@pytest.fixture(scope='session')
+def device_service(
+    context_client : ContextClient,         # pylint: disable=redefined-outer-name
+    monitoring_client : MonitoringClient):  # pylint: disable=redefined-outer-name
+
     _driver_factory = DriverFactory(DRIVERS)
     _driver_instance_cache = DriverInstanceCache(_driver_factory)
-    #_monitoring_loops = MonitoringLoops(None) # TODO: replace by monitoring client
-    #_monitoring_loops.start()
     _service = DeviceService(
-        context_client, _driver_instance_cache,
-        #_monitoring_loops,
-        port=DEVICE_GRPC_SERVICE_PORT, max_workers=DEVICE_GRPC_MAX_WORKERS, grace_period=DEVICE_GRPC_GRACE_PERIOD)
+        context_client, monitoring_client, _driver_instance_cache, port=DEVICE_GRPC_SERVICE_PORT,
+        max_workers=DEVICE_GRPC_MAX_WORKERS, grace_period=DEVICE_GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
-    #_monitoring_loops.stop()
     _service.stop()
 
 @pytest.fixture(scope='session')
@@ -107,6 +157,16 @@ def device_client(device_service : DeviceService): # pylint: disable=redefined-o
     yield _client
     _client.close()
 
+@pytest.fixture(scope='session')
+def p4runtime_service():
+    _service = MockP4RuntimeService(
+        address=DEVICE_P4_ADDRESS, port=DEVICE_P4_PORT,
+        max_workers=DEVICE_P4_WORKERS,
+        grace_period=DEVICE_P4_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
 def grpc_message_to_json_string(message):
     return str(MessageToDict(
         message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False))
@@ -121,17 +181,31 @@ def test_prepare_environment(
     context_client.SetTopology(Topology(**TOPOLOGY))
 
 
-# ----- Test Device Driver Emulated ------------------------------------------------------------------------------------
+# ----- Test Device Driver Emulated --------------------------------------------
+# Device Driver Emulated tests are used to validate the Driver API as well as the Emulated Device Driver. Note that
+# other Drivers might support a different set of resource paths and attributes/values per resource; however, they
+# must implement the Driver API.
 
 def test_device_emulated_add_error_cases(
     context_client : ContextClient,     # pylint: disable=redefined-outer-name
     device_client : DeviceClient,       # pylint: disable=redefined-outer-name
     device_service : DeviceService):    # pylint: disable=redefined-outer-name
 
+    with pytest.raises(grpc.RpcError) as e:
+        DEVICE_EMU_WITH_ENDPOINTS = copy.deepcopy(DEVICE_EMU)
+        DEVICE_EMU_WITH_ENDPOINTS['device_endpoints'].append(endpoint(DEVICE_EMU_ID, 'ep-id', 'ep-type'))
+        device_client.AddDevice(Device(**DEVICE_EMU_WITH_ENDPOINTS))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'device.device_endpoints(['
+    msg_tail = ']) is invalid; RPC method AddDevice does not accept Endpoints. '\
+               'Endpoints are discovered through interrogation of the physical device.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
     with pytest.raises(grpc.RpcError) as e:
         DEVICE_EMU_WITH_EXTRA_RULES = copy.deepcopy(DEVICE_EMU)
         DEVICE_EMU_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONNECT_RULES)
-        DEVICE_EMU_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONFIG_RULES)
+        DEVICE_EMU_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONFIG_ENDPOINTS)
         device_client.AddDevice(Device(**DEVICE_EMU_WITH_EXTRA_RULES))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg_head = 'device.device_config.config_rules(['
@@ -174,61 +248,193 @@ def test_device_emulated_configure(
     assert driver is not None
 
     driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
-    LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
-    assert len(driver_config) == 0
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    assert len(driver_config) == len(DEVICE_EMU_ENDPOINTS_COOKED)
+    for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
+        assert endpoint_cooked in driver_config
 
     DEVICE_EMU_WITH_CONFIG_RULES = copy.deepcopy(DEVICE_EMU)
-    DEVICE_EMU_WITH_CONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONFIG_RULES)
+    DEVICE_EMU_WITH_CONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONFIG_ENDPOINTS)
     device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_CONFIG_RULES))
 
+    DEVICE_EMU_WITH_CONFIG_RULES = copy.deepcopy(DEVICE_EMU)
+    DEVICE_EMU_WITH_CONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONFIG_ADDRESSES)
+    device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_CONFIG_RULES))
+
+    DEVICE_EMU_WITH_OPERATIONAL_STATUS = copy.deepcopy(DEVICE_EMU)
+    DEVICE_EMU_WITH_OPERATIONAL_STATUS['device_operational_status'] = \
+        DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+    device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_OPERATIONAL_STATUS))
+
+
     driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
-    LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
-    assert len(driver_config) == 3
-    assert driver_config[0] == ('/dev/rsrc1/value', 'value1')
-    assert driver_config[1] == ('/dev/rsrc2/value', 'value2')
-    assert driver_config[2] == ('/dev/rsrc3/value', 'value3')
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    assert len(driver_config) == len(DEVICE_EMU_ENDPOINTS_COOKED) + len(DEVICE_EMU_CONFIG_ADDRESSES)
+    for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
+        endpoint_cooked = copy.deepcopy(endpoint_cooked)
+        endpoint_cooked[1]['enabled'] = True
+        assert endpoint_cooked in driver_config
+    for config_rule in DEVICE_EMU_CONFIG_ADDRESSES:
+        assert (config_rule['resource_key'], json.loads(config_rule['resource_value'])) in driver_config
 
     device_data = context_client.GetDevice(DeviceId(**DEVICE_EMU_ID))
+    assert device_data.device_operational_status == DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
     config_rules = [
         (ConfigActionEnum.Name(config_rule.action), config_rule.resource_key, config_rule.resource_value)
         for config_rule in device_data.device_config.config_rules
     ]
-    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
-        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
-    for config_rule in DEVICE_EMU_CONFIG_RULES:
+    #LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+    #    '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    RESULTING_CONFIG_ENDPOINTS = {cr['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ENDPOINTS)}
+    for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
+        values = json.loads(RESULTING_CONFIG_ENDPOINTS[endpoint_cooked[0]]['resource_value'])
+        values.update(endpoint_cooked[1])
+        RESULTING_CONFIG_ENDPOINTS[endpoint_cooked[0]]['resource_value'] = json.dumps(values, sort_keys=True)
+    for config_rule in RESULTING_CONFIG_ENDPOINTS.values():
         config_rule = (
-            ConfigActionEnum.Name(config_rule['action']), config_rule['resource_key'], config_rule['resource_value'])
+            ConfigActionEnum.Name(config_rule['action']), config_rule['resource_key'],
+            json.loads(json.dumps(config_rule['resource_value'])))
+        assert config_rule in config_rules
+    for config_rule in DEVICE_EMU_CONFIG_ADDRESSES:
+        config_rule = (
+            ConfigActionEnum.Name(config_rule['action']), config_rule['resource_key'],
+            json.loads(json.dumps(config_rule['resource_value'])))
         assert config_rule in config_rules
 
     # Try to reconfigure...
 
     DEVICE_EMU_WITH_RECONFIG_RULES = copy.deepcopy(DEVICE_EMU)
-    DEVICE_EMU_WITH_RECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_RECONFIG_RULES)
+    DEVICE_EMU_WITH_RECONFIG_RULES['device_operational_status'] = \
+        DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+    DEVICE_EMU_WITH_RECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_RECONFIG_ADDRESSES)
     device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_RECONFIG_RULES))
 
+    RESULTING_CONFIG_RULES = {cr['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ENDPOINTS)}
+    for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
+        values = json.loads(RESULTING_CONFIG_RULES[endpoint_cooked[0]]['resource_value'])
+        values.update(endpoint_cooked[1])
+        RESULTING_CONFIG_RULES[endpoint_cooked[0]]['resource_value'] = json.dumps(values, sort_keys=True)
+    RESULTING_CONFIG_RULES.update({cr['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ADDRESSES)})
+    for reconfig_rule in DEVICE_EMU_RECONFIG_ADDRESSES:
+        if reconfig_rule['action'] == ConfigActionEnum.CONFIGACTION_DELETE:
+            RESULTING_CONFIG_RULES.pop(reconfig_rule['resource_key'], None)
+        else:
+            RESULTING_CONFIG_RULES[reconfig_rule['resource_key']] = reconfig_rule
+    RESULTING_CONFIG_RULES = RESULTING_CONFIG_RULES.values()
+    #LOGGER.info('RESULTING_CONFIG_RULES = {:s}'.format(str(RESULTING_CONFIG_RULES)))
+
     driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
-    LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
-    assert len(driver_config) == 5
-    assert driver_config[0] == ('/dev/rsrc10/value', 'value10')
-    assert driver_config[1] == ('/dev/rsrc11/value', 'value11')
-    assert driver_config[2] == ('/dev/rsrc12/value', 'value12')
-    assert driver_config[3] == ('/dev/rsrc2/value', 'value2')
-    assert driver_config[4] == ('/dev/rsrc3/value', 'value3')
+    driver_config = json.loads(json.dumps(driver_config)) # prevent integer keys from failing to match string keys
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    assert len(driver_config) == len(RESULTING_CONFIG_RULES)
+    for config_rule in RESULTING_CONFIG_RULES:
+        resource = [config_rule['resource_key'], json.loads(config_rule['resource_value'])]
+        assert resource in driver_config
 
     device_data = context_client.GetDevice(DeviceId(**DEVICE_EMU_ID))
     config_rules = [
         (ConfigActionEnum.Name(config_rule.action), config_rule.resource_key, config_rule.resource_value)
         for config_rule in device_data.device_config.config_rules
     ]
-    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
-        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
-    final_config_rules = DEVICE_EMU_CONFIG_RULES[1:] + DEVICE_EMU_RECONFIG_RULES[1:] # remove '/dev/rsrc1/value'
-    for config_rule in final_config_rules:
+    #LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+    #    '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    for config_rule in RESULTING_CONFIG_RULES:
         config_rule = (
             ConfigActionEnum.Name(config_rule['action']), config_rule['resource_key'], config_rule['resource_value'])
         assert config_rule in config_rules
 
 
+def test_device_emulated_monitor(
+    context_client : ContextClient,                 # pylint: disable=redefined-outer-name
+    device_client : DeviceClient,                   # pylint: disable=redefined-outer-name
+    device_service : DeviceService,                 # pylint: disable=redefined-outer-name
+    monitoring_service : MockMonitoringService):    # pylint: disable=redefined-outer-name
+
+    #device_data = context_client.GetDevice(DeviceId(**DEVICE_EMU_ID))
+    #LOGGER.info('device_data = \n{:s}'.format(str(device_data)))
+
+    driver : _Driver = device_service.driver_instance_cache.get(DEVICE_EMU_UUID) # we know the driver exists now
+    assert driver is not None
+    driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    assert len(driver_config) == len(DEVICE_EMU_ENDPOINTS_COOKED) + len(DEVICE_EMU_CONFIG_ADDRESSES)
+
+    SAMPLING_DURATION_SEC = 3.0
+    SAMPLING_INTERVAL_SEC = 0.5
+
+    MONITORING_SETTINGS_LIST = []
+    KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED = {}
+    for endpoint_uuid,_,sample_types in DEVICE_EMU_ENDPOINTS:
+        for sample_type in sample_types:
+            sample_type_id = sample_type.value
+            sample_type_name = str(KpiSampleType.Name(sample_type_id)).upper().replace('KPISAMPLETYPE_', '')
+            kpi_uuid = '{:s}-{:s}-{:s}-kpi_uuid'.format(DEVICE_EMU_UUID, endpoint_uuid, str(sample_type_id))
+            monitoring_settings = {
+                'kpi_id'        : {'kpi_id': {'uuid': kpi_uuid}},
+                'kpi_descriptor': {
+                    'kpi_description': 'Metric {:s} for Endpoint {:s} in Device {:s}'.format(
+                        sample_type_name, endpoint_uuid, DEVICE_EMU_UUID),
+                    'kpi_sample_type': sample_type_id,
+                    'device_id': DEVICE_EMU_ID,
+                    'endpoint_id': endpoint_id(DEVICE_EMU_ID, endpoint_uuid),
+                },
+                'sampling_duration_s': SAMPLING_DURATION_SEC,
+                'sampling_interval_s': SAMPLING_INTERVAL_SEC,
+            }
+            MONITORING_SETTINGS_LIST.append(monitoring_settings)
+            KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED[kpi_uuid] = 0
+
+    NUM_SAMPLES_EXPECTED_PER_KPI = SAMPLING_DURATION_SEC / SAMPLING_INTERVAL_SEC
+    NUM_SAMPLES_EXPECTED = len(MONITORING_SETTINGS_LIST) * NUM_SAMPLES_EXPECTED_PER_KPI
+
+    # Start monitoring the device
+    t_start_monitoring = datetime.timestamp(datetime.utcnow())
+    for monitoring_settings in MONITORING_SETTINGS_LIST:
+        device_client.MonitorDeviceKpi(MonitoringSettings(**monitoring_settings))
+
+    # wait to receive the expected number of samples
+    # if it takes more than 1.5 times the sampling duration, assume there is an error
+    time_ini = time.time()
+    queue_samples : Queue = monitoring_service.queue_samples
+    received_samples = []
+    while (len(received_samples) < NUM_SAMPLES_EXPECTED) and (time.time() - time_ini < SAMPLING_DURATION_SEC * 1.5):
+        try:
+            received_sample = queue_samples.get(block=True, timeout=SAMPLING_INTERVAL_SEC / NUM_SAMPLES_EXPECTED)
+            #LOGGER.info('received_sample = {:s}'.format(str(received_sample)))
+            received_samples.append(received_sample)
+        except Empty:
+            continue
+
+    t_end_monitoring = datetime.timestamp(datetime.utcnow())
+
+    LOGGER.info('received_samples = {:s}'.format(str(received_samples)))
+    assert len(received_samples) == NUM_SAMPLES_EXPECTED
+    for received_sample in received_samples:
+        kpi_uuid = received_sample.kpi_id.kpi_id.uuid
+        assert kpi_uuid in KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED
+        assert isinstance(received_sample.timestamp, str)
+        timestamp = float(received_sample.timestamp)
+        assert timestamp > t_start_monitoring
+        assert timestamp < t_end_monitoring
+        assert received_sample.kpi_value.HasField('floatVal')
+        assert isinstance(received_sample.kpi_value.floatVal, float)
+        KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED[kpi_uuid] += 1
+
+    LOGGER.info('KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED = {:s}'.format(str(KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED)))
+    for kpi_uuid, num_samples_received in KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED.items():
+        assert num_samples_received == NUM_SAMPLES_EXPECTED_PER_KPI
+
+    # Unsubscribe monitoring
+    for kpi_uuid in KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED.keys():
+        MONITORING_SETTINGS_UNSUBSCRIBE = {
+            'kpi_id'             : {'kpi_id': {'uuid': kpi_uuid}},
+            'sampling_duration_s': -1, # a negative value in sampling_duration_s or sampling_interval_s means unsubscribe
+            'sampling_interval_s': -1, # kpi_id is mandatory to unsubscribe
+        }
+        device_client.MonitorDeviceKpi(MonitoringSettings(**MONITORING_SETTINGS_UNSUBSCRIBE))
+
+
 def test_device_emulated_deconfigure(
     context_client : ContextClient,     # pylint: disable=redefined-outer-name
     device_client : DeviceClient,       # pylint: disable=redefined-outer-name
@@ -238,14 +444,37 @@ def test_device_emulated_deconfigure(
     assert driver is not None
 
     driver_config = driver.GetConfig()
-    LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
 
     DEVICE_EMU_WITH_DECONFIG_RULES = copy.deepcopy(DEVICE_EMU)
-    DEVICE_EMU_WITH_DECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_DECONFIG_RULES)
+    DEVICE_EMU_WITH_DECONFIG_RULES['device_operational_status'] = \
+        DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED
+    DEVICE_EMU_WITH_DECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_DECONFIG_ADDRESSES)
     device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_DECONFIG_RULES))
 
+    RESULTING_CONFIG_RULES = {cr['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ENDPOINTS)}
+    for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
+        values = json.loads(RESULTING_CONFIG_RULES[endpoint_cooked[0]]['resource_value'])
+        values.update(endpoint_cooked[1])
+        RESULTING_CONFIG_RULES[endpoint_cooked[0]]['resource_value'] = json.dumps(values, sort_keys=True)
+    RESULTING_CONFIG_RULES = RESULTING_CONFIG_RULES.values()
     driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
-    LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    driver_config = json.loads(json.dumps(driver_config)) # prevent integer keys from failing to match string keys
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+    assert len(driver_config) == len(RESULTING_CONFIG_RULES)
+    #LOGGER.info('RESULTING_CONFIG_RULES = {:s}'.format(str(RESULTING_CONFIG_RULES)))
+    for config_rule in RESULTING_CONFIG_RULES:
+        config_rule = [config_rule['resource_key'], json.loads(config_rule['resource_value'])]
+        #LOGGER.info('config_rule = {:s}'.format(str(config_rule)))
+        assert config_rule in driver_config
+
+    DEVICE_EMU_WITH_DECONFIG_RULES = copy.deepcopy(DEVICE_EMU)
+    DEVICE_EMU_WITH_DECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_EMU_DECONFIG_ENDPOINTS)
+    device_client.ConfigureDevice(Device(**DEVICE_EMU_WITH_DECONFIG_RULES))
+
+    driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    driver_config = json.loads(json.dumps(driver_config)) # prevent integer keys from failing to match string keys
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
     assert len(driver_config) == 0
 
     device_data = context_client.GetDevice(DeviceId(**DEVICE_EMU_ID))
@@ -262,7 +491,7 @@ def test_device_emulated_delete(
     assert driver is None
 
 
-# ----- Test Device Driver OpenConfig ----------------------------------------------------------------------------------
+# ----- Test Device Driver OpenConfig ------------------------------------------
 
 def test_device_openconfig_add_error_cases(
     context_client : ContextClient,     # pylint: disable=redefined-outer-name
@@ -392,3 +621,208 @@ def test_device_openconfig_delete(
     device_client.DeleteDevice(DeviceId(**DEVICE_OC_ID))
     driver : _Driver = device_service.driver_instance_cache.get(DEVICE_OC_UUID, {})
     assert driver is None
+
+
+# ----- Test Device Driver TAPI ------------------------------------------------
+
+def test_device_tapi_add_error_cases(
+    device_client : DeviceClient):      # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    with pytest.raises(grpc.RpcError) as e:
+        DEVICE_TAPI_WITH_EXTRA_RULES = copy.deepcopy(DEVICE_TAPI)
+        DEVICE_TAPI_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_TAPI_CONNECT_RULES)
+        DEVICE_TAPI_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_TAPI_CONFIG_RULES)
+        device_client.AddDevice(Device(**DEVICE_TAPI_WITH_EXTRA_RULES))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'device.device_config.config_rules(['
+    msg_tail = ']) is invalid; RPC method AddDevice only accepts connection Config Rules that should start '\
+               'with "_connect/" tag. Others should be configured after adding the device.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+
+def test_device_tapi_add_correct(
+    device_client: DeviceClient,        # pylint: disable=redefined-outer-name
+    device_service: DeviceService):     # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    DEVICE_TAPI_WITH_CONNECT_RULES = copy.deepcopy(DEVICE_TAPI)
+    DEVICE_TAPI_WITH_CONNECT_RULES['device_config']['config_rules'].extend(DEVICE_TAPI_CONNECT_RULES)
+    device_client.AddDevice(Device(**DEVICE_TAPI_WITH_CONNECT_RULES))
+    driver: _Driver = device_service.driver_instance_cache.get(DEVICE_TAPI_UUID)
+    assert driver is not None
+
+
+def test_device_tapi_get(
+    context_client: ContextClient,      # pylint: disable=redefined-outer-name
+    device_client: DeviceClient):       # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    initial_config = device_client.GetInitialConfig(DeviceId(**DEVICE_TAPI_ID))
+    LOGGER.info('initial_config = {:s}'.format(grpc_message_to_json_string(initial_config)))
+
+    device_data = context_client.GetDevice(DeviceId(**DEVICE_TAPI_ID))
+    LOGGER.info('device_data = {:s}'.format(grpc_message_to_json_string(device_data)))
+
+
+def test_device_tapi_configure(
+    context_client: ContextClient,      # pylint: disable=redefined-outer-name
+    device_client: DeviceClient,        # pylint: disable=redefined-outer-name
+    device_service: DeviceService):     # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    driver : _Driver = device_service.driver_instance_cache.get(DEVICE_TAPI_UUID)
+    assert driver is not None
+
+    # Requires retrieving data from the device and might be slow. Uncomment only when needed and the test does not pass directly.
+    #driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+
+    DEVICE_TAPI_WITH_CONFIG_RULES = copy.deepcopy(DEVICE_TAPI)
+    DEVICE_TAPI_WITH_CONFIG_RULES['device_config']['config_rules'].extend(DEVICE_TAPI_CONFIG_RULES)
+    device_client.ConfigureDevice(Device(**DEVICE_TAPI_WITH_CONFIG_RULES))
+
+    # Requires retrieving data from the device and might be slow. Uncomment only when needed and the test does not pass directly.
+    #driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+
+    device_data = context_client.GetDevice(DeviceId(**DEVICE_TAPI_ID))
+    config_rules = [
+        (ConfigActionEnum.Name(config_rule.action), config_rule.resource_key, config_rule.resource_value)
+        for config_rule in device_data.device_config.config_rules
+    ]
+    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    for config_rule in DEVICE_TAPI_CONFIG_RULES:
+        config_rule = (
+            ConfigActionEnum.Name(config_rule['action']), config_rule['resource_key'], config_rule['resource_value'])
+        assert config_rule in config_rules
+
+
+def test_device_tapi_deconfigure(
+    context_client: ContextClient,      # pylint: disable=redefined-outer-name
+    device_client: DeviceClient,        # pylint: disable=redefined-outer-name
+    device_service: DeviceService):     # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    driver: _Driver = device_service.driver_instance_cache.get(DEVICE_TAPI_UUID)
+    assert driver is not None
+
+    # Requires retrieving data from the device and might be slow. Uncomment only when needed and the test does not pass directly.
+    #driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+
+    DEVICE_TAPI_WITH_DECONFIG_RULES = copy.deepcopy(DEVICE_TAPI)
+    DEVICE_TAPI_WITH_DECONFIG_RULES['device_config']['config_rules'].extend(DEVICE_TAPI_DECONFIG_RULES)
+    device_client.ConfigureDevice(Device(**DEVICE_TAPI_WITH_DECONFIG_RULES))
+
+    # Requires retrieving data from the device and might be slow. Uncomment only when needed and the test does not pass directly.
+    #driver_config = sorted(driver.GetConfig(), key=operator.itemgetter(0))
+    #LOGGER.info('driver_config = {:s}'.format(str(driver_config)))
+
+    device_data = context_client.GetDevice(DeviceId(**DEVICE_TAPI_ID))
+    config_rules = [
+        (ConfigActionEnum.Name(config_rule.action), config_rule.resource_key, config_rule.resource_value)
+        for config_rule in device_data.device_config.config_rules
+    ]
+    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    for config_rule in DEVICE_TAPI_DECONFIG_RULES:
+        action_set = ConfigActionEnum.Name(ConfigActionEnum.CONFIGACTION_SET)
+        config_rule = (action_set, config_rule['resource_key'], config_rule['resource_value'])
+        assert config_rule not in config_rules
+
+
+def test_device_tapi_delete(
+    device_client : DeviceClient,       # pylint: disable=redefined-outer-name
+    device_service : DeviceService):    # pylint: disable=redefined-outer-name
+
+    if not ENABLE_TAPI: return # if there is no device to test against, assume the test passes silently.
+
+    device_client.DeleteDevice(DeviceId(**DEVICE_TAPI_ID))
+    driver : _Driver = device_service.driver_instance_cache.get(DEVICE_TAPI_UUID, {})
+    assert driver is None
+
+
+# ----- Test Device Driver P4 --------------------------------------------------
+
+def test_device_p4_add_error_cases(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService):  # pylint: disable=redefined-outer-name
+
+    with pytest.raises(grpc.RpcError) as e:
+        device_p4_with_extra_rules = copy.deepcopy(DEVICE_P4)
+        device_p4_with_extra_rules['device_config']['config_rules'].extend(
+            DEVICE_P4_CONNECT_RULES)
+        device_p4_with_extra_rules['device_config']['config_rules'].extend(
+            DEVICE_P4_CONFIG_RULES)
+        device_client.AddDevice(Device(**device_p4_with_extra_rules))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'device.device_config.config_rules(['
+    msg_tail = ']) is invalid; RPC method AddDevice only accepts connection Config Rules that should start '\
+               'with "_connect/" tag. Others should be configured after adding the device.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+
+def test_device_p4_add_correct(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService,   # pylint: disable=redefined-outer-name
+        p4runtime_service: MockP4RuntimeService):
+
+    device_p4_with_connect_rules = copy.deepcopy(DEVICE_P4)
+    device_p4_with_connect_rules['device_config']['config_rules'].extend(
+        DEVICE_P4_CONNECT_RULES)
+    device_client.AddDevice(Device(**device_p4_with_connect_rules))
+    driver : _Driver = device_service.driver_instance_cache.get(DEVICE_P4_NAME)
+    assert driver is not None
+
+
+def test_device_p4_get(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService,   # pylint: disable=redefined-outer-name
+        p4runtime_service: MockP4RuntimeService):
+
+    initial_config = device_client.GetInitialConfig(DeviceId(**DEVICE_P4_UUID))
+    LOGGER.info('initial_config = {:s}'.format(
+        grpc_message_to_json_string(initial_config)))
+
+    device_data = context_client.GetDevice(DeviceId(**DEVICE_P4_UUID))
+    LOGGER.info('device_data = {:s}'.format(
+        grpc_message_to_json_string(device_data)))
+
+
+def test_device_p4_configure(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService,   # pylint: disable=redefined-outer-name
+        p4runtime_service: MockP4RuntimeService):
+    pytest.skip("Skipping test for unimplemented method")
+
+
+def test_device_p4_deconfigure(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService,   # pylint: disable=redefined-outer-name
+        p4runtime_service: MockP4RuntimeService):
+    pytest.skip("Skipping test for unimplemented method")
+
+
+def test_device_p4_delete(
+        context_client: ContextClient,   # pylint: disable=redefined-outer-name
+        device_client: DeviceClient,     # pylint: disable=redefined-outer-name
+        device_service: DeviceService,   # pylint: disable=redefined-outer-name
+        p4runtime_service: MockP4RuntimeService):
+
+    device_client.DeleteDevice(DeviceId(**DEVICE_P4_UUID))
+    driver : _Driver = device_service.driver_instance_cache.get(DEVICE_P4_NAME)
+    assert driver is None
diff --git a/src/device/tests/test_unitary_driverapi.py b/src/device/tests/test_unitary_driverapi.py
deleted file mode 100644
index 027e7775eae4a3f7a19c056266e1fc807b09cf2d..0000000000000000000000000000000000000000
--- a/src/device/tests/test_unitary_driverapi.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import copy, logging, math, pytest, time
-from device.service.drivers.emulated.EmulatedDriver import EmulatedDriver
-
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
-
-PATH_IF = '/interfaces/interface[name="{}"]'
-PATH_SUBIF = PATH_IF + '/subinterfaces/subinterface[index="{}"]'
-PATH_ADDRIPV4 = PATH_SUBIF + '/ipv4/address[ip="{}"]'
-
-DEVICE_CONFIG_IF1 = []
-DEVICE_CONFIG_IF1.append((PATH_IF      .format('IF1'               ) + '/config/name',           "IF1"     ))
-DEVICE_CONFIG_IF1.append((PATH_IF      .format('IF1'               ) + '/config/enabled',        True      ))
-DEVICE_CONFIG_IF1.append((PATH_SUBIF   .format('IF1', 0            ) + '/config/index',          0         ))
-DEVICE_CONFIG_IF1.append((PATH_ADDRIPV4.format('IF1', 0, '10.1.0.1') + '/config/ip',             "10.1.0.1"))
-DEVICE_CONFIG_IF1.append((PATH_ADDRIPV4.format('IF1', 0, '10.1.0.1') + '/config/prefix_length',  24        ))
-
-DEVICE_CONFIG_IF2 = []
-DEVICE_CONFIG_IF2.append((PATH_IF      .format('IF2'               ) + '/config/name',           "IF2"     ))
-DEVICE_CONFIG_IF2.append((PATH_IF      .format('IF2'               ) + '/config/enabled',        True      ))
-DEVICE_CONFIG_IF2.append((PATH_SUBIF   .format('IF2', 0            ) + '/config/index',          0         ))
-DEVICE_CONFIG_IF2.append((PATH_ADDRIPV4.format('IF2', 0, '10.2.0.1') + '/config/ip',             "10.2.0.1"))
-DEVICE_CONFIG_IF2.append((PATH_ADDRIPV4.format('IF2', 0, '10.2.0.1') + '/config/prefix_length',  24        ))
-
-PATH_IF_TX_PKTS = PATH_IF + 'state/tx_packets_per_second'
-PATH_IF_RX_PKTS = PATH_IF + 'state/rx_packets_per_second'
-
-DEVICE_STATE_IF1_TX_PKTS = PATH_IF_TX_PKTS.format('IF1')
-DEVICE_STATE_IF1_RX_PKTS = PATH_IF_RX_PKTS.format('IF1')
-DEVICE_STATE_IF2_TX_PKTS = PATH_IF_TX_PKTS.format('IF2')
-DEVICE_STATE_IF2_RX_PKTS = PATH_IF_RX_PKTS.format('IF2')
-
-@pytest.fixture(scope='session')
-def device_driverapi_emulated():
-    _driver = EmulatedDriver('127.0.0.1', 0)
-    _driver.Connect()
-    yield _driver
-    _driver.Disconnect()
-
-def test_device_driverapi_emulated_setconfig(
-    device_driverapi_emulated : EmulatedDriver): # pylint: disable=redefined-outer-name
-
-    results = device_driverapi_emulated.SetConfig(DEVICE_CONFIG_IF1)
-    LOGGER.info('results:\n{:s}'.format('\n'.join(map(str, results))))
-    assert len(results) == len(DEVICE_CONFIG_IF1)
-    for result in results: assert isinstance(result, bool) and result
-
-    results = device_driverapi_emulated.SetConfig(DEVICE_CONFIG_IF2)
-    LOGGER.info('results:\n{:s}'.format('\n'.join(map(str, results))))
-    assert len(results) == len(DEVICE_CONFIG_IF2)
-    for result in results: assert isinstance(result, bool) and result
-
-def test_device_driverapi_emulated_getconfig(
-    device_driverapi_emulated : EmulatedDriver): # pylint: disable=redefined-outer-name
-
-    stored_config = device_driverapi_emulated.GetConfig()
-    LOGGER.info('stored_config:\n{:s}'.format('\n'.join(map(str, stored_config))))
-    assert len(stored_config) == len(DEVICE_CONFIG_IF1) + len(DEVICE_CONFIG_IF2)
-    for config_row in stored_config: assert (config_row in DEVICE_CONFIG_IF1) or (config_row in DEVICE_CONFIG_IF2)
-    for config_row in DEVICE_CONFIG_IF1: assert config_row in stored_config
-    for config_row in DEVICE_CONFIG_IF2: assert config_row in stored_config
-
-    stored_config = device_driverapi_emulated.GetConfig([PATH_IF.format('IF2')])
-    LOGGER.info('stored_config:\n{:s}'.format('\n'.join(map(str, stored_config))))
-    assert len(stored_config) == len(DEVICE_CONFIG_IF2)
-    for config_row in stored_config: assert config_row in DEVICE_CONFIG_IF2
-    for config_row in DEVICE_CONFIG_IF2: assert config_row in stored_config
-
-def test_device_driverapi_emulated_deleteconfig(
-    device_driverapi_emulated : EmulatedDriver): # pylint: disable=redefined-outer-name
-
-    results = device_driverapi_emulated.DeleteConfig([(PATH_ADDRIPV4.format('IF2', 0, '10.2.0.1'), '')])
-    LOGGER.info('results:\n{:s}'.format('\n'.join(map(str, results))))
-    assert (len(results) == 1) and isinstance(results[0], bool) and results[0]
-
-    stored_config = device_driverapi_emulated.GetConfig()
-    LOGGER.info('stored_config:\n{:s}'.format('\n'.join(map(str, stored_config))))
-
-    device_config_if2 = list(filter(lambda row: '10.2.0.1' not in row[0], copy.deepcopy(DEVICE_CONFIG_IF2)))
-    assert len(stored_config) == len(DEVICE_CONFIG_IF1) + len(device_config_if2)
-    for config_row in stored_config: assert (config_row in DEVICE_CONFIG_IF1) or (config_row in device_config_if2)
-    for config_row in DEVICE_CONFIG_IF1: assert config_row in stored_config
-    for config_row in device_config_if2: assert config_row in stored_config
-
-def test_device_driverapi_emulated_subscriptions(
-    device_driverapi_emulated : EmulatedDriver): # pylint: disable=redefined-outer-name
-
-    duration = 10.0
-    interval = 1.5
-    results = device_driverapi_emulated.SubscribeState([
-        (DEVICE_STATE_IF1_TX_PKTS, duration, interval),
-        (DEVICE_STATE_IF1_RX_PKTS, duration, interval),
-        (DEVICE_STATE_IF2_TX_PKTS, duration, interval),
-        (DEVICE_STATE_IF2_RX_PKTS, duration, interval),
-    ])
-    LOGGER.info('results:\n{:s}'.format('\n'.join(map(str, results))))
-    assert len(results) == 4
-    for result in results: assert isinstance(result, bool) and result
-
-    stored_config = device_driverapi_emulated.GetConfig()
-    LOGGER.info('stored_config:\n{:s}'.format('\n'.join(map(str, stored_config))))
-
-    time.sleep(duration + 1.0) # let time to generate samples, plus 1 second extra time
-
-    samples = []
-    for sample in device_driverapi_emulated.GetState(blocking=False):
-        LOGGER.info('sample: {:s}'.format(str(sample)))
-        timestamp,resource_key,resource_value = sample
-        samples.append((timestamp, resource_key, resource_value))
-    LOGGER.info('samples:\n{:s}'.format('\n'.join(map(str, samples))))
-    assert len(samples) == 4 * (math.floor(duration/interval) + 1)
-
-    results = device_driverapi_emulated.UnsubscribeState([
-        (DEVICE_STATE_IF1_TX_PKTS, 10.0, 1.5),
-        (DEVICE_STATE_IF1_RX_PKTS, 10.0, 1.5),
-        (DEVICE_STATE_IF2_TX_PKTS, 10.0, 1.5),
-        (DEVICE_STATE_IF2_RX_PKTS, 10.0, 1.5),
-    ])
-    LOGGER.info('results:\n{:s}'.format('\n'.join(map(str, results))))
-    assert len(results) == 4
-    for result in results: assert isinstance(result, bool) and result
-
-    stored_config = device_driverapi_emulated.GetConfig()
-    LOGGER.info('stored_config:\n{:s}'.format('\n'.join(map(str, stored_config))))
-    device_config_if2 = list(filter(lambda row: '10.2.0.1' not in row[0], copy.deepcopy(DEVICE_CONFIG_IF2)))
-    assert len(stored_config) == len(DEVICE_CONFIG_IF1) + len(device_config_if2)
-    for config_row in stored_config: assert (config_row in DEVICE_CONFIG_IF1) or (config_row in device_config_if2)
-    for config_row in DEVICE_CONFIG_IF1: assert config_row in stored_config
-    for config_row in device_config_if2: assert config_row in stored_config
diff --git a/src/dlt/.gitignore b/src/dlt/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..1de6c650e4e3891fba0a81d585634f635e03a5c4
--- /dev/null
+++ b/src/dlt/.gitignore
@@ -0,0 +1,90 @@
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn.  Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+# From https://github.com/github/gitignore/blob/master/Gradle.gitignore
+/.gradle/
+/build/
+
+# Ignore Gradle GUI config
+gradle-app.setting
+
+# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
+!gradle-wrapper.jar
+
+# Cache of project
+.gradletasknamecache
+
+local.properties
+wallet/
\ No newline at end of file
diff --git a/src/dlt/README.md b/src/dlt/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..361de07c6a35fb7951f063a9aa6fc3fb28d3ba0d
--- /dev/null
+++ b/src/dlt/README.md
@@ -0,0 +1,84 @@
+```
+     NEC Laboratories Europe GmbH
+
+     PROPRIETARY INFORMATION
+
+ The software and its source code contain valuable trade secrets and
+ shall be maintained in confidence and treated as confidential
+ information. The software may only be used for evaluation and/or
+ testing purposes, unless otherwise explicitly stated in a written
+ agreement with NEC Laboratories Europe GmbH.
+
+ Any unauthorized publication, transfer to third parties or
+ duplication of the object or source code - either totally or in
+ part - is strictly prohibited.
+
+          Copyright (c) 2021 NEC Laboratories Europe GmbH
+          All Rights Reserved.
+
+ Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+
+
+ NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+ WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+ THE ACCOMPANYING DOCUMENTATION.
+
+ NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+ Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+ ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+ LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+ INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+ INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+ OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+ GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+ THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+ ```
+
+# DLT module guide
+
+## General information
+The DLT module provides access to the underlying Fabric deployment. It allows clients to add,
+retrieve, modify and delete blockchain-backed data, essentially working as a key-value database.
+External clients should use the REST API to communicate with this service; a detailed description
+is available below.
+
+## Code structure
+The whole DLT module consists of several packages:
+- fabric package
+- http package
+- proto package
+- client example
+
+### Fabric package
+The most important class in this package is `FabricConnector`. First, it establishes a connection
+with the underlying Fabric network using the Java Gateway SDK. After that, it can be used as a
+CRUD interface.
+Other files contain auxiliary code for `FabricConnector` that allows it to register/enroll users
+and to obtain smart contract instances.
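+
+The connector implementation itself is not part of this excerpt; as a rough, hypothetical sketch
+(not the actual `FabricConnector` code), the underlying `fabric-gateway-java` calls it wraps look
+roughly as follows. The identity label, channel name, chaincode name and chaincode functions are
+placeholders; the connection profile path points at the file shipped in `config/`:
+
+```kotlin
+import org.hyperledger.fabric.gateway.Gateway
+import org.hyperledger.fabric.gateway.Wallets
+import java.nio.file.Paths
+
+fun main() {
+    // A wallet with an already-enrolled identity is assumed to exist under ./wallet.
+    val wallet = Wallets.newFileSystemWallet(Paths.get("wallet"))
+    val gateway = Gateway.createBuilder()
+        .identity(wallet, "appUser")                                 // identity label: placeholder
+        .networkConfig(Paths.get("config", "connection-org1.json"))  // connection profile from this module
+        .connect()
+    try {
+        // Channel and chaincode names below are placeholders, not the module's actual values.
+        val contract = gateway.getNetwork("mychannel").getContract("basic")
+        contract.submitTransaction("PutRecord", "key1", "value1")    // hypothetical chaincode functions
+        val value = contract.evaluateTransaction("GetRecord", "key1")
+        println(String(value))
+    } finally {
+        gateway.close()
+    }
+}
+```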
+
+### HTTP package
+Contains the server-side HTTP handler. It accepts requests from the outside and performs the
+requested operation. For details, see the REST API description section below.
+
+### Proto package
+The proto package contains the `Config.proto` file, which defines the messages used by the REST
+API. The most important ones are `DltConfig`, which defines the whole DLT configuration, and
+`DltRecord`, which represents the data to store in the blockchain.
+
+### Client example
+This code is not required by the service, but it can be used for testing. It contains a sample
+REST client that connects to the service and performs all the CRUD operations.
+
+## REST API description
+| Method | URL | Input | Response code | Output |
+| --- | ----------- | --- | --- | --- |
+| POST | /dlt/configure | Configuration object | 201 or 400 | Status value | 
+| GET | /dlt/configure | - | 200 or 404 | Configuration object |
+| POST | /dlt/record | Record object | 200, 201, 400 or 404 | Status value |
+| GET | /dlt/record | Record id | 200 or 404 | Record object |
+
+Record and configuration objects are defined in the `proto` package.
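+
+As a minimal, hypothetical usage sketch (host and port are placeholders), the active configuration
+can be fetched with the Ktor HTTP client already declared in `build.gradle.kts`:
+
+```kotlin
+import io.ktor.client.HttpClient
+import io.ktor.client.engine.cio.CIO
+import io.ktor.client.request.get
+import kotlinx.coroutines.runBlocking
+
+fun main() = runBlocking<Unit> {
+    val client = HttpClient(CIO)
+    try {
+        // Returns the active DLT configuration; the call fails with 404 if none has been posted yet.
+        val config: String = client.get("http://localhost:8080/dlt/configure")
+        println(config)
+    } finally {
+        client.close()
+    }
+}
+```
+
+The bundled client example described above exercises all the CRUD operations.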
+
diff --git a/src/dlt/build.gradle.kts b/src/dlt/build.gradle.kts
new file mode 100644
index 0000000000000000000000000000000000000000..8eb0d53fa99ec972edd6ee03aafeb0d676f3d3c0
--- /dev/null
+++ b/src/dlt/build.gradle.kts
@@ -0,0 +1,115 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
+
+import com.google.protobuf.gradle.generateProtoTasks
+import com.google.protobuf.gradle.id
+import com.google.protobuf.gradle.protobuf
+import com.google.protobuf.gradle.protoc
+
+ext["protobufVersion"] = "3.19.1"
+ext["ktorVersion"] = "1.6.5"
+
+plugins {
+    kotlin("jvm") version "1.5.31"
+    kotlin("plugin.serialization") version "1.4.21"
+    id("com.google.protobuf") version "0.8.17"
+    application
+}
+
+group = "eu.neclab"
+version = "1.0-SNAPSHOT"
+
+repositories {
+    mavenCentral()
+}
+
+dependencies {
+    testImplementation("org.jetbrains.kotlin:kotlin-test:1.5.31")
+    implementation("org.hyperledger.fabric:fabric-gateway-java:2.2.2")
+    api("com.google.protobuf:protobuf-kotlin:${rootProject.ext["protobufVersion"]}")
+    implementation("io.ktor:ktor-server-core:${rootProject.ext["ktorVersion"]}")
+    implementation("io.ktor:ktor-server-netty:${rootProject.ext["ktorVersion"]}")
+    implementation("io.ktor:ktor-serialization:${rootProject.ext["ktorVersion"]}")
+    implementation("io.ktor:ktor-client-serialization:${rootProject.ext["ktorVersion"]}")
+    implementation("io.ktor:ktor-client-core:${rootProject.ext["ktorVersion"]}")
+    implementation("io.ktor:ktor-client-cio:${rootProject.ext["ktorVersion"]}")
+    implementation("ch.qos.logback:logback-classic:1.2.5")
+    implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.3.1")
+    implementation("org.jetbrains.kotlinx:kotlinx-serialization-protobuf:1.3.1")
+}
+
+tasks.test {
+    useJUnitPlatform()
+}
+
+tasks.withType<KotlinCompile> {
+    kotlinOptions.jvmTarget = "11"
+}
+
+tasks.withType<KotlinCompile>().all {
+    kotlinOptions {
+        freeCompilerArgs = listOf("-Xopt-in=kotlin.RequiresOptIn")
+    }
+}
+
+
+application {
+    mainClass.set("MainKt")
+}
+
+sourceSets {
+    main {
+        proto {
+            srcDir("src/main/kotlin/proto")
+        }
+    }
+}
+
+protobuf {
+    protoc {
+        artifact = "com.google.protobuf:protoc:${rootProject.ext["protobufVersion"]}"
+    }
+    generateProtoTasks {
+        all().forEach {
+            it.builtins {
+                id("kotlin")
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/dlt/config/ca.org1.example.com-cert.pem b/src/dlt/config/ca.org1.example.com-cert.pem
new file mode 100644
index 0000000000000000000000000000000000000000..9c10b97908d70ae16b542f922563dbcba21056c6
--- /dev/null
+++ b/src/dlt/config/ca.org1.example.com-cert.pem
@@ -0,0 +1,14 @@
+-----BEGIN CERTIFICATE-----
+MIICJjCCAc2gAwIBAgIULDVQiqifWHypuW50LhHXSJeeFG0wCgYIKoZIzj0EAwIw
+cDELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYDVQQH
+EwZEdXJoYW0xGTAXBgNVBAoTEG9yZzEuZXhhbXBsZS5jb20xHDAaBgNVBAMTE2Nh
+Lm9yZzEuZXhhbXBsZS5jb20wHhcNMjExMTIzMTIzNzAwWhcNMzYxMTE5MTIzNzAw
+WjBwMQswCQYDVQQGEwJVUzEXMBUGA1UECBMOTm9ydGggQ2Fyb2xpbmExDzANBgNV
+BAcTBkR1cmhhbTEZMBcGA1UEChMQb3JnMS5leGFtcGxlLmNvbTEcMBoGA1UEAxMT
+Y2Eub3JnMS5leGFtcGxlLmNvbTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABFNi
+ggnkzv5qQSkIAQ05Y9DUq4teNjbMTdqD0IwdQe+lcYI0wgkR9wpBn5fj2pN093+P
+l2crhgyNqZyAuqaylvCjRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAG
+AQH/AgEBMB0GA1UdDgQWBBS44tGlbJ5GIBwATL383Hw0vz8jHDAKBggqhkjOPQQD
+AgNHADBEAiAlkKikh7eG8lf0uI7NwBi0QrbnnY4vHf9ErceKwUhZWgIgS9Z9AzBc
+n9RAK3v/freT3Exs7b+MK13UeDdejRWS+RE=
+-----END CERTIFICATE-----
diff --git a/src/dlt/config/connection-org1.json b/src/dlt/config/connection-org1.json
new file mode 100644
index 0000000000000000000000000000000000000000..c28ac57118e8695375187eaf103f1f499f2cf301
--- /dev/null
+++ b/src/dlt/config/connection-org1.json
@@ -0,0 +1,73 @@
+{
+    "name": "test-network-org1",
+    "version": "1.0.0",
+    "client": {
+        "organization": "Org1",
+        "connection": {
+            "timeout": {
+                "peer": {
+                    "endorser": "300"
+                }
+            }
+        }
+    },
+    "organizations": {
+        "Org1": {
+            "mspid": "Org1MSP",
+            "peers": [
+                "peer0.org1.example.com"
+            ],
+            "certificateAuthorities": [
+                "ca.org1.example.com"
+            ]
+        }
+    },
+    "peers": {
+        "peer0.org1.example.com": {
+            "url": "grpcs://s2:7051",
+            "tlsCACerts": {
+                "pem": "-----BEGIN CERTIFICATE-----\nMIICJjCCAc2gAwIBAgIULDVQiqifWHypuW50LhHXSJeeFG0wCgYIKoZIzj0EAwIw\ncDELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYDVQQH\nEwZEdXJoYW0xGTAXBgNVBAoTEG9yZzEuZXhhbXBsZS5jb20xHDAaBgNVBAMTE2Nh\nLm9yZzEuZXhhbXBsZS5jb20wHhcNMjExMTIzMTIzNzAwWhcNMzYxMTE5MTIzNzAw\nWjBwMQswCQYDVQQGEwJVUzEXMBUGA1UECBMOTm9ydGggQ2Fyb2xpbmExDzANBgNV\nBAcTBkR1cmhhbTEZMBcGA1UEChMQb3JnMS5leGFtcGxlLmNvbTEcMBoGA1UEAxMT\nY2Eub3JnMS5leGFtcGxlLmNvbTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABFNi\nggnkzv5qQSkIAQ05Y9DUq4teNjbMTdqD0IwdQe+lcYI0wgkR9wpBn5fj2pN093+P\nl2crhgyNqZyAuqaylvCjRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAG\nAQH/AgEBMB0GA1UdDgQWBBS44tGlbJ5GIBwATL383Hw0vz8jHDAKBggqhkjOPQQD\nAgNHADBEAiAlkKikh7eG8lf0uI7NwBi0QrbnnY4vHf9ErceKwUhZWgIgS9Z9AzBc\nn9RAK3v/freT3Exs7b+MK13UeDdejRWS+RE=\n-----END CERTIFICATE-----\n"
+            },
+            "grpcOptions": {
+                "ssl-target-name-override": "peer0.org1.example.com",
+                "hostnameOverride": "peer0.org1.example.com"
+            }
+        },
+        "peer0.org2.example.com": {
+            "url": "grpcs://s2:9051",
+            "tlsCACerts": {
+                "pem": "-----BEGIN CERTIFICATE-----\nMIICHzCCAcWgAwIBAgIUJiJ5815YVes2sG95oFzj0QWWBKswCgYIKoZIzj0EAwIw\nbDELMAkGA1UEBhMCVUsxEjAQBgNVBAgTCUhhbXBzaGlyZTEQMA4GA1UEBxMHSHVy\nc2xleTEZMBcGA1UEChMQb3JnMi5leGFtcGxlLmNvbTEcMBoGA1UEAxMTY2Eub3Jn\nMi5leGFtcGxlLmNvbTAeFw0yMTExMjMxMjM3MDBaFw0zNjExMTkxMjM3MDBaMGwx\nCzAJBgNVBAYTAlVLMRIwEAYDVQQIEwlIYW1wc2hpcmUxEDAOBgNVBAcTB0h1cnNs\nZXkxGTAXBgNVBAoTEG9yZzIuZXhhbXBsZS5jb20xHDAaBgNVBAMTE2NhLm9yZzIu\nZXhhbXBsZS5jb20wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAATdQw+PQyT3Ql5M\nv/xvafYFhU5Jtl0CwYyrXTtRajpPnlPnNvXUqVMxmdSR+4m2WBYyBdZ8IhGaayb/\nrOro8Mpko0UwQzAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0TAQH/BAgwBgEB/wIBATAd\nBgNVHQ4EFgQUrsPP1HIS6O+JeWL2ct/ujkfP7dEwCgYIKoZIzj0EAwIDSAAwRQIh\nAMynA2Q/cbMXEHihnQdpdEz/83jGAokp7dKMHst02b3gAiA0XReJWdc0AUXZtbSq\nZG8mXOJeZu0Zro9DuQUEDPBfDQ==\n-----END CERTIFICATE-----\n"
+            },
+            "grpcOptions": {
+                "ssl-target-name-override": "peer0.org2.example.com",
+                "hostnameOverride": "peer0.org2.example.com"
+            }
+        }
+    },
+    "certificateAuthorities": {
+        "ca.org1.example.com": {
+            "url": "https://s2:7054",
+            "caName": "ca-org1",
+            "tlsCACerts": {
+                "pem": [
+                    "-----BEGIN CERTIFICATE-----\nMIICJjCCAc2gAwIBAgIULDVQiqifWHypuW50LhHXSJeeFG0wCgYIKoZIzj0EAwIw\ncDELMAkGA1UEBhMCVVMxFzAVBgNVBAgTDk5vcnRoIENhcm9saW5hMQ8wDQYDVQQH\nEwZEdXJoYW0xGTAXBgNVBAoTEG9yZzEuZXhhbXBsZS5jb20xHDAaBgNVBAMTE2Nh\nLm9yZzEuZXhhbXBsZS5jb20wHhcNMjExMTIzMTIzNzAwWhcNMzYxMTE5MTIzNzAw\nWjBwMQswCQYDVQQGEwJVUzEXMBUGA1UECBMOTm9ydGggQ2Fyb2xpbmExDzANBgNV\nBAcTBkR1cmhhbTEZMBcGA1UEChMQb3JnMS5leGFtcGxlLmNvbTEcMBoGA1UEAxMT\nY2Eub3JnMS5leGFtcGxlLmNvbTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABFNi\nggnkzv5qQSkIAQ05Y9DUq4teNjbMTdqD0IwdQe+lcYI0wgkR9wpBn5fj2pN093+P\nl2crhgyNqZyAuqaylvCjRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAG\nAQH/AgEBMB0GA1UdDgQWBBS44tGlbJ5GIBwATL383Hw0vz8jHDAKBggqhkjOPQQD\nAgNHADBEAiAlkKikh7eG8lf0uI7NwBi0QrbnnY4vHf9ErceKwUhZWgIgS9Z9AzBc\nn9RAK3v/freT3Exs7b+MK13UeDdejRWS+RE=\n-----END CERTIFICATE-----\n"
+                ]
+            },
+            "httpOptions": {
+                "verify": false
+            }
+        }
+    },
+    "orderers": {
+        "orderer0.example.com": {
+            "url": "grpcs://s2:7050",
+            "tlsCACerts": {
+                "pem": "-----BEGIN CERTIFICATE-----\nMIICCzCCAbGgAwIBAgIUDr4RiMRC/q95iWAfWiZTLTdDoDUwCgYIKoZIzj0EAwIw\nYjELMAkGA1UEBhMCVVMxETAPBgNVBAgTCE5ldyBZb3JrMREwDwYDVQQHEwhOZXcg\nWW9yazEUMBIGA1UEChMLZXhhbXBsZS5jb20xFzAVBgNVBAMTDmNhLmV4YW1wbGUu\nY29tMB4XDTIxMTEyMzEyMzcwMFoXDTM2MTExOTEyMzcwMFowYjELMAkGA1UEBhMC\nVVMxETAPBgNVBAgTCE5ldyBZb3JrMREwDwYDVQQHEwhOZXcgWW9yazEUMBIGA1UE\nChMLZXhhbXBsZS5jb20xFzAVBgNVBAMTDmNhLmV4YW1wbGUuY29tMFkwEwYHKoZI\nzj0CAQYIKoZIzj0DAQcDQgAE4ErISW/k5iZv0n7n1qpbOZcRMNmn8VqAOX4UIELm\nQ51neULt9T3pxdGEsq7B5O1ncB/RdZwf+3dihBHjngXerKNFMEMwDgYDVR0PAQH/\nBAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFP2/1CHK9OdPjs/9\np1mh3t+pyujsMAoGCCqGSM49BAMCA0gAMEUCIQDETOC8hB9EaEvdc2sdEtcNeyXu\nYryyf6I5tJC13E2hOQIgef+ymHy6eXf0jNkY6DXz041THa/67dSrXaZGgaTh4LI=\n-----END CERTIFICATE-----\n"
+            },
+            "grpcOptions": {
+                "ssl-target-name-override": "orderer0.example.com",
+                "hostnameOverride": "orderer0.example.com"
+            }
+        }
+    }
+}
diff --git a/src/dlt/gradle.properties b/src/dlt/gradle.properties
new file mode 100644
index 0000000000000000000000000000000000000000..7fc6f1ff272ee12d8be9694acdaa36b4284eefdb
--- /dev/null
+++ b/src/dlt/gradle.properties
@@ -0,0 +1 @@
+kotlin.code.style=official
diff --git a/src/dlt/gradle/wrapper/gradle-wrapper.jar b/src/dlt/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000000000000000000000000000000000000..7454180f2ae8848c63b8b4dea2cb829da983f2fa
Binary files /dev/null and b/src/dlt/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/src/dlt/gradle/wrapper/gradle-wrapper.properties b/src/dlt/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000000000000000000000000000000000000..69a9715077f4fe68764b2e50867736b0c7f015a2
--- /dev/null
+++ b/src/dlt/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.1-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/src/dlt/gradlew b/src/dlt/gradlew
new file mode 100755
index 0000000000000000000000000000000000000000..744e882ed57263a19bf3a504977da292d009345f
--- /dev/null
+++ b/src/dlt/gradlew
@@ -0,0 +1,185 @@
+#!/usr/bin/env sh
+
+#
+# Copyright 2015 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+    echo "$*"
+}
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MSYS* | MINGW* )
+    msys=true
+    ;;
+  NONSTOP* )
+    nonstop=true
+    ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    JAVACMD=`cygpath --unix "$JAVACMD"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=`expr $i + 1`
+    done
+    case $i in
+        0) set -- ;;
+        1) set -- "$args0" ;;
+        2) set -- "$args0" "$args1" ;;
+        3) set -- "$args0" "$args1" "$args2" ;;
+        4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Escape application args
+save () {
+    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+    echo " "
+}
+APP_ARGS=`save "$@"`
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+exec "$JAVACMD" "$@"
diff --git a/src/dlt/gradlew.bat b/src/dlt/gradlew.bat
new file mode 100644
index 0000000000000000000000000000000000000000..ac1b06f93825db68fb0c0b5150917f340eaa5d02
--- /dev/null
+++ b/src/dlt/gradlew.bat
@@ -0,0 +1,89 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem      https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/src/dlt/settings.gradle.kts b/src/dlt/settings.gradle.kts
new file mode 100644
index 0000000000000000000000000000000000000000..0ebdd07b29682c72c65695e4f5655437ed11d74d
--- /dev/null
+++ b/src/dlt/settings.gradle.kts
@@ -0,0 +1,3 @@
+
+rootProject.name = "dlt"
+
diff --git a/src/dlt/src/main/kotlin/Main.kt b/src/dlt/src/main/kotlin/Main.kt
new file mode 100644
index 0000000000000000000000000000000000000000..d6298cc2890a8cec8ea9af4a05bbabb5ab8f3b4d
--- /dev/null
+++ b/src/dlt/src/main/kotlin/Main.kt
@@ -0,0 +1,142 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+import io.ktor.client.*
+import io.ktor.client.engine.cio.*
+import io.ktor.client.features.*
+import io.ktor.client.request.*
+import io.ktor.utils.io.jvm.javaio.*
+import kotlinx.serialization.ExperimentalSerializationApi
+import proto.Config
+import proto.Config.DltConfig
+
+@OptIn(ExperimentalSerializationApi::class)
+suspend fun main(args: Array<String>) {
+    // TODO: default configuration file
+    val cfg = DltConfig.newBuilder().setWallet("wallet").setConnectionFile("config/connection-org1.json")
+        .setUser("appUser")
+        .setChannel("dlt")
+        .setContract("basic").setCaCertFile("config/ca.org1.example.com-cert.pem").setCaUrl("https://s2:7054")
+        .setCaAdmin("admin").setCaAdminSecret("adminpw").setMsp("Org1MSP").setAffiliation("org1.department1")
+        .build()
+    val cfgBytes = cfg.toByteArray()
+
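+    // Plain Ktor CIO client used only to exercise the REST API exposed by http/Server.kt;
+    // the response validator below simply prints the HTTP status of every reply.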
+    val client = HttpClient(CIO) {
+        HttpResponseValidator {
+            validateResponse { response ->
+                println(response.status)
+            }
+        }
+    }
+
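+    // Demo flow: push the configuration, read it back, query a record, then ADD, UPDATE and
+    // DISABLE the same record, printing the returned state after every step.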
+    try {
+        client.post<ByteArray>("http://localhost:8080/dlt/configure") {
+            body = cfgBytes
+        }
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+
+    try {
+        val config = client.get<ByteArray>("http://localhost:8080/dlt/configure")
+        println(DltConfig.parseFrom(config))
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+
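+    // The record UUID is fixed for the demo; it is sent as the plain-text request body of the
+    // GET /dlt/record calls (the server reads it with receiveText()).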
+    val uuid = "41f4d2e2-f4ef-4c81-872a-c32f2d26b2ca"
+    try {
+        val record = client.get<ByteArray>("http://localhost:8080/dlt/record") {
+            body = uuid
+        }
+        println(Config.DltRecord.parseFrom(record))
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+
+    val id = Config.DltRecordId.newBuilder().setUuid(uuid).build()
+    val record = Config.DltRecord.newBuilder().setId(id).setOperation(Config.DltRecordOperation.ADD)
+        .setType(Config.DltRecordType.DEVICE).setJson("{}").build()
+    try {
+        val result = client.post<ByteArray>("http://localhost:8080/dlt/record") {
+            body = record.toByteArray()
+        }
+        println(String(result))
+        val requestedRecord = client.get<ByteArray>("http://localhost:8080/dlt/record") {
+            body = uuid
+        }
+        println(Config.DltRecord.parseFrom(requestedRecord))
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+
+    try {
+        val newRecord = Config.DltRecord.newBuilder().setId(id).setOperation(Config.DltRecordOperation.UPDATE)
+            .setType(Config.DltRecordType.UNKNOWN).setJson("{}").build()
+        val result = client.post<ByteArray>("http://localhost:8080/dlt/record") {
+            body = newRecord.toByteArray()
+        }
+        println(String(result))
+        val requestedRecord = client.get<ByteArray>("http://localhost:8080/dlt/record") {
+            body = uuid
+        }
+        println(Config.DltRecord.parseFrom(requestedRecord))
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+
+    try {
+        val newRecord = Config.DltRecord.newBuilder().setId(id).setOperation(Config.DltRecordOperation.DISABLE)
+            .setType(Config.DltRecordType.SLICE).setJson("{}").build()
+        val result = client.post<ByteArray>("http://localhost:8080/dlt/record") {
+            body = newRecord.toByteArray()
+        }
+        println(String(result))
+        val requestedRecord = client.get<ByteArray>("http://localhost:8080/dlt/record") {
+            body = uuid
+        }
+        println(Config.DltRecord.parseFrom(requestedRecord))
+    } catch (e: ClientRequestException) {
+        println(e.response.status)
+        println(String(e.response.content.toInputStream().readAllBytes()))
+    }
+}
\ No newline at end of file
diff --git a/src/dlt/src/main/kotlin/fabric/ConnectGateway.kt b/src/dlt/src/main/kotlin/fabric/ConnectGateway.kt
new file mode 100644
index 0000000000000000000000000000000000000000..245bd4828776837802a1303787d5cfc34a5bffbc
--- /dev/null
+++ b/src/dlt/src/main/kotlin/fabric/ConnectGateway.kt
@@ -0,0 +1,54 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+package fabric
+
+import org.hyperledger.fabric.gateway.Contract
+import org.hyperledger.fabric.gateway.Gateway
+import org.hyperledger.fabric.gateway.Wallet
+import java.nio.file.Paths
+
+// helper function for getting connected to the gateway
+fun getContract(config: proto.Config.DltConfig, wallet: Wallet): Contract {
+    // load a CCP
+    val networkConfigPath = Paths.get(config.connectionFile)
+    val builder = Gateway.createBuilder()
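+    // The identity comes from the file-system wallet; discovery(true) lets the SDK learn the
+    // channel topology instead of relying only on the peers listed in the connection profile.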
+    builder.identity(wallet, config.user).networkConfig(networkConfigPath).discovery(true)
+    val gateway = builder.connect()
+    val network = gateway.getNetwork(config.channel)
+    return network.getContract(config.contract)
+}
\ No newline at end of file
diff --git a/src/dlt/src/main/kotlin/fabric/EnrollAdmin.kt b/src/dlt/src/main/kotlin/fabric/EnrollAdmin.kt
new file mode 100644
index 0000000000000000000000000000000000000000..b4420271961f659149392614ec2e89b8c10a7da9
--- /dev/null
+++ b/src/dlt/src/main/kotlin/fabric/EnrollAdmin.kt
@@ -0,0 +1,29 @@
+/*
+ * Copyright IBM Corp. All Rights Reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package fabric
+
+import org.hyperledger.fabric.gateway.Identities
+import org.hyperledger.fabric.gateway.Wallet
+import org.hyperledger.fabric_ca.sdk.EnrollmentRequest
+import org.hyperledger.fabric_ca.sdk.HFCAClient
+
+fun enrollAdmin(config: proto.Config.DltConfig, caClient: HFCAClient, wallet: Wallet) {
+    // Check to see if we've already enrolled the admin user.
+    if (wallet.get(config.caAdmin) != null) {
+        println("An identity for the admin user ${config.caAdmin} already exists in the wallet")
+        return
+    }
+
+    // Enroll the admin user, and import the new identity into the wallet.
+    val enrollmentRequestTLS = EnrollmentRequest()
+    enrollmentRequestTLS.addHost(config.caUrl)
+    enrollmentRequestTLS.profile = "tls"
+    val enrollment = caClient.enroll(config.caAdmin, config.caAdminSecret, enrollmentRequestTLS)
+    val user = Identities.newX509Identity(config.msp, enrollment)
+    wallet.put(config.caAdmin, user)
+    println("Successfully enrolled user ${config.caAdmin} and imported it into the wallet")
+}
diff --git a/src/dlt/src/main/kotlin/fabric/FabricConnector.kt b/src/dlt/src/main/kotlin/fabric/FabricConnector.kt
new file mode 100644
index 0000000000000000000000000000000000000000..0918f59e1f616af4deae3835cc2f48ae6c932902
--- /dev/null
+++ b/src/dlt/src/main/kotlin/fabric/FabricConnector.kt
@@ -0,0 +1,105 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+package fabric
+
+import org.hyperledger.fabric.gateway.Contract
+import org.hyperledger.fabric.gateway.Wallet
+import org.hyperledger.fabric.gateway.Wallets
+import org.hyperledger.fabric.sdk.security.CryptoSuiteFactory
+import org.hyperledger.fabric_ca.sdk.HFCAClient
+import proto.Config
+import java.nio.file.Paths
+import java.util.*
+
+class FabricConnector(val config: Config.DltConfig) {
+    private val caClient: HFCAClient
+    private val wallet: Wallet
+    private val contract: Contract
+
+    init {
+        // Create a CA client for interacting with the CA.
+        val props = Properties()
+        props["pemFile"] = config.caCertFile
+        props["allowAllHostNames"] = "true"
+        caClient = HFCAClient.createNewInstance(config.caUrl, props)
+        val cryptoSuite = CryptoSuiteFactory.getDefault().cryptoSuite
+        caClient.cryptoSuite = cryptoSuite
+
+        // Create a wallet for managing identities
+        wallet = Wallets.newFileSystemWallet(Paths.get(config.wallet))
+        contract = connect()
+    }
+
+    fun connect(): Contract {
+        enrollAdmin(config, caClient, wallet)
+        registerUser(config, caClient, wallet)
+        return getContract(config, wallet)
+    }
+
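+    // The helpers below wrap the chaincode transactions exposed by the configured contract:
+    // AddRecord, GetRecord, UpdateRecord and DeactivateRecord.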
+    fun putData(record: Config.DltRecord): String {
+        println(record.type.toString())
+        return String(
+            contract.submitTransaction(
+                "AddRecord",
+                record.id.uuid,
+                record.type.number.toString(),
+                record.json
+            )
+        )
+    }
+
+    fun getData(uuid: String): Config.DltRecord {
+        val result = contract.evaluateTransaction("GetRecord", uuid)
+        return Config.DltRecord.parseFrom(result)
+    }
+
+    fun updateData(record: Config.DltRecord): String {
+        return String(
+            contract.submitTransaction(
+                "UpdateRecord",
+                record.id.uuid,
+                record.type.number.toString(),
+                record.json
+            )
+        )
+    }
+
+    fun deleteData(uuid: String): String {
+        return String(contract.submitTransaction("DeactivateRecord", uuid))
+    }
+}
\ No newline at end of file
diff --git a/src/dlt/src/main/kotlin/fabric/RegisterUser.kt b/src/dlt/src/main/kotlin/fabric/RegisterUser.kt
new file mode 100644
index 0000000000000000000000000000000000000000..fb5cc29695427d54ad1350ee92c99b3d31c8ab4c
--- /dev/null
+++ b/src/dlt/src/main/kotlin/fabric/RegisterUser.kt
@@ -0,0 +1,65 @@
+/*
+SPDX-License-Identifier: Apache-2.0
+*/
+package fabric
+
+import org.hyperledger.fabric.gateway.Identities
+import org.hyperledger.fabric.gateway.Wallet
+import org.hyperledger.fabric.gateway.X509Identity
+import org.hyperledger.fabric.sdk.Enrollment
+import org.hyperledger.fabric.sdk.User
+import org.hyperledger.fabric_ca.sdk.HFCAClient
+import org.hyperledger.fabric_ca.sdk.RegistrationRequest
+import java.security.PrivateKey
+
+fun registerUser(config: proto.Config.DltConfig, caClient: HFCAClient, wallet: Wallet) {
+    // Check to see if we've already enrolled the user.
+    if (wallet[config.user] != null) {
+        println("An identity for the user ${config.user} already exists in the wallet")
+        return
+    }
+    val adminIdentity = wallet[config.caAdmin] as X509Identity
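+    // Minimal User implementation backed by the admin identity from the wallet; it is only
+    // needed to sign the registration request for the new application user.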
+    val admin = object : User {
+        override fun getName(): String {
+            return config.caAdmin
+        }
+
+        override fun getRoles(): Set<String>? {
+            return null
+        }
+
+        override fun getAccount(): String? {
+            return null
+        }
+
+        override fun getAffiliation(): String {
+            return config.affiliation
+        }
+
+        override fun getEnrollment(): Enrollment {
+            return object : Enrollment {
+                override fun getKey(): PrivateKey {
+                    return adminIdentity.privateKey
+                }
+
+                override fun getCert(): String {
+                    return Identities.toPemString(adminIdentity.certificate)
+                }
+            }
+        }
+
+        override fun getMspId(): String {
+            return config.msp
+        }
+    }
+
+    // Register the user, enroll the user, and import the new identity into the wallet.
+    val registrationRequest = RegistrationRequest(config.user)
+    registrationRequest.affiliation = config.affiliation
+    registrationRequest.enrollmentID = config.user
+    val enrollmentSecret = caClient.register(registrationRequest, admin)
+    val enrollment = caClient.enroll(config.user, enrollmentSecret)
+    val user = Identities.newX509Identity(config.msp, enrollment)
+    wallet.put(config.user, user)
+    println("Successfully enrolled user ${config.user} and imported it into the wallet")
+}
diff --git a/src/dlt/src/main/kotlin/http/Server.kt b/src/dlt/src/main/kotlin/http/Server.kt
new file mode 100644
index 0000000000000000000000000000000000000000..4e3400af36b32726096b177da230c8baa4bb3dab
--- /dev/null
+++ b/src/dlt/src/main/kotlin/http/Server.kt
@@ -0,0 +1,162 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+package http
+
+import fabric.FabricConnector
+import io.ktor.application.*
+import io.ktor.features.*
+import io.ktor.http.*
+import io.ktor.request.*
+import io.ktor.response.*
+import io.ktor.routing.*
+import io.ktor.server.engine.*
+import io.ktor.server.netty.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.withContext
+import proto.Config
+import proto.Config.DltConfig
+import proto.Config.DltRecord
+
+class Server {
+    var connector: FabricConnector? = null
+    val port = 8080
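+    // Guards concurrent (re)configuration and use of the shared FabricConnector instance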
+    val mutex = Mutex()
+}
+
+fun checkException(e: Exception): String = e.message ?: ""
+
+fun main() {
+    val server = Server()
+    embeddedServer(Netty, port = server.port) {
+        install(ContentNegotiation)
+        routing {
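+            // /dlt/configure manages the Fabric connector, /dlt/record proxies record operations;
+            // payloads are protobuf messages from proto/Config.proto (record lookups send the UUID as plain text).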
+            post("/dlt/configure") {
+                withContext(Dispatchers.IO) {
+                    try {
+                        val data = call.receiveStream()
+                        val config = DltConfig.parseFrom(data)
+                        println(config)
+                        server.mutex.lock()
+                        try {
+                            server.connector = FabricConnector(config)
+                        } finally {
+                            server.mutex.unlock()
+                        }
+                        call.response.status(HttpStatusCode.Created)
+                    }
+                    // TODO: catch exceptions one by one
+                    catch (e: Exception) {
+                        call.respond(HttpStatusCode.BadRequest, checkException(e))
+                        e.printStackTrace()
+                    }
+                }
+            }
+            get("/dlt/configure") {
+                withContext(Dispatchers.IO) {
+                    server.mutex.lock()
+                    if (server.connector == null) {
+                        server.mutex.unlock()
+                        call.respond(HttpStatusCode.NotFound, "Not initialized")
+                    } else {
+                        val configBytes = server.connector!!.config.toByteArray()
+                        server.mutex.unlock()
+                        call.respond(HttpStatusCode.OK, configBytes)
+                    }
+                }
+            }
+            post("/dlt/record") {
+                withContext(Dispatchers.IO) {
+                    server.mutex.lock()
+                    try {
+                        if (server.connector == null) {
+                            call.respond(HttpStatusCode.NotFound, "Not initialized")
+                        } else {
+                            val record = DltRecord.parseFrom(call.receiveStream())
+                            when (record.operation) {
+                                Config.DltRecordOperation.ADD -> {
+                                    val result = server.connector!!.putData(record)
+                                    call.respond(HttpStatusCode.Created, result)
+                                }
+                                Config.DltRecordOperation.UPDATE -> {
+                                    val result = server.connector!!.updateData(record)
+                                    call.respond(HttpStatusCode.OK, result)
+                                }
+                                // TODO: Disable should require only uuid
+                                Config.DltRecordOperation.DISABLE -> {
+                                    val result = server.connector!!.deleteData(record.id.uuid)
+                                    call.respond(HttpStatusCode.OK, result)
+                                }
+                                else -> {
+                                    call.respond(HttpStatusCode.BadRequest, "Invalid operation")
+                                }
+                            }
+                        }
+                    }
+                    // TODO: catch exceptions one by one
+                    catch (e: Exception) {
+                        call.respond(HttpStatusCode.BadRequest, checkException(e))
+                        e.printStackTrace()
+                    }
+                    server.mutex.unlock()
+                }
+            }
+            get("/dlt/record") {
+                withContext(Dispatchers.IO) {
+                    server.mutex.lock()
+                    try {
+                        if (server.connector == null) {
+                            call.respond(HttpStatusCode.NotFound)
+                        } else {
+                            val uuid = call.receiveText()
+                            println("Uuid request: $uuid")
+                            val result = server.connector!!.getData(uuid)
+                            call.respond(HttpStatusCode.OK, result.toByteArray())
+                        }
+                    }
+                    // TODO: catch exceptions one by one
+                    catch (e: Exception) {
+                        call.respond(HttpStatusCode.NotFound, checkException(e))
+                        e.printStackTrace()
+                    }
+                    server.mutex.unlock()
+                }
+            }
+        }
+    }.start(wait = true)
+}
diff --git a/src/dlt/src/main/kotlin/proto/Config.proto b/src/dlt/src/main/kotlin/proto/Config.proto
new file mode 100644
index 0000000000000000000000000000000000000000..f492e63ce65924a98b38ea4925d43336f84d211c
--- /dev/null
+++ b/src/dlt/src/main/kotlin/proto/Config.proto
@@ -0,0 +1,80 @@
+//     NEC Laboratories Europe GmbH
+//
+//     PROPRIETARY INFORMATION
+//
+// The software and its source code contain valuable trade secrets and
+// shall be maintained in confidence and treated as confidential
+// information. The software may only be used for evaluation and/or
+// testing purposes, unless otherwise explicitly stated in a written
+// agreement with NEC Laboratories Europe GmbH.
+//
+// Any unauthorized publication, transfer to third parties or
+// duplication of the object or source code - either totally or in
+// part - is strictly prohibited.
+//
+//          Copyright (c) 2021 NEC Laboratories Europe GmbH
+//          All Rights Reserved.
+//
+// Authors: Konstantin Munichev <konstantin.munichev@neclab.eu>
+//
+//
+// NEC Laboratories Europe GmbH DISCLAIMS ALL WARRANTIES, EITHER
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO IMPLIED WARRANTIES
+// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND THE
+// WARRANTY AGAINST LATENT DEFECTS, WITH RESPECT TO THE PROGRAM AND
+// THE ACCOMPANYING DOCUMENTATION.
+//
+// NO LIABILITIES FOR CONSEQUENTIAL DAMAGES: IN NO EVENT SHALL NEC
+// Laboratories Europe GmbH or ANY OF ITS SUBSIDIARIES BE LIABLE FOR
+// ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR
+// LOSS OF BUSINESS PROFITS, BUSINESS INTERRUPTION, LOSS OF
+// INFORMATION, OR OTHER PECUNIARY LOSS AND INDIRECT, CONSEQUENTIAL,
+// INCIDENTAL, ECONOMIC OR PUNITIVE DAMAGES) ARISING OUT OF THE USE OF
+// OR INABILITY TO USE THIS PROGRAM, EVEN IF NEC Laboratories Europe
+// GmbH HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+//
+// THIS HEADER MAY NOT BE EXTRACTED OR MODIFIED IN ANY WAY.
+
+syntax = "proto3";
+
+package proto;
+
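+// Connector configuration: wallet location, connection profile, identities and the Fabric
+// channel/contract to use.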
+message DltConfig {
+  string wallet = 1;
+  string connectionFile = 2;
+  string user = 3;
+  string channel = 4;
+  string contract = 5;
+  string caCertFile = 6;
+  string caUrl = 7;
+  string caAdmin = 8;
+  string caAdminSecret = 9;
+  string msp = 10;
+  string affiliation = 11;
+}
+
+message DltRecordId {
+  string uuid = 1;
+}
+
+enum DltRecordOperation {
+  OP_UNSET = 0;
+  ADD = 1;
+  UPDATE = 2;
+  DISABLE = 3;
+}
+
+enum DltRecordType {
+  RECORD_UNSET = 0;
+  UNKNOWN = 1;
+  SERVICE = 2;
+  DEVICE = 3;
+  SLICE = 4;
+}
+
+message DltRecord {
+  DltRecordId id = 1;
+  DltRecordOperation operation = 2;
+  DltRecordType type = 3;
+  string json = 4;
+}
\ No newline at end of file
diff --git a/src/l3_attackmitigator/.gitlab-ci.yml b/src/l3_attackmitigator/.gitlab-ci.yml
index e20771744b43a9209c8b9a193a1fb126008bb006..1fb3d88ee50ab2f330049f737fc455ac05aa8375 100644
--- a/src/l3_attackmitigator/.gitlab-ci.yml
+++ b/src/l3_attackmitigator/.gitlab-ci.yml
@@ -2,7 +2,6 @@
 build l3_attackmitigator:
   variables:
     IMAGE_NAME: 'l3_attackmitigator' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_attackmitigator-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,16 +10,22 @@ build l3_attackmitigator:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test l3_attackmitigator:
+unit test l3_attackmitigator:
   variables:
     IMAGE_NAME: 'l3_attackmitigator' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_attackmitigator-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -28,34 +33,50 @@ unit_test l3_attackmitigator:
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 10002:10002 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 10002:10002 --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
-    - docker port $IMAGE_NAME
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose -o log_cli=true $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Deployment of the service in Kubernetes Cluster
 deploy l3_attackmitigator:
+  variables:
+    IMAGE_NAME: 'l3_attackmitigator' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build l3_attackmitigator
-    - unit_test l3_attackmitigator
-    - dependencies all
-    - integ_test execute
+    - unit test l3_attackmitigator
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/l3_attackmitigatorservice.yaml"
-    - kubectl delete pods --selector app=l3_attackmitigatorservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/l3_centralizedattackdetector/.gitlab-ci.yml b/src/l3_centralizedattackdetector/.gitlab-ci.yml
index aaebe01e4dce047b8c84a515283231d06cd26dab..cd410cb9cdf8df5a7f41db46304192a9ba591443 100644
--- a/src/l3_centralizedattackdetector/.gitlab-ci.yml
+++ b/src/l3_centralizedattackdetector/.gitlab-ci.yml
@@ -2,7 +2,6 @@
 build l3_centralizedattackdetector:
   variables:
     IMAGE_NAME: 'l3_centralizedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_centralizedattackdetector-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,16 +10,22 @@ build l3_centralizedattackdetector:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test l3_centralizedattackdetector:
+unit test l3_centralizedattackdetector:
   variables:
     IMAGE_NAME: 'l3_centralizedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_centralizedattackdetector-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -28,34 +33,50 @@ unit_test l3_centralizedattackdetector:
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 10001:10001 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 10001:10001 --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
-    - docker port $IMAGE_NAME
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose -o log_cli=true $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Deployment of the service in Kubernetes Cluster
 deploy l3_centralizedattackdetector:
+  variables:
+    IMAGE_NAME: 'l3_centralizedattackdetector' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build l3_centralizedattackdetector
-    - unit_test l3_centralizedattackdetector
-    - dependencies all
-    - integ_test execute
+    - unit test l3_centralizedattackdetector
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/l3_centralizedattackdetectorservice.yaml"
-    - kubectl delete pods --selector app=l3_centralizedattackdetectorservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/l3_distributedattackdetector/.gitlab-ci.yml b/src/l3_distributedattackdetector/.gitlab-ci.yml
index c2579c70ac78c7ec8ca21fc48110b4656b644f08..435e6ee833ca29fdfd204d9e30990a3c655f2284 100644
--- a/src/l3_distributedattackdetector/.gitlab-ci.yml
+++ b/src/l3_distributedattackdetector/.gitlab-ci.yml
@@ -2,7 +2,6 @@
 build l3_distributedattackdetector:
   variables:
     IMAGE_NAME: 'l3_distributedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_distributedattackdetector-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,16 +10,22 @@ build l3_distributedattackdetector:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test l3_distributedattackdetector:
+unit test l3_distributedattackdetector:
   variables:
     IMAGE_NAME: 'l3_distributedattackdetector' # name of the microservice
-    IMAGE_NAME_TEST: 'l3_distributedattackdetector-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -28,33 +33,50 @@ unit_test l3_distributedattackdetector:
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 10000:10000 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 10000:10000 --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose -o log_cli=true $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
 # Deployment of the service in Kubernetes Cluster
 deploy l3_distributedattackdetector:
+  variables:
+    IMAGE_NAME: 'l3_distributedattackdetector' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build l3_distributedattackdetector
-    - unit_test l3_distributedattackdetector
-    - dependencies all
-    - integ_test execute
+    - unit test l3_distributedattackdetector
+    # - integ_test execute
   script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/l3_distributedattackdetectorservice.yaml"
-    - kubectl delete pods --selector app=l3_distributedattackdetectorservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml
index 337ed7c9183702388663a593a154c28b049fe7ef..3fad3a185ed44c657af4b96ce3f026a8e0b5192b 100644
--- a/src/monitoring/.gitlab-ci.yml
+++ b/src/monitoring/.gitlab-ci.yml
@@ -1,4 +1,4 @@
-# build, tag and push the Docker image to the gitlab registry
+# Build, tag and push the Docker image to the GitLab registry
 build monitoring:
   variables:
     IMAGE_NAME: 'monitoring' # name of the microservice
@@ -11,7 +11,7 @@ build monitoring:
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
   after_script:
-#    - docker rmi $(docker images --quiet --filter=dangling=true)
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
@@ -19,11 +19,10 @@ build monitoring:
       - src/$IMAGE_NAME/**/*.{py,in,yml}
       - src/$IMAGE_NAME/Dockerfile
       - src/$IMAGE_NAME/tests/*.py
-      - src/$IMAGE_NAME/tests/Dockerfile
-      - manifests/$IMAGE_NAME.yaml
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
-# apply unit test to the monitoring component
+# Apply unit test to the component
 unit test monitoring:
   variables:
     IMAGE_NAME: 'monitoring' # name of the microservice
@@ -39,10 +38,13 @@ unit test monitoring:
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker run --name influxdb -d -p 8086:8086 -e INFLUXDB_DB=$INFLUXDB_DATABASE -e INFLUXDB_ADMIN_USER=$INFLUXDB_USER -e INFLUXDB_ADMIN_PASSWORD=$INFLUXDB_PASSWORD -e INFLUXDB_HTTP_AUTH_ENABLED=True --network=teraflowbridge --rm influxdb:1.8
-    - docker run --name $IMAGE_NAME -d -p 7070:7070 --env INFLUXDB_USER=$INFLUXDB_USER --env INFLUXDB_PASSWORD=$INFLUXDB_PASSWORD --env INFLUXDB_DATABASE=$INFLUXDB_DATABASE --env INFLUXDB_HOSTNAME=influxdb -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge  --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - sleep 10
+    - docker run --name $IMAGE_NAME -d -p 7070:7070 --env INFLUXDB_USER=$INFLUXDB_USER --env INFLUXDB_PASSWORD=$INFLUXDB_PASSWORD --env INFLUXDB_DATABASE=$INFLUXDB_DATABASE --env INFLUXDB_HOSTNAME=influxdb -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 30
     - docker ps -a
-    - docker exec -i $IMAGE_NAME bash -c "pytest --junitxml=/opt/results/report.xml"
+    - docker logs $IMAGE_NAME
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
     - docker rm -f $IMAGE_NAME
     - docker rm -f  influxdb
@@ -55,14 +57,15 @@ unit test monitoring:
       - src/$IMAGE_NAME/Dockerfile
       - src/$IMAGE_NAME/tests/*.py
       - src/$IMAGE_NAME/tests/Dockerfile
-      - manifests/$IMAGE_NAME.yaml
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
   artifacts:
       when: always
       reports:
-        junit: src/$IMAGE_NAME/tests/report.xml
-
-# Deployment of the monitoring service in Kubernetes Cluster
+        junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+        cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
+ 
+# Deployment of the service in Kubernetes Cluster
 deploy monitoring:
   variables:
     IMAGE_NAME: 'monitoring' # name of the microservice
@@ -72,10 +75,10 @@ deploy monitoring:
     - unit test monitoring
     # - integ_test execute
   script:
-    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/$IMAGE_NAME.yaml'
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/$IMAGE_NAME.yaml"
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
   # environment:
   #   name: test
@@ -86,4 +89,4 @@ deploy monitoring:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
       when: manual    
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
-      when: manual
\ No newline at end of file
+      when: manual
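The `coverage:` regular expression added to the unit-test job above is what lets GitLab extract the total coverage percentage from the `coverage report` text printed at the end of the test command. A minimal sketch of how that pattern behaves against an illustrative report line (the sample values are invented; the pattern is the one configured above):

```python
import re

# Pattern from the 'coverage:' keyword of the "unit test monitoring" job.
GITLAB_COVERAGE_RE = re.compile(r'TOTAL\s+\d+\s+\d+\s+(\d+%)')

# Illustrative final line of a 'coverage report' run (Stmts, Miss, Cover).
sample_line = "TOTAL                                431     57    87%"

match = GITLAB_COVERAGE_RE.search(sample_line)
if match:
    # GitLab records the first capture group as the job's coverage value.
    print(match.group(1))  # -> 87%
```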
diff --git a/src/monitoring/Dockerfile b/src/monitoring/Dockerfile
index cbf6bcb6ae99733e855e86440e54ec80454713a5..af2fd608521c297f7aa97b45f0e3fc5d88e5615c 100644
--- a/src/monitoring/Dockerfile
+++ b/src/monitoring/Dockerfile
@@ -27,6 +27,8 @@ RUN mkdir -p /var/teraflow/common/rpc_method_wrapper
 RUN mkdir -p /var/teraflow/device
 RUN mkdir -p /var/teraflow/device/proto
 RUN mkdir -p /var/teraflow/device/client
+RUN mkdir -p /var/teraflow/context
+
 
 # Get Python packages per module
 COPY monitoring/requirements.in requirements.in
@@ -38,8 +40,12 @@ COPY monitoring/. monitoring
 COPY device/proto/. device/proto
 COPY device/client/. device/client
 COPY device/Config.py device
-COPY common/logger.py common
-COPY common/tools/. common/tools
-COPY common/rpc_method_wrapper/ServiceExceptions.py common/rpc_method_wrapper
+COPY common/. common
+COPY context/. context
+
+RUN rm -r common/message_broker/tests
+RUN rm -r common/orm/tests
+RUN rm -r common/rpc_method_wrapper/tests
+RUN rm -r context/tests/test_unitary.py
 
 ENTRYPOINT ["python", "-m", "monitoring.service"]
diff --git a/src/monitoring/client/monitoring_client.py b/src/monitoring/client/monitoring_client.py
index 19d60bc3fc61c7c0ea49ab7e5d529b0d5fef6899..c3ab508c947ae07835801ad724f4589af92379c8 100644
--- a/src/monitoring/client/monitoring_client.py
+++ b/src/monitoring/client/monitoring_client.py
@@ -21,19 +21,19 @@ class MonitoringClient:
         LOGGER.info('CreateKpi: {}'.format(request))
         response = self.server.CreateKpi(request)
         LOGGER.info('CreateKpi result: {}'.format(response))
-        return monitoring_pb2.KpiId()
+        return response
 
     def MonitorKpi(self, request):
         LOGGER.info('MonitorKpi: {}'.format(request))
         response = self.server.MonitorKpi(request)
         LOGGER.info('MonitorKpi result: {}'.format(response))
-        return context_pb2.Empty()
+        return response
 
     def IncludeKpi(self, request):
         LOGGER.info('IncludeKpi: {}'.format(request))
         response = self.server.IncludeKpi(request)
         LOGGER.info('IncludeKpi result: {}'.format(response))
-        return context_pb2.Empty()
+        return response
 
     def GetStreamKpi(self, request):
         LOGGER.info('GetStreamKpi: {}'.format(request))
@@ -47,17 +47,15 @@ class MonitoringClient:
         LOGGER.info('GetInstantKpi result: {}'.format(response))
         return monitoring_pb2.Kpi()
 
+    def GetKpiDescriptor(self, request):
+        LOGGER.info('GetKpiDescriptor: {}'.format(request))
+        response = self.server.GetKpiDescriptor(request)
+        LOGGER.info('GetKpiDescriptor result: {}'.format(response))
+        return response
 
 if __name__ == '__main__':
     # get port
     port = sys.argv[1] if len(sys.argv) > 1 else '7070'
 
-    # form request
-    kpi_request = monitoring_pb2.KpiRequest()
-    kpi_request.device_id.device_id = 'KPIID0000' # pylint: disable=maybe-no-member
-    kpi_request.kpiDescription = 'KPI Description'
-    kpi_request.kpi_sample_type = monitoring_pb2.KpiSampleType.PACKETS_TRANSMITTED
-
     # make call to server
     client = MonitoringClient(port=port)
-    response=client.IncludeKpi(kpi_request)
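With this change the client wrappers return the gRPC responses they receive instead of freshly constructed empty messages, and a `GetKpiDescriptor` wrapper is added. A minimal usage sketch, assuming the import paths mirror the repository layout and that `GetKpiDescriptor` accepts a `monitoring_pb2.KpiId` (both are assumptions for illustration, not part of this patch):

```python
from monitoring.client.monitoring_client import MonitoringClient
from monitoring.proto import monitoring_pb2  # assumed import path

# The client defaults to port 7070 when launched as a script.
client = MonitoringClient(port='7070')

# Hypothetical KPI identifier, used only for illustration.
kpi_id = monitoring_pb2.KpiId()

# Each wrapper now forwards the server's reply to the caller.
descriptor = client.GetKpiDescriptor(kpi_id)
print(descriptor)
```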
diff --git a/src/monitoring/proto/context_pb2.py b/src/monitoring/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/monitoring/proto/context_pb2.py
+++ b/src/monitoring/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
diff --git a/src/monitoring/proto/kpi_sample_types_pb2.py b/src/monitoring/proto/kpi_sample_types_pb2.py
index ad22554ec352d0aeae644fdce00c0f28996ed73b..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490 100644
--- a/src/monitoring/proto/kpi_sample_types_pb2.py
+++ b/src/monitoring/proto/kpi_sample_types_pb2.py
@@ -2,6 +2,7 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: kpi_sample_types.proto
 """Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -15,15 +16,62 @@ _sym_db = _symbol_database.Default()
 
 DESCRIPTOR = _descriptor.FileDescriptor(
   name='kpi_sample_types.proto',
-  package='',
+  package='kpi_sample_types',
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x16kpi_sample_types.protob\x06proto3'
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
 )
 
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
 
 
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
diff --git a/src/monitoring/proto/monitoring_pb2.py b/src/monitoring/proto/monitoring_pb2.py
index 7368609d2145f94cc3b746836a5297333151c738..b313ebb68f0da37a540898e8c362fd204a799076 100644
--- a/src/monitoring/proto/monitoring_pb2.py
+++ b/src/monitoring/proto/monitoring_pb2.py
@@ -2,7 +2,6 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: monitoring.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -13,6 +12,7 @@ _sym_db = _symbol_database.Default()
 
 
 from . import context_pb2 as context__pb2
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -21,177 +21,53 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\"\x84\x01\n\x10\x43reateKpiRequest\x12\x16\n\x0ekpiDescription\x18\x01 \x01(\t\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x32\n\x0fkpi_sample_type\x18\x03 \x01(\x0e\x32\x19.monitoring.KpiSampleType\"h\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"i\n\x17MonitorDeviceKpiRequest\x12\x1c\n\x03kpi\x18\x01 \x01(\x0b\x32\x0f.monitoring.Kpi\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"s\n\x11IncludeKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x12\n\ntime_stamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x03 \x01(\x0b\x32\x14.monitoring.KpiValue\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xd6\x01\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\x16\n\x0ekpiDescription\x18\x03 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\x12\x32\n\x0fkpi_sample_type\x18\x05 \x01(\x0e\x32\x19.monitoring.KpiSampleType\x12$\n\tdevice_id\x18\x06 \x01(\x0b\x32\x11.context.DeviceId\"a\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x12\n\x08\x66loatVal\x18\x02 \x01(\x02H\x00\x12\x13\n\tstringVal\x18\x03 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\"+\n\x07KpiList\x12 \n\x07kpiList\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi*x\n\rKpiSampleType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x17\n\x13PACKETS_TRANSMITTED\x10\x65\x12\x14\n\x10PACKETS_RECEIVED\x10\x66\x12\x16\n\x11\x42YTES_TRANSMITTED\x10\xc9\x01\x12\x13\n\x0e\x42YTES_RECEIVED\x10\xca\x01\x32\x8b\x03\n\x11MonitoringService\x12>\n\tCreateKpi\x12\x1c.monitoring.CreateKpiRequest\x1a\x11.monitoring.KpiId\"\x00\x12=\n\nIncludeKpi\x12\x1d.monitoring.IncludeKpiRequest\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12I\n\x10MonitorDeviceKpi\x12#.monitoring.MonitorDeviceKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\x1a\x16kpi_sample_types.proto\"\xda\x01\n\rKpiDescriptor\x12\x17\n\x0fkpi_description\x18\x01 \x01(\t\x12\x38\n\x0fkpi_sample_type\x18\x02 \x01(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\x12$\n\tdevice_id\x18\x03 \x01(\x0b\x32\x11.context.DeviceId\x12(\n\x0b\x65ndpoint_id\x18\x04 \x01(\x0b\x32\x13.context.EndPointId\x12&\n\nservice_id\x18\x05 \x01(\x0b\x32\x12.context.ServiceId\"p\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x1b\n\x13sampling_duration_s\x18\x02 \x01(\x02\x12\x1b\n\x13sampling_interval_s\x18\x03 \x01(\x02\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"d\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\"a\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x12\n\x08\x66loatVal\x18\x02 \x01(\x02H\x00\x12\x13\n\tstringVal\x18\x03 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\",\n\x07KpiList\x12!\n\x08kpi_list\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi2\xf3\x02\n\x11MonitoringService\x12;\n\tCreateKpi\x12\x19.monitoring.KpiDescriptor\x1a\x11.monitoring.KpiId\"\x00\x12\x42\n\x10GetKpiDescriptor\x12\x11.monitoring.KpiId\x1a\x19.monitoring.KpiDescriptor\"\x00\x12/\n\nIncludeKpi\x12\x0f.monitoring.Kpi\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
   ,
-  dependencies=[context__pb2.DESCRIPTOR,])
+  dependencies=[context__pb2.DESCRIPTOR,kpi__sample__types__pb2.DESCRIPTOR,])
 
-_KPISAMPLETYPE = _descriptor.EnumDescriptor(
-  name='KpiSampleType',
-  full_name='monitoring.KpiSampleType',
-  filename=None,
-  file=DESCRIPTOR,
-  create_key=_descriptor._internal_create_key,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_TRANSMITTED', index=1, number=101,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_RECEIVED', index=2, number=102,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_TRANSMITTED', index=3, number=201,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_RECEIVED', index=4, number=202,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=913,
-  serialized_end=1033,
-)
-_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
-
-KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
-UNKNOWN = 0
-PACKETS_TRANSMITTED = 101
-PACKETS_RECEIVED = 102
-BYTES_TRANSMITTED = 201
-BYTES_RECEIVED = 202
 
 
 
-_CREATEKPIREQUEST = _descriptor.Descriptor(
-  name='CreateKpiRequest',
-  full_name='monitoring.CreateKpiRequest',
+_KPIDESCRIPTOR = _descriptor.Descriptor(
+  name='KpiDescriptor',
+  full_name='monitoring.KpiDescriptor',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.CreateKpiRequest.kpiDescription', index=0,
+      name='kpi_description', full_name='monitoring.KpiDescriptor.kpi_description', index=0,
       number=1, type=9, cpp_type=9, label=1,
       has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.CreateKpiRequest.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.CreateKpiRequest.kpi_sample_type', index=2,
-      number=3, type=14, cpp_type=8, label=1,
+      name='kpi_sample_type', full_name='monitoring.KpiDescriptor.kpi_sample_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
       has_default_value=False, default_value=0,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=48,
-  serialized_end=180,
-)
-
-
-_MONITORKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorKpiRequest',
-  full_name='monitoring.MonitorKpiRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
+      name='device_id', full_name='monitoring.KpiDescriptor.device_id', index=2,
+      number=3, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=182,
-  serialized_end=286,
-)
-
-
-_MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorDeviceKpiRequest',
-  full_name='monitoring.MonitorDeviceKpiRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='kpi', full_name='monitoring.MonitorDeviceKpiRequest.kpi', index=0,
-      number=1, type=11, cpp_type=10, label=1,
+      name='endpoint_id', full_name='monitoring.KpiDescriptor.endpoint_id', index=3,
+      number=4, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorDeviceKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorDeviceKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='service_id', full_name='monitoring.KpiDescriptor.service_id', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -207,37 +83,37 @@ _MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=288,
-  serialized_end=393,
+  serialized_start=72,
+  serialized_end=290,
 )
 
 
-_INCLUDEKPIREQUEST = _descriptor.Descriptor(
-  name='IncludeKpiRequest',
-  full_name='monitoring.IncludeKpiRequest',
+_MONITORKPIREQUEST = _descriptor.Descriptor(
+  name='MonitorKpiRequest',
+  full_name='monitoring.MonitorKpiRequest',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.IncludeKpiRequest.kpi_id', index=0,
+      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='time_stamp', full_name='monitoring.IncludeKpiRequest.time_stamp', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='sampling_duration_s', full_name='monitoring.MonitorKpiRequest.sampling_duration_s', index=1,
+      number=2, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.IncludeKpiRequest.kpi_value', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='sampling_interval_s', full_name='monitoring.MonitorKpiRequest.sampling_interval_s', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -253,8 +129,8 @@ _INCLUDEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=395,
-  serialized_end=510,
+  serialized_start=292,
+  serialized_end=404,
 )
 
 
@@ -285,8 +161,8 @@ _KPIID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=512,
-  serialized_end=550,
+  serialized_start=406,
+  serialized_end=444,
 )
 
 
@@ -313,33 +189,12 @@ _KPI = _descriptor.Descriptor(
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.Kpi.kpiDescription', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=3,
+      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=2,
       number=4, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.Kpi.kpi_sample_type', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.Kpi.device_id', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -352,8 +207,8 @@ _KPI = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=553,
-  serialized_end=767,
+  serialized_start=446,
+  serialized_end=546,
 )
 
 
@@ -410,8 +265,8 @@ _KPIVALUE = _descriptor.Descriptor(
       create_key=_descriptor._internal_create_key,
     fields=[]),
   ],
-  serialized_start=769,
-  serialized_end=866,
+  serialized_start=548,
+  serialized_end=645,
 )
 
 
@@ -424,7 +279,7 @@ _KPILIST = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiList', full_name='monitoring.KpiList.kpiList', index=0,
+      name='kpi_list', full_name='monitoring.KpiList.kpi_list', index=0,
       number=1, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -442,21 +297,18 @@ _KPILIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=868,
-  serialized_end=911,
+  serialized_start=647,
+  serialized_end=691,
 )
 
-_CREATEKPIREQUEST.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
-_CREATEKPIREQUEST.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['kpi_sample_type'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
+_KPIDESCRIPTOR.fields_by_name['endpoint_id'].message_type = context__pb2._ENDPOINTID
+_KPIDESCRIPTOR.fields_by_name['service_id'].message_type = context__pb2._SERVICEID
 _MONITORKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_MONITORDEVICEKPIREQUEST.fields_by_name['kpi'].message_type = _KPI
-_INCLUDEKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_INCLUDEKPIREQUEST.fields_by_name['kpi_value'].message_type = _KPIVALUE
 _KPIID.fields_by_name['kpi_id'].message_type = context__pb2._UUID
 _KPI.fields_by_name['kpi_id'].message_type = _KPIID
 _KPI.fields_by_name['kpi_value'].message_type = _KPIVALUE
-_KPI.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
-_KPI.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['intVal'])
 _KPIVALUE.fields_by_name['intVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
@@ -469,24 +321,21 @@ _KPIVALUE.fields_by_name['stringVal'].containing_oneof = _KPIVALUE.oneofs_by_nam
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['boolVal'])
 _KPIVALUE.fields_by_name['boolVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
-_KPILIST.fields_by_name['kpiList'].message_type = _KPI
-DESCRIPTOR.message_types_by_name['CreateKpiRequest'] = _CREATEKPIREQUEST
+_KPILIST.fields_by_name['kpi_list'].message_type = _KPI
+DESCRIPTOR.message_types_by_name['KpiDescriptor'] = _KPIDESCRIPTOR
 DESCRIPTOR.message_types_by_name['MonitorKpiRequest'] = _MONITORKPIREQUEST
-DESCRIPTOR.message_types_by_name['MonitorDeviceKpiRequest'] = _MONITORDEVICEKPIREQUEST
-DESCRIPTOR.message_types_by_name['IncludeKpiRequest'] = _INCLUDEKPIREQUEST
 DESCRIPTOR.message_types_by_name['KpiId'] = _KPIID
 DESCRIPTOR.message_types_by_name['Kpi'] = _KPI
 DESCRIPTOR.message_types_by_name['KpiValue'] = _KPIVALUE
 DESCRIPTOR.message_types_by_name['KpiList'] = _KPILIST
-DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
-CreateKpiRequest = _reflection.GeneratedProtocolMessageType('CreateKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEKPIREQUEST,
+KpiDescriptor = _reflection.GeneratedProtocolMessageType('KpiDescriptor', (_message.Message,), {
+  'DESCRIPTOR' : _KPIDESCRIPTOR,
   '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.CreateKpiRequest)
+  # @@protoc_insertion_point(class_scope:monitoring.KpiDescriptor)
   })
-_sym_db.RegisterMessage(CreateKpiRequest)
+_sym_db.RegisterMessage(KpiDescriptor)
 
 MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest', (_message.Message,), {
   'DESCRIPTOR' : _MONITORKPIREQUEST,
@@ -495,20 +344,6 @@ MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest'
   })
 _sym_db.RegisterMessage(MonitorKpiRequest)
 
-MonitorDeviceKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorDeviceKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _MONITORDEVICEKPIREQUEST,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.MonitorDeviceKpiRequest)
-  })
-_sym_db.RegisterMessage(MonitorDeviceKpiRequest)
-
-IncludeKpiRequest = _reflection.GeneratedProtocolMessageType('IncludeKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _INCLUDEKPIREQUEST,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.IncludeKpiRequest)
-  })
-_sym_db.RegisterMessage(IncludeKpiRequest)
-
 KpiId = _reflection.GeneratedProtocolMessageType('KpiId', (_message.Message,), {
   'DESCRIPTOR' : _KPIID,
   '__module__' : 'monitoring_pb2'
@@ -546,45 +381,45 @@ _MONITORINGSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=1036,
-  serialized_end=1431,
+  serialized_start=694,
+  serialized_end=1065,
   methods=[
   _descriptor.MethodDescriptor(
     name='CreateKpi',
     full_name='monitoring.MonitoringService.CreateKpi',
     index=0,
     containing_service=None,
-    input_type=_CREATEKPIREQUEST,
+    input_type=_KPIDESCRIPTOR,
     output_type=_KPIID,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='IncludeKpi',
-    full_name='monitoring.MonitoringService.IncludeKpi',
+    name='GetKpiDescriptor',
+    full_name='monitoring.MonitoringService.GetKpiDescriptor',
     index=1,
     containing_service=None,
-    input_type=_INCLUDEKPIREQUEST,
-    output_type=context__pb2._EMPTY,
+    input_type=_KPIID,
+    output_type=_KPIDESCRIPTOR,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorKpi',
-    full_name='monitoring.MonitoringService.MonitorKpi',
+    name='IncludeKpi',
+    full_name='monitoring.MonitoringService.IncludeKpi',
     index=2,
     containing_service=None,
-    input_type=_MONITORKPIREQUEST,
+    input_type=_KPI,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorDeviceKpi',
-    full_name='monitoring.MonitoringService.MonitorDeviceKpi',
+    name='MonitorKpi',
+    full_name='monitoring.MonitoringService.MonitorKpi',
     index=3,
     containing_service=None,
-    input_type=_MONITORDEVICEKPIREQUEST,
+    input_type=_MONITORKPIREQUEST,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
diff --git a/src/monitoring/proto/monitoring_pb2_grpc.py b/src/monitoring/proto/monitoring_pb2_grpc.py
index 2063dc0f3accaaed4d0a511faeceb53519626fd2..36c6835938af46b34b37f673bb6d4a1374a57cf0 100644
--- a/src/monitoring/proto/monitoring_pb2_grpc.py
+++ b/src/monitoring/proto/monitoring_pb2_grpc.py
@@ -17,12 +17,17 @@ class MonitoringServiceStub(object):
         """
         self.CreateKpi = channel.unary_unary(
                 '/monitoring.MonitoringService/CreateKpi',
-                request_serializer=monitoring__pb2.CreateKpiRequest.SerializeToString,
+                request_serializer=monitoring__pb2.KpiDescriptor.SerializeToString,
                 response_deserializer=monitoring__pb2.KpiId.FromString,
                 )
+        self.GetKpiDescriptor = channel.unary_unary(
+                '/monitoring.MonitoringService/GetKpiDescriptor',
+                request_serializer=monitoring__pb2.KpiId.SerializeToString,
+                response_deserializer=monitoring__pb2.KpiDescriptor.FromString,
+                )
         self.IncludeKpi = channel.unary_unary(
                 '/monitoring.MonitoringService/IncludeKpi',
-                request_serializer=monitoring__pb2.IncludeKpiRequest.SerializeToString,
+                request_serializer=monitoring__pb2.Kpi.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
         self.MonitorKpi = channel.unary_unary(
@@ -30,11 +35,6 @@ class MonitoringServiceStub(object):
                 request_serializer=monitoring__pb2.MonitorKpiRequest.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
-        self.MonitorDeviceKpi = channel.unary_unary(
-                '/monitoring.MonitoringService/MonitorDeviceKpi',
-                request_serializer=monitoring__pb2.MonitorDeviceKpiRequest.SerializeToString,
-                response_deserializer=context__pb2.Empty.FromString,
-                )
         self.GetStreamKpi = channel.unary_stream(
                 '/monitoring.MonitoringService/GetStreamKpi',
                 request_serializer=monitoring__pb2.KpiId.SerializeToString,
@@ -51,25 +51,24 @@ class MonitoringServiceServicer(object):
     """Missing associated documentation comment in .proto file."""
 
     def CreateKpi(self, request, context):
-        """Old RPCs:
-        """
+        """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
-    def IncludeKpi(self, request, context):
+    def GetKpiDescriptor(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
-    def MonitorKpi(self, request, context):
+    def IncludeKpi(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
-    def MonitorDeviceKpi(self, request, context):
+    def MonitorKpi(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
@@ -92,12 +91,17 @@ def add_MonitoringServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
             'CreateKpi': grpc.unary_unary_rpc_method_handler(
                     servicer.CreateKpi,
-                    request_deserializer=monitoring__pb2.CreateKpiRequest.FromString,
+                    request_deserializer=monitoring__pb2.KpiDescriptor.FromString,
                     response_serializer=monitoring__pb2.KpiId.SerializeToString,
             ),
+            'GetKpiDescriptor': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetKpiDescriptor,
+                    request_deserializer=monitoring__pb2.KpiId.FromString,
+                    response_serializer=monitoring__pb2.KpiDescriptor.SerializeToString,
+            ),
             'IncludeKpi': grpc.unary_unary_rpc_method_handler(
                     servicer.IncludeKpi,
-                    request_deserializer=monitoring__pb2.IncludeKpiRequest.FromString,
+                    request_deserializer=monitoring__pb2.Kpi.FromString,
                     response_serializer=context__pb2.Empty.SerializeToString,
             ),
             'MonitorKpi': grpc.unary_unary_rpc_method_handler(
@@ -105,11 +109,6 @@ def add_MonitoringServiceServicer_to_server(servicer, server):
                     request_deserializer=monitoring__pb2.MonitorKpiRequest.FromString,
                     response_serializer=context__pb2.Empty.SerializeToString,
             ),
-            'MonitorDeviceKpi': grpc.unary_unary_rpc_method_handler(
-                    servicer.MonitorDeviceKpi,
-                    request_deserializer=monitoring__pb2.MonitorDeviceKpiRequest.FromString,
-                    response_serializer=context__pb2.Empty.SerializeToString,
-            ),
             'GetStreamKpi': grpc.unary_stream_rpc_method_handler(
                     servicer.GetStreamKpi,
                     request_deserializer=monitoring__pb2.KpiId.FromString,
@@ -142,13 +141,13 @@ class MonitoringService(object):
             timeout=None,
             metadata=None):
         return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/CreateKpi',
-            monitoring__pb2.CreateKpiRequest.SerializeToString,
+            monitoring__pb2.KpiDescriptor.SerializeToString,
             monitoring__pb2.KpiId.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
-    def IncludeKpi(request,
+    def GetKpiDescriptor(request,
             target,
             options=(),
             channel_credentials=None,
@@ -158,14 +157,14 @@ class MonitoringService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/IncludeKpi',
-            monitoring__pb2.IncludeKpiRequest.SerializeToString,
-            context__pb2.Empty.FromString,
+        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/GetKpiDescriptor',
+            monitoring__pb2.KpiId.SerializeToString,
+            monitoring__pb2.KpiDescriptor.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
-    def MonitorKpi(request,
+    def IncludeKpi(request,
             target,
             options=(),
             channel_credentials=None,
@@ -175,14 +174,14 @@ class MonitoringService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/MonitorKpi',
-            monitoring__pb2.MonitorKpiRequest.SerializeToString,
+        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/IncludeKpi',
+            monitoring__pb2.Kpi.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
-    def MonitorDeviceKpi(request,
+    def MonitorKpi(request,
             target,
             options=(),
             channel_credentials=None,
@@ -192,8 +191,8 @@ class MonitoringService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/MonitorDeviceKpi',
-            monitoring__pb2.MonitorDeviceKpiRequest.SerializeToString,
+        return grpc.experimental.unary_unary(request, target, '/monitoring.MonitoringService/MonitorKpi',
+            monitoring__pb2.MonitorKpiRequest.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/monitoring/requirements.in b/src/monitoring/requirements.in
index 040bafcc2dcf3d9fa89bd0b538f0e85dd9dbe604..70d0c3ee926a9b136f2b1d79f91aecfe2c451ae3 100644
--- a/src/monitoring/requirements.in
+++ b/src/monitoring/requirements.in
@@ -9,3 +9,5 @@ prometheus-client
 pytest
 pytest-benchmark
 influxdb
+redis
+coverage
\ No newline at end of file
diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..a28ff36a3f469bc0967be71e7b0005c316e9282a
--- /dev/null
+++ b/src/monitoring/service/EventTools.py
@@ -0,0 +1,80 @@
+import threading
+from queue import Queue
+
+import grpc
+
+from common.rpc_method_wrapper.ServiceExceptions import ServiceException
+from context.client.ContextClient import ContextClient
+from context.proto import kpi_sample_types_pb2
+from context.proto.context_pb2 import Empty, EventTypeEnum
+
+from common.logger import getJSONLogger
+from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.proto import monitoring_pb2
+
+LOGGER = getJSONLogger('monitoringservice-server')
+LOGGER.setLevel('DEBUG')
+
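+# Subscribes to Device events from the Context service and, for every endpoint of a
+# newly created device, registers a KPI descriptor through the Monitoring service.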
+class EventsDeviceCollector:
+    def __init__(self, context_client_grpc : ContextClient, monitoring_client_grpc : MonitoringClient) -> None: # pylint: disable=redefined-outer-name
+        self._events_queue = Queue()
+
+        self._device_stream   = context_client_grpc.GetDeviceEvents(Empty())
+        self._context_client  = context_client_grpc
+        self._monitoring_client = monitoring_client_grpc
+
+        self._device_thread   = threading.Thread(target=self._collect, args=(self._device_stream,), daemon=False)
+
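+    # Drain the device event stream into the internal queue until the stream is cancelled.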
+    def _collect(self, events_stream) -> None:
+        try:
+            for event in events_stream:
+                self._events_queue.put_nowait(event)
+        except grpc.RpcError as e:
+            if e.code() != grpc.StatusCode.CANCELLED: # pylint: disable=no-member
+                raise # pragma: no cover
+
+    def start(self):
+        self._device_thread.start()
+
+    def get_event(self, block : bool = True, timeout : float = 0.1):
+        return self._events_queue.get(block=block, timeout=timeout)
+
+    def stop(self):
+
+        self._device_stream.cancel()
+
+        self._device_thread.join()
+
+    def listen_events(self):
+        LOGGER.info('Listening to device events')
+        qsize = self._events_queue.qsize()
+        try:
+            kpi_id_list = []
+            if qsize > 0:
+                for i in range(qsize):
+                    LOGGER.debug('Queue size: ' + str(qsize))
+                    event = self.get_event(block=True)
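+                    # only device-creation events trigger KPI creation; other event types are ignored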
+                    if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
+                        device = self._context_client.GetDevice(event.device_id)
+                        LOGGER.debug('Number of endpoints: ' + str(len(device.device_endpoints)))
+                        for end_point in device.device_endpoints:
+
+                            # for k,rule in enumerate(device.device_config.config_rules):
+                            kpi_descriptor = monitoring_pb2.KpiDescriptor()
+
+                            kpi_descriptor.kpi_description                      = device.device_type
+                            kpi_descriptor.kpi_sample_type                      = kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED
+                            kpi_descriptor.device_id.CopyFrom(device.device_id)
+                            kpi_descriptor.endpoint_id.CopyFrom(end_point.endpoint_id)
+                            kpi_descriptor.service_id.service_uuid.uuid         = "SERV"+str(i+1)
+
+                            kpi_id = self._monitoring_client.CreateKpi(kpi_descriptor)
+                            kpi_id_list.append(kpi_id)
+
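+            # the collected KPI identifiers are later used by the caller to build MonitorKpiRequest messages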
+            return kpi_id_list
+
+        except ServiceException:
+            LOGGER.exception('ListenEvents exception')
+
+        except Exception:  # pragma: no cover
+            LOGGER.exception('ListenEvents exception')
diff --git a/src/monitoring/service/influx_tools.py b/src/monitoring/service/InfluxTools.py
similarity index 97%
rename from src/monitoring/service/influx_tools.py
rename to src/monitoring/service/InfluxTools.py
index 7ed1c2e601854561f1c7c30e673efe8af760f925..df0cf01b2adddff4bcd0410d12505ed65c24e145 100644
--- a/src/monitoring/service/influx_tools.py
+++ b/src/monitoring/service/InfluxTools.py
@@ -28,3 +28,5 @@ class Influx():
-      points = results.get_points(tags={'kpi_id' : '1','device_id': '1', 'kpi_sample_type': '101'})
+      # materialize the generator so the points can be both printed and returned
+      points = list(results.get_points(tags={'kpi_id' : '1','device_id': '1', 'kpi_sample_type': '101'}))
       for point in points:
           print("Time: %s, Value: %i" % (point['time'], point['kpi_value']))
+      return points
diff --git a/src/monitoring/service/MonitoringService.py b/src/monitoring/service/MonitoringService.py
index 08a3c81b542f6001140c38c9d9b9ae7bddb32190..665ce44f7e87ca73a92dfd6123ed3aa8935f0e92 100644
--- a/src/monitoring/service/MonitoringService.py
+++ b/src/monitoring/service/MonitoringService.py
@@ -2,14 +2,13 @@ from concurrent import futures
 
 import grpc
 
-from monitoring.proto import monitoring_pb2_grpc
 from monitoring.service.MonitoringServiceServicerImpl import MonitoringServiceServicerImpl
 from monitoring.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from monitoring.proto.monitoring_pb2_grpc import  add_MonitoringServiceServicer_to_server
 
 from grpc_health.v1 import health
 from grpc_health.v1 import health_pb2
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
-from monitoring.proto.monitoring_pb2_grpc import  add_MonitoringServiceServicer_to_server
 
 from common.logger import getJSONLogger
 LOGGER = getJSONLogger('monitoringservice-server')
diff --git a/src/monitoring/service/MonitoringServiceServicerImpl.py b/src/monitoring/service/MonitoringServiceServicerImpl.py
index bb4de08a2a7bc5b3b5ef0452218af7d636a0383d..fcadaa0c910cf912055ad30efc1ed389f256e3e4 100644
--- a/src/monitoring/service/MonitoringServiceServicerImpl.py
+++ b/src/monitoring/service/MonitoringServiceServicerImpl.py
@@ -1,53 +1,28 @@
-import os
-import grpc
-from common.rpc_method_wrapper.ServiceExceptions import ServiceException
+import os, grpc
 
-from device.Config import GRPC_SERVICE_PORT
-from device.client.DeviceClient import DeviceClient
-from device.proto import device_pb2
-from monitoring.proto import context_pb2
-from monitoring.service import sqlite_tools, influx_tools
+from prometheus_client import Summary
+from prometheus_client import Counter
 
+from monitoring.service import SqliteTools, InfluxTools
 from monitoring.proto import monitoring_pb2
 from monitoring.proto import monitoring_pb2_grpc
 
+from common.rpc_method_wrapper.ServiceExceptions import ServiceException
 from common.logger import getJSONLogger
+
+from context.proto import context_pb2
+
+
+from device.Config import GRPC_SERVICE_PORT
+from device.client.DeviceClient import DeviceClient
+from device.proto import device_pb2
+
 LOGGER = getJSONLogger('monitoringservice-server')
 LOGGER.setLevel('DEBUG')
 
-from prometheus_client import Summary, Histogram
-from prometheus_client import Counter
-
 MONITORING_GETINSTANTKPI_REQUEST_TIME = Summary('monitoring_getinstantkpi_processing_seconds', 'Time spent processing monitoring instant kpi request')
 MONITORING_INCLUDEKPI_COUNTER = Counter('monitoring_includekpi_counter', 'Monitoring include kpi request counter')
 
-# CREATEKPI_COUNTER_STARTED    = Counter  ('monitoring_createkpi_counter_started',
-#                                           'Monitoring:CreateKpi counter of requests started'  )
-# CREATEKPI_COUNTER_COMPLETED  = Counter  ('monitoring_createkpi counter_completed',
-#                                           'Monitoring:CreateKpi counter of requests completed')
-# CREATEKPI_COUNTER_FAILED     = Counter  ('monitoring_createkpi_counter_failed',
-#                                           'Monitoring:CreateKpi counter of requests failed'   )
-# CREATEKPI_HISTOGRAM_DURATION = Histogram('monitoring_createkpi_histogram_duration',
-#                                           'Monitoring:CreateKpi histogram of request duration')
-#
-# MONITORKPI_COUNTER_STARTED    = Counter  ('monitoring_monitorkpi_counter_started',
-#                                           'Monitoring:MonitorKpi counter of requests started'  )
-# MONITORKPI_COUNTER_COMPLETED  = Counter  ('monitoring_monitorkpi counter_completed',
-#                                           'Monitoring:MonitorKpi counter of requests completed')
-# MONITORKPI_COUNTER_FAILED     = Counter  ('monitoring_monitorkpi_counter_failed',
-#                                           'Monitoring:MonitorKpi counter of requests failed'   )
-# MONITORKPI_HISTOGRAM_DURATION = Histogram('monitoring_monitorkpi_histogram_duration',
-#                                           'Monitoring:MonitorKpi histogram of request duration')
-#
-# INCLUDEKPI_COUNTER_STARTED    = Counter  ('monitoring_includekpi_counter_started',
-#                                           'Monitoring:IncludeKpi counter of requests started'  )
-# INCLUDEKPI_COUNTER_COMPLETED  = Counter  ('monitoring_includekpi counter_completed',
-#                                           'Monitoring:IncludeKpi counter of requests completed')
-# INCLUDEKPI_COUNTER_FAILED     = Counter  ('monitoring_includekpi_counter_failed',
-#                                           'Monitoring:IncludeKpi counter of requests failed'   )
-# INCLUDEKPI_HISTOGRAM_DURATION = Histogram('monitoring_includekpi_histogram_duration',
-#                                           'Monitoring:IncludeKpi histogram of request duration')
-
 INFLUXDB_HOSTNAME = os.environ.get("INFLUXDB_HOSTNAME")
 INFLUXDB_USER = os.environ.get("INFLUXDB_USER")
 INFLUXDB_PASSWORD = os.environ.get("INFLUXDB_PASSWORD")
@@ -59,10 +34,10 @@ class MonitoringServiceServicerImpl(monitoring_pb2_grpc.MonitoringServiceService
         LOGGER.info('Init monitoringService')
 
         # Init sqlite monitoring db
-        self.sql_db = sqlite_tools.SQLite('monitoring.db')
+        self.sql_db = SqliteTools.SQLite('monitoring.db')
 
         # Create influx_db client
-        self.influx_db = influx_tools.Influx(INFLUXDB_HOSTNAME,"8086",INFLUXDB_USER,INFLUXDB_PASSWORD,INFLUXDB_DATABASE)
+        self.influx_db = InfluxTools.Influx(INFLUXDB_HOSTNAME,"8086",INFLUXDB_USER,INFLUXDB_PASSWORD,INFLUXDB_DATABASE)
 
     # CreateKpi (CreateKpiRequest) returns (KpiId) {}
     def CreateKpi(self, request : monitoring_pb2.KpiDescriptor, grpc_context : grpc.ServicerContext) -> monitoring_pb2.KpiId :
@@ -79,6 +54,7 @@ class MonitoringServiceServicerImpl(monitoring_pb2_grpc.MonitoringServiceService
             kpi_service_id  = request.service_id.service_uuid.uuid
 
             data = self.sql_db.insert_KPI(kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
+
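+            # the value returned by insert_KPI (the KPI's database identifier) is reused as the KPI uuid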
             kpi_id.kpi_id.uuid = str(data)
 
             # CREATEKPI_COUNTER_COMPLETED.inc()
@@ -100,14 +76,10 @@ class MonitoringServiceServicerImpl(monitoring_pb2_grpc.MonitoringServiceService
             # Creates the request to send to the device service
             monitor_device_request = device_pb2.MonitoringSettings()
 
-            kpiDescriptor = self.get_KpiDescriptor(request.kpi_id)
+            kpiDescriptor = self.GetKpiDescriptor(request.kpi_id, grpc_context)
 
+            monitor_device_request.kpi_descriptor.CopyFrom(kpiDescriptor)
             monitor_device_request.kpi_id.kpi_id.uuid                               = request.kpi_id.kpi_id.uuid
-            monitor_device_request.kpi_descriptor.kpi_description                   = kpiDescriptor.kpi_description
-            monitor_device_request.kpi_descriptor.kpi_sample_type                   = kpiDescriptor.kpi_sample_type
-            monitor_device_request.kpi_descriptor.device_id.device_uuid.uuid        = kpiDescriptor.device_id.device_uuid.uuid
-            monitor_device_request.kpi_descriptor.endpoint_id.endpoint_uuid.uuid    = kpiDescriptor.endpoint_id.endpoint_uuid.uuid
-            monitor_device_request.kpi_descriptor.service_id.service_uuid.uuid      = kpiDescriptor.service_id.service_uuid.uuid
             monitor_device_request.sampling_duration_s                              = request.sampling_duration_s
             monitor_device_request.sampling_interval_s                              = request.sampling_interval_s
 
@@ -130,7 +102,7 @@ class MonitoringServiceServicerImpl(monitoring_pb2_grpc.MonitoringServiceService
         LOGGER.info('IncludeKpi')
 
         try:
-            kpiDescriptor = self.get_KpiDescriptor(request.kpi_id)
+            kpiDescriptor = self.GetKpiDescriptor(request.kpi_id, grpc_context)
 
             kpiSampleType   = kpiDescriptor.kpi_sample_type
             kpiId           = request.kpi_id.kpi_id.uuid
@@ -165,17 +137,27 @@ class MonitoringServiceServicerImpl(monitoring_pb2_grpc.MonitoringServiceService
         LOGGER.info('GetInstantKpi')
         return monitoring_pb2.Kpi()
 
-    def get_KpiDescriptor(self, kpiId):
+
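+    # Rebuilds the KPI descriptor from the row stored in the local SQLite database by CreateKpi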
+    def GetKpiDescriptor(self, request : monitoring_pb2.KpiId, grpc_context : grpc.ServicerContext) -> monitoring_pb2.KpiDescriptor:
         LOGGER.info('getting Kpi by KpiID')
+        try:
+            kpi_db = self.sql_db.get_KPI(int(request.kpi_id.uuid))
+            LOGGER.debug(self.sql_db.get_KPIS())
+
+            kpiDescriptor = monitoring_pb2.KpiDescriptor()
+
+            kpiDescriptor.kpi_description                   = kpi_db[1]
+            kpiDescriptor.kpi_sample_type                   = kpi_db[2]
+            kpiDescriptor.device_id.device_uuid.uuid        = str(kpi_db[3])
+            kpiDescriptor.endpoint_id.endpoint_uuid.uuid    = str(kpi_db[4])
+            kpiDescriptor.service_id.service_uuid.uuid      = str(kpi_db[5])
 
-        kpi_db = self.sql_db.get_KPI(int(kpiId.kpi_id.uuid))
+            return kpiDescriptor
+        except ServiceException as e:
+            LOGGER.exception('GetKpiDescriptor exception')
+            grpc_context.abort(e.code, e.details)
 
-        kpiDescriptor = monitoring_pb2.KpiDescriptor()
+        except Exception as e:  # pragma: no cover
+            LOGGER.exception('GetKpiDescriptor exception')
 
-        kpiDescriptor.kpi_description                   = kpi_db[1]
-        kpiDescriptor.kpi_sample_type                   = kpi_db[2]
-        kpiDescriptor.device_id.device_uuid.uuid        = str(kpi_db[3])
-        kpiDescriptor.endpoint_id.endpoint_uuid.uuid    = str(kpi_db[4])
-        kpiDescriptor.service_id.service_uuid.uuid      = str(kpi_db[5])
 
-        return kpiDescriptor
\ No newline at end of file
diff --git a/src/monitoring/service/sqlite_tools.py b/src/monitoring/service/SqliteTools.py
similarity index 93%
rename from src/monitoring/service/sqlite_tools.py
rename to src/monitoring/service/SqliteTools.py
index 5502ac5c984d47222e3c439680f0838334a770d0..d03d6699b5ff29bc4bad4bd087cb6c0a965e397f 100644
--- a/src/monitoring/service/sqlite_tools.py
+++ b/src/monitoring/service/SqliteTools.py
@@ -16,7 +16,7 @@ class SQLite():
 
     def insert_KPI(self,kpi_description,kpi_sample_type,device_id,endpoint_id,service_id ):
         c = self.client.cursor()
-        c.execute("SELECT kpi_id FROM KPI WHERE device_id is ? AND kpi_sample_type is ?",(device_id,kpi_sample_type))
+        c.execute("SELECT kpi_id FROM KPI WHERE device_id is ? AND kpi_sample_type is ? AND endpoint_id is ?",(device_id,kpi_sample_type,endpoint_id))
         data=c.fetchone()
         if data is None:
             c.execute("INSERT INTO KPI (kpi_description,kpi_sample_type,device_id,endpoint_id,service_id) VALUES (?,?,?,?,?)", (kpi_description,kpi_sample_type,device_id,endpoint_id,service_id))
@@ -54,4 +54,7 @@ class SQLite():
     def get_KPIS(self):
         data = self.client.execute("SELECT * FROM KPI")
-        for row in data:
-            print(row)
\ No newline at end of file
+        # fetch the rows once; iterating the cursor first would leave nothing for fetchall()
+        rows = data.fetchall()
+        for row in rows:
+            print(row)
+        return rows
diff --git a/src/monitoring/service/__main__.py b/src/monitoring/service/__main__.py
index 4d001e286ae568198b9b47497d633cade524b864..ea189e1c905b07f92ef3e9cd100763eb7dffa08f 100644
--- a/src/monitoring/service/__main__.py
+++ b/src/monitoring/service/__main__.py
@@ -1,9 +1,13 @@
 import logging, os, signal, sys, threading
 import time
 
+from context.client.ContextClient import ContextClient
 from monitoring.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
 
 from common.logger import getJSONLogger
+from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.proto import monitoring_pb2
+from monitoring.service.EventTools import EventsDeviceCollector
 from monitoring.service.MonitoringService import MonitoringService
 
 LOGGER = getJSONLogger('monitoringservice-server')
@@ -16,9 +20,40 @@ logger = None
 
 def signal_handler(signal, frame):
     global terminate, logger
-    logger.warning('Terminate signal received')
+    LOGGER.warning('Terminate signal received')
     terminate.set()
 
+def start_monitoring():
+    LOGGER.info('Start Monitoring...')
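+    # gRPC clients towards the Context and Monitoring services (addresses and ports are hard-coded here)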
+    context_client_grpc = ContextClient(address='localhost', port='2020')
+    monitoring_client = MonitoringClient(server='localhost', port='7070')  # instantiate the client
+
+    # Start Listening Events (a single collector and thread are reused across loop iterations)
+    events_collector = EventsDeviceCollector(context_client_grpc, monitoring_client)
+    events_collector.start()
+
+    while True:
+        if terminate.is_set():
+            LOGGER.warning("Stopping execution...")
+            events_collector.stop()
+            break
+
+        list_new_kpi_ids = events_collector.listen_events()
+
+        # Monitor Kpis
+        if list_new_kpi_ids:
+            for kpi_id in list_new_kpi_ids:
+                # Create Monitor Kpi Requests
+                monitor_kpi_request = monitoring_pb2.MonitorKpiRequest()
+                monitor_kpi_request.kpi_id.CopyFrom(kpi_id)
+                monitor_kpi_request.sampling_duration_s = 120
+                monitor_kpi_request.sampling_interval_s = 5
+
+                # MonitorKpi(monitor_kpi_request)
+
+
+
+
 def main():
     global terminate, logger
 
@@ -42,6 +77,8 @@ def main():
     grpc_service = MonitoringService(port=service_port, max_workers=max_workers, grace_period=grace_period)
     grpc_service.start()
 
+    # start_monitoring()
+
     # Wait for Ctrl+C or termination signal
     while not terminate.wait(timeout=0.1): pass
 
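
start_monitoring() is left disabled in main() and its MonitorKpi call is still commented out. A minimal sketch (an assumption about where this is heading, not the committed behaviour) with the collector created once, listen_events polled until the terminate event fires, and each newly discovered KPI handed back to the Monitoring service through its own client; the localhost addresses and the 120 s / 5 s sampling values are the hard-coded development settings used above:

    from context.client.ContextClient import ContextClient
    from monitoring.client.monitoring_client import MonitoringClient
    from monitoring.proto import monitoring_pb2
    from monitoring.service.EventTools import EventsDeviceCollector

    def start_monitoring_sketch(terminate):
        context_client    = ContextClient(address='localhost', port='2020')
        monitoring_client = MonitoringClient(server='localhost', port='7070')

        # One collector for the whole loop, instead of a new one per iteration.
        events_collector = EventsDeviceCollector(context_client, monitoring_client)
        events_collector.start()

        while not terminate.is_set():
            # listen_events() is assumed to return the KpiIds gathered so far.
            for kpi_id in events_collector.listen_events():
                monitor_kpi_request = monitoring_pb2.MonitorKpiRequest()
                monitor_kpi_request.kpi_id.CopyFrom(kpi_id)
                monitor_kpi_request.sampling_duration_s = 120
                monitor_kpi_request.sampling_interval_s = 5
                monitoring_client.MonitorKpi(monitor_kpi_request)

        events_collector.stop()
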
diff --git a/src/monitoring/tests/test_monitoring.py b/src/monitoring/tests/test_monitoring.py
deleted file mode 100644
index 60c5e7036e9a28d17e49160f6ed181f44e2cab3e..0000000000000000000000000000000000000000
--- a/src/monitoring/tests/test_monitoring.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import logging, os
-import pytest
-
-from monitoring.proto import context_pb2, kpi_sample_types_pb2
-from monitoring.proto import monitoring_pb2
-from monitoring.client.monitoring_client import MonitoringClient
-from monitoring.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
-from monitoring.service.MonitoringService import MonitoringService
-
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
-
-SERVER_ADDRESS = '127.0.0.1'
-LISTEN_ADDRESS = '[::]'
-PORT = 7070
-
-###########################
-# Tests Setup
-###########################
-
-# This fixture will be requested by test cases and last during testing session
-@pytest.fixture(scope='session')
-def monitoring_service():
-    LOGGER.warning('monitoring_service begin')
-
-    service_port = GRPC_SERVICE_PORT
-    max_workers = GRPC_MAX_WORKERS
-    grace_period = GRPC_GRACE_PERIOD
-
-    LOGGER.info('Initializing MonitoringService...')
-    grpc_service = MonitoringService(port=service_port, max_workers=max_workers, grace_period=grace_period)
-    server = grpc_service.start()
-
-    # yield the server, when test finishes, execution will resume to stop it
-    LOGGER.warning('monitoring_service yielding')
-    yield server
-
-    LOGGER.info('Terminating MonitoringService...')
-    grpc_service.stop()
-
-# This fixture will be requested by test cases and last during testing session.
-# The client requires the server, so client fixture has the server as dependency.
-@pytest.fixture(scope='session')
-def monitoring_client(monitoring_service):
-    LOGGER.warning('monitoring_client begin')
-    client = MonitoringClient(server=SERVER_ADDRESS, port=PORT)  # instantiate the client
-    LOGGER.warning('monitoring_client returning')
-    return client
-
-# This fixture will be requested by test cases and last during testing session.
-@pytest.fixture(scope='session')
-def kpi():
-    LOGGER.warning('test_include_kpi begin')
-    # form request
-    kpi = monitoring_pb2.Kpi()
-    kpi.kpi_id.kpi_id.uuid = 'KPIID0000'  # pylint: disable=maybe-no-member
-    kpi.kpiDescription = 'KPI Desc'
-    return kpi
-
-@pytest.fixture(scope='session')
-def kpi_id():
-    LOGGER.warning('test_include_kpi begin')
-
-    # form request
-    kpi_id = monitoring_pb2.KpiId()
-    kpi_id.kpi_id.uuid = 'KPIID0000'
-
-    return kpi_id
-
-@pytest.fixture(scope='session')
-def create_kpi_request():
-    LOGGER.warning('test_include_kpi begin')
-
-    create_kpi_request = monitoring_pb2.KpiDescriptor()
-    create_kpi_request.kpi_description = 'KPI Description Test'
-    create_kpi_request.kpi_sample_type = kpi_sample_types_pb2.KpiSampleType.PACKETS_TRANSMITTED
-    create_kpi_request.device_id.device_uuid.uuid = 'DEV1'  # pylint: disable=maybe-no-member
-    create_kpi_request.service_id.service_uuid.uuid = "SERV1"
-    create_kpi_request.endpoint_id.endpoint_uuid.uuid = "END1"
-
-    return create_kpi_request
-
-@pytest.fixture(scope='session')
-def monitor_kpi_request():
-    LOGGER.warning('test_monitor_kpi begin')
-
-    monitor_kpi_request = monitoring_pb2.MonitorKpiRequest()
-    monitor_kpi_request.kpi_id.kpi_id.uuid = str(1)
-    monitor_kpi_request.sampling_duration_s = 120
-    monitor_kpi_request.sampling_interval_s = 5
-
-    return monitor_kpi_request
-
-
-@pytest.fixture(scope='session')
-def include_kpi_request():
-    LOGGER.warning('test_include_kpi begin')
-
-    include_kpi_request = monitoring_pb2.Kpi()
-    include_kpi_request.kpi_id.kpi_id.uuid = str(1)
-    include_kpi_request.timestamp = "2021-10-12T13:14:42Z"
-    include_kpi_request.kpi_value.intVal = 500
-
-    return include_kpi_request
-
-###########################
-# Tests Implementation
-###########################
-
-# Test case that makes use of client fixture to test server's CreateKpi method
-def test_create_kpi(monitoring_client,create_kpi_request):
-    # make call to server
-    LOGGER.warning('test_create_kpi requesting')
-    response = monitoring_client.CreateKpi(create_kpi_request)
-    LOGGER.debug(str(response))
-    assert isinstance(response, monitoring_pb2.KpiId)
-
-# Test case that makes use of client fixture to test server's MonitorKpi method
-def test_monitor_kpi(monitoring_client,monitor_kpi_request):
-    LOGGER.warning('test_monitor_kpi begin')
-    response = monitoring_client.MonitorKpi(monitor_kpi_request)
-    LOGGER.debug(str(response))
-    assert isinstance(response, context_pb2.Empty)
-
-
-# Test case that makes use of client fixture to test server's IncludeKpi method
-def test_include_kpi(monitoring_client,include_kpi_request):
-    # make call to server
-    LOGGER.warning('test_include_kpi requesting')
-    response = monitoring_client.IncludeKpi(include_kpi_request)
-    LOGGER.debug(str(response))
-    assert isinstance(response, context_pb2.Empty)
-
-# Test case that makes use of client fixture to test server's GetStreamKpi method
-def test_getstream_kpi(monitoring_client,include_kpi_request):
-    LOGGER.warning('test_getstream_kpi begin')
-    response = monitoring_client.GetStreamKpi(kpi)
-    LOGGER.debug(str(response))
-    #assert isinstance(response, monitoring_pb2.Kpi)
-
-# Test case that makes use of client fixture to test server's GetInstantKpi method
-def test_getinstant_kpi(monitoring_client,kpi_id):
-    LOGGER.warning('test_getinstant_kpi begin')
-    response = monitoring_client.GetInstantKpi(kpi_id)
-    LOGGER.debug(str(response))
-    assert isinstance(response, monitoring_pb2.Kpi)
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..b77699c5b00b58d19c724a98516983754097b5f7
--- /dev/null
+++ b/src/monitoring/tests/test_unitary.py
@@ -0,0 +1,399 @@
+import logging, grpc
+import os
+import sqlite3
+
+import pytest
+from typing import Tuple
+
+from monitoring.proto import context_pb2, kpi_sample_types_pb2
+from monitoring.proto import monitoring_pb2
+from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from monitoring.service import SqliteTools, InfluxTools
+from monitoring.service.MonitoringService import MonitoringService
+from monitoring.service.EventTools import EventsDeviceCollector
+
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
+from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
+from common.message_broker.MessageBroker import MessageBroker
+
+from context.Config import GRPC_SERVICE_PORT as grpc_port_context, GRPC_MAX_WORKERS as grpc_workers_context, GRPC_GRACE_PERIOD as grpc_grace_context
+from context.client.ContextClient import ContextClient
+from context.service.grpc_server.ContextService import ContextService
+from context.service.Populate import populate
+from context.proto.context_pb2 import EventTypeEnum, DeviceEvent, Device
+from context.tests.example_objects import (DEVICE1, DEVICE1_UUID)
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+###########################
+# Tests Setup
+###########################
+
+SERVER_ADDRESS = '127.0.0.1'
+LISTEN_ADDRESS = '[::]'
+GRPC_PORT_MONITORING = 7070
+
+GRPC_PORT_CONTEXT    = 10000 + grpc_port_context    # avoid privileged ports
+
+SCENARIOS = [ # comment/uncomment scenarios to activate/deactivate them in the test unit
+    ('all_inmemory', DatabaseBackendEnum.INMEMORY, {},           MessageBrokerBackendEnum.INMEMORY, {}          ),
+]
+
+INFLUXDB_HOSTNAME = os.environ.get("INFLUXDB_HOSTNAME")
+INFLUXDB_USER = os.environ.get("INFLUXDB_USER")
+INFLUXDB_PASSWORD = os.environ.get("INFLUXDB_PASSWORD")
+INFLUXDB_DATABASE = os.environ.get("INFLUXDB_DATABASE")
+
+@pytest.fixture(scope='session', ids=[str(scenario[0]) for scenario in SCENARIOS], params=SCENARIOS)
+def context_db_mb(request) -> Tuple[Database, MessageBroker]:
+    name,db_backend,db_settings,mb_backend,mb_settings = request.param
+    msg = 'Running scenario {:s} db_backend={:s}, db_settings={:s}, mb_backend={:s}, mb_settings={:s}...'
+    LOGGER.info(msg.format(str(name), str(db_backend.value), str(db_settings), str(mb_backend.value), str(mb_settings)))
+    _database = Database(get_database_backend(backend=db_backend, **db_settings))
+    _message_broker = MessageBroker(get_messagebroker_backend(backend=mb_backend, **mb_settings))
+    yield _database, _message_broker
+    _message_broker.terminate()
+
+@pytest.fixture(scope='session')
+def context_service_grpc(context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name
+    database = context_db_mb[0]
+    database.clear_all()
+    _service = ContextService(
+        database, context_db_mb[1], port=GRPC_PORT_CONTEXT, max_workers=grpc_workers_context,
+        grace_period=grpc_grace_context)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def context_client_grpc(context_service_grpc : ContextService): # pylint: disable=redefined-outer-name
+    _client = ContextClient(address='localhost', port=GRPC_PORT_CONTEXT)
+    yield _client
+    _client.close()
+
+
+# This fixture will be requested by test cases and last during testing session
+@pytest.fixture(scope='session')
+def monitoring_service():
+    LOGGER.warning('monitoring_service begin')
+
+    service_port    = GRPC_SERVICE_PORT
+    max_workers     = GRPC_MAX_WORKERS
+    grace_period    = GRPC_GRACE_PERIOD
+
+    LOGGER.info('Initializing MonitoringService...')
+    grpc_service = MonitoringService(port=service_port, max_workers=max_workers, grace_period=grace_period)
+    server = grpc_service.start()
+
+    # yield the server, when test finishes, execution will resume to stop it
+    LOGGER.warning('monitoring_service yielding')
+    yield server
+
+    LOGGER.info('Terminating MonitoringService...')
+    grpc_service.stop()
+
+# This fixture will be requested by test cases and last during testing session.
+# The client requires the server, so client fixture has the server as dependency.
+@pytest.fixture(scope='session')
+def monitoring_client(monitoring_service):
+    LOGGER.warning('monitoring_client begin')
+    client = MonitoringClient(server=SERVER_ADDRESS, port=GRPC_PORT_MONITORING)  # instantiate the client
+    LOGGER.warning('monitoring_client returning')
+    return client
+
+# This fixture will be requested by test cases and last during testing session.
+@pytest.fixture(scope='session')
+def kpi():
+    LOGGER.warning('test_include_kpi begin')
+    # form request
+    kpi                     = monitoring_pb2.Kpi()
+    kpi.kpi_id.kpi_id.uuid  = 'KPIID0000'
+    kpi.kpiDescription      = 'KPI Desc'
+    return kpi
+
+@pytest.fixture(scope='session')
+def kpi_id():
+    LOGGER.warning('test_include_kpi begin')
+
+    # form request
+    kpi_id              = monitoring_pb2.KpiId()
+    kpi_id.kpi_id.uuid  = str(1)
+
+    return kpi_id
+
+@pytest.fixture(scope='session')
+def sql_db():
+    sql_db = SqliteTools.SQLite('monitoring.db')
+    return sql_db
+
+@pytest.fixture(scope='session')
+def influx_db():
+    influx_db = InfluxTools.Influx(INFLUXDB_HOSTNAME, "8086", INFLUXDB_USER, INFLUXDB_PASSWORD, INFLUXDB_DATABASE)
+    return influx_db
+
+@pytest.fixture(scope='session')
+def create_kpi_request():
+    LOGGER.warning('test_include_kpi begin')
+
+    create_kpi_request                                  = monitoring_pb2.KpiDescriptor()
+    create_kpi_request.kpi_description                  = 'KPI Description Test'
+    create_kpi_request.kpi_sample_type                  = kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED
+    create_kpi_request.device_id.device_uuid.uuid       = 'DEV1'  # pylint: disable=maybe-no-member
+    create_kpi_request.service_id.service_uuid.uuid     = "SERV1"
+    create_kpi_request.endpoint_id.endpoint_uuid.uuid   = "END1"
+
+    return create_kpi_request
+
+@pytest.fixture(scope='session')
+def monitor_kpi_request():
+    LOGGER.warning('test_monitor_kpi begin')
+
+    monitor_kpi_request                     = monitoring_pb2.MonitorKpiRequest()
+    monitor_kpi_request.kpi_id.kpi_id.uuid  = str(1)
+    monitor_kpi_request.sampling_duration_s = 120
+    monitor_kpi_request.sampling_interval_s = 5
+
+    return monitor_kpi_request
+
+
+@pytest.fixture(scope='session')
+def include_kpi_request():
+    LOGGER.warning('test_include_kpi begin')
+
+    include_kpi_request                     = monitoring_pb2.Kpi()
+    include_kpi_request.kpi_id.kpi_id.uuid  = str(1)
+    include_kpi_request.timestamp           = "2021-10-12T13:14:42Z"
+    include_kpi_request.kpi_value.intVal    = 500
+
+    return include_kpi_request
+
+###########################
+# Tests Implementation
+###########################
+
+# Test case that makes use of client fixture to test server's CreateKpi method
+def test_create_kpi(monitoring_client,create_kpi_request):
+    # make call to server
+    LOGGER.warning('test_create_kpi requesting')
+    response = monitoring_client.CreateKpi(create_kpi_request)
+    LOGGER.debug(str(response))
+    assert isinstance(response, monitoring_pb2.KpiId)
+
+# Test case that makes use of client fixture to test server's MonitorKpi method
+def test_monitor_kpi(monitoring_client,create_kpi_request):
+    LOGGER.warning('test_monitor_kpi begin')
+
+    response = monitoring_client.CreateKpi(create_kpi_request)
+
+    monitor_kpi_request                     = monitoring_pb2.MonitorKpiRequest()
+    monitor_kpi_request.kpi_id.kpi_id.uuid  = response.kpi_id.uuid
+    monitor_kpi_request.sampling_duration_s = 120
+    monitor_kpi_request.sampling_interval_s = 5
+
+    response = monitoring_client.MonitorKpi(monitor_kpi_request)
+    LOGGER.debug(str(response))
+    assert isinstance(response, context_pb2.Empty)
+
+
+# Test case that makes use of client fixture to test server's IncludeKpi method
+def test_include_kpi(monitoring_client,include_kpi_request):
+    # make call to server
+    LOGGER.warning('test_include_kpi requesting')
+    response = monitoring_client.IncludeKpi(include_kpi_request)
+    LOGGER.debug(str(response))
+    assert isinstance(response, context_pb2.Empty)
+
+# Test case that makes use of client fixture to test server's GetStreamKpi method
+def test_get_stream_kpi(monitoring_client,kpi):
+    LOGGER.warning('test_getstream_kpi begin')
+    response = monitoring_client.GetStreamKpi(kpi)
+    LOGGER.debug(str(response))
+    #assert isinstance(response, monitoring_pb2.Kpi)
+
+# Test case that makes use of client fixture to test server's GetInstantKpi method
+def test_get_instant_kpi(monitoring_client,kpi_id):
+    LOGGER.warning('test_getinstant_kpi begin')
+    response = monitoring_client.GetInstantKpi(kpi_id)
+    LOGGER.debug(str(response))
+    assert isinstance(response, monitoring_pb2.Kpi)
+
+# Test case that makes use of client fixture to test server's GetInstantKpi method
+def test_get_kpidescritor_kpi(monitoring_client,create_kpi_request):
+    LOGGER.warning('test_getkpidescritor_kpi begin')
+
+    response = monitoring_client.CreateKpi(create_kpi_request)
+
+    response = monitoring_client.GetKpiDescriptor(response)
+    LOGGER.debug(str(response))
+    assert isinstance(response, monitoring_pb2.KpiDescriptor)
+
+def test_sqlitedb_tools_insert_kpi(sql_db, create_kpi_request):
+    LOGGER.warning('test_sqlitedb_tools_insert_kpi begin')
+
+    kpi_description = create_kpi_request.kpi_description
+    kpi_sample_type = create_kpi_request.kpi_sample_type
+    kpi_device_id = create_kpi_request.device_id.device_uuid.uuid
+    kpi_endpoint_id = create_kpi_request.endpoint_id.endpoint_uuid.uuid
+    kpi_service_id = create_kpi_request.service_id.service_uuid.uuid
+
+    response = sql_db.insert_KPI(kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
+    assert isinstance(response, int)
+
+def test_sqlitedb_tools_get_kpi(sql_db, create_kpi_request):
+    LOGGER.warning('test_sqlitedb_tools_get_kpi begin')
+
+    kpi_description = create_kpi_request.kpi_description
+    kpi_sample_type = create_kpi_request.kpi_sample_type
+    kpi_device_id = create_kpi_request.device_id.device_uuid.uuid
+    kpi_endpoint_id = create_kpi_request.endpoint_id.endpoint_uuid.uuid
+    kpi_service_id = create_kpi_request.service_id.service_uuid.uuid
+
+    kpi_id = sql_db.insert_KPI(kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
+
+    response = sql_db.get_KPI(kpi_id)
+    assert isinstance(response, tuple)
+
+def test_sqlitedb_tools_get_kpis(sql_db):
+    LOGGER.warning('test_sqlitedb_tools_get_kpis begin')
+    response = sql_db.get_KPIS()
+    assert isinstance(response, list)
+
+def test_sqlitedb_tools_delete_kpi(sql_db, create_kpi_request):
+    LOGGER.warning('test_sqlitedb_tools_get_kpi begin')
+
+    response = sql_db.delete_KPI("DEV1",kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED)
+
+    if response == False:
+        kpi_description = create_kpi_request.kpi_description
+        kpi_sample_type = create_kpi_request.kpi_sample_type
+        kpi_device_id = create_kpi_request.device_id.device_uuid.uuid
+        kpi_endpoint_id = create_kpi_request.endpoint_id.endpoint_uuid.uuid
+        kpi_service_id = create_kpi_request.service_id.service_uuid.uuid
+
+        sql_db.insert_KPI(kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
+        response = sql_db.delete_KPI("DEV1", kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED)
+
+    assert response == True
+
+def test_sqlitedb_tools_delete_kpid_id(sql_db, create_kpi_request):
+    LOGGER.warning('test_sqlitedb_tools_delete_kpid_id begin')
+
+    response = sql_db.delete_kpid_id(1)
+
+    if response == False:
+        kpi_description = create_kpi_request.kpi_description
+        kpi_sample_type = create_kpi_request.kpi_sample_type
+        kpi_device_id = create_kpi_request.device_id.device_uuid.uuid
+        kpi_endpoint_id = create_kpi_request.endpoint_id.endpoint_uuid.uuid
+        kpi_service_id = create_kpi_request.service_id.service_uuid.uuid
+
+        kpi_id = sql_db.insert_KPI(kpi_description, kpi_sample_type, kpi_device_id, kpi_endpoint_id, kpi_service_id)
+        response = sql_db.delete_kpid_id(kpi_id)
+
+    assert response == True
+
+
+def test_influxdb_tools_write_kpi(influx_db):
+    LOGGER.warning('test_influxdb_tools_write_kpi begin')
+
+def test_influxdb_tools_read_kpi_points(influx_db):
+    LOGGER.warning('test_influxdb_tools_read_kpi_points begin')
+
+
+def test_events_tools(context_client_grpc: ContextClient,  # pylint: disable=redefined-outer-name
+    monitoring_client : MonitoringClient,
+    context_db_mb: Tuple[Database, MessageBroker]):
+    LOGGER.warning('test_get_device_events begin')
+
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsDeviceCollector(context_client_grpc, monitoring_client)
+    events_collector.start()
+
+    # # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+    populate('localhost', GRPC_PORT_CONTEXT) # place this call in the appropriate line, according to your tests
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    events_collector.stop()
+
+
+def test_get_device_events(context_client_grpc: ContextClient,  # pylint: disable=redefined-outer-name
+    monitoring_client : MonitoringClient,
+    context_db_mb: Tuple[Database, MessageBroker]):
+
+    LOGGER.warning('test_get_device_events begin')
+
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsDeviceCollector(context_client_grpc,monitoring_client)
+    events_collector.start()
+
+    # # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+    populate('localhost', GRPC_PORT_CONTEXT) # place this call in the appropriate line, according to your tests
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    events_collector.stop()
+
+def test_listen_events(monitoring_client: MonitoringClient,
+    context_client_grpc: ContextClient,  # pylint: disable=redefined-outer-name
+    context_db_mb: Tuple[Database, MessageBroker]):
+
+    LOGGER.warning('test_listen_events begin')
+
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsDeviceCollector(context_client_grpc,monitoring_client)
+    events_collector.start()
+
+    # # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+    populate('localhost', GRPC_PORT_CONTEXT) # place this call in the appropriate line, according to your tests
+
+    kpi_id_list = events_collector.listen_events()
+
+    assert bool(kpi_id_list) == True
+
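The sql_db and influx_db fixtures above expect a reachable InfluxDB whose settings arrive through INFLUXDB_HOSTNAME, INFLUXDB_USER, INFLUXDB_PASSWORD and INFLUXDB_DATABASE. A small local-runner sketch with placeholder values (not the project's real credentials) for exercising this file outside the CI pipeline:

    import os, sys
    import pytest

    # Placeholder connection settings; point these at a real local InfluxDB instance.
    PLACEHOLDERS = {
        'INFLUXDB_HOSTNAME': 'localhost',
        'INFLUXDB_USER'    : 'changeme',
        'INFLUXDB_PASSWORD': 'changeme',
        'INFLUXDB_DATABASE': 'monitoring_test',
    }
    for key, value in PLACEHOLDERS.items():
        os.environ.setdefault(key, value)

    # Same flags the unit-test CI jobs elsewhere in this patch pass to pytest.
    sys.exit(pytest.main(['--log-level=DEBUG', '--verbose',
                          'src/monitoring/tests/test_unitary.py']))
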
diff --git a/src/opticalattackmitigator/.gitlab-ci.yml b/src/opticalattackmitigator/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..47a25be3c9c93c008821b1015fe66d1e06a75920
--- /dev/null
+++ b/src/opticalattackmitigator/.gitlab-ci.yml
@@ -0,0 +1,83 @@
+# build, tag and push the Docker image to the gitlab registry
+build opticalattackmitigator:
+  variables:
+    IMAGE_NAME: 'opticalattackmitigator' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  # after_script:
+  #   - docker rmi $(docker images --quiet --filter=dangling=true)
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/$IMAGE_NAME.yaml
+      - .gitlab-ci.yml
+
+# apply unit test to the opticalattackmitigator component
+unit test opticalattackmitigator:
+  variables:
+    IMAGE_NAME: 'opticalattackmitigator' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: unit_test
+  needs:
+    - build opticalattackmitigator
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker run --name $IMAGE_NAME -d -p 10007:10007 --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - sleep 5
+    - docker ps -a
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+  after_script:
+    #- docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+
+# Deployment of the opticalattackmitigator service in Kubernetes Cluster
+deploy opticalattackmitigator:
+  variables:
+    IMAGE_NAME: 'opticalattackmitigator' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: deploy
+  needs:
+    - unit test opticalattackmitigator
+    # - integ_test execute
+  script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+    - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
\ No newline at end of file
diff --git a/src/opticalattackmitigator/Config.py b/src/opticalattackmitigator/Config.py
new file mode 100644
index 0000000000000000000000000000000000000000..64785206cff4f3b5049d78e15a8d8ac0b82caf9b
--- /dev/null
+++ b/src/opticalattackmitigator/Config.py
@@ -0,0 +1,12 @@
+import logging
+
+# General settings
+LOG_LEVEL = logging.DEBUG
+
+# gRPC settings
+GRPC_SERVICE_PORT = 10007
+GRPC_MAX_WORKERS  = 10
+GRPC_GRACE_PERIOD = 60
+
+# Prometheus settings
+METRICS_PORT = 9192
diff --git a/src/centralizedattackdetector/Dockerfile b/src/opticalattackmitigator/Dockerfile
similarity index 61%
rename from src/centralizedattackdetector/Dockerfile
rename to src/opticalattackmitigator/Dockerfile
index f683a8c853f1741ee3dbb1031a6df4aa00419d42..126786edd5466fbfdfadef1af804aba540f0bd3a 100644
--- a/src/centralizedattackdetector/Dockerfile
+++ b/src/opticalattackmitigator/Dockerfile
@@ -20,16 +20,16 @@ RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
 WORKDIR /var/teraflow
 
 # Create module sub-folders
-RUN mkdir -p /var/teraflow/centralizedcybersecurity
+RUN mkdir -p /var/teraflow/opticalattackmitigator
 
 # Get Python packages per module
-COPY centralizedattackdetector/requirements.in centralizedattackdetector/requirements.in
-RUN pip-compile --output-file=centralizedattackdetector/requirements.txt centralizedattackdetector/requirements.in
-RUN python3 -m pip install -r centralizedattackdetector/requirements.txt
+COPY opticalattackmitigator/requirements.in opticalattackmitigator/requirements.in
+RUN pip-compile --output-file=opticalattackmitigator/requirements.txt opticalattackmitigator/requirements.in
+RUN python3 -m pip install -r opticalattackmitigator/requirements.txt
 
 # Add files into working directory
 COPY common/. common
-COPY centralizedattackdetector/. centralizedattackdetector
+COPY opticalattackmitigator/. opticalattackmitigator
 
-# Start centralizedattackdetector service
-ENTRYPOINT ["python", "-m", "centralizedattackdetector.service"]
+# Start opticalattackmitigator service
+ENTRYPOINT ["python", "-m", "opticalattackmitigator.service"]
diff --git a/src/opticalattackmitigator/__init__.py b/src/opticalattackmitigator/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalattackmitigator/client/OpticalAttackMitigatorClient.py b/src/opticalattackmitigator/client/OpticalAttackMitigatorClient.py
new file mode 100644
index 0000000000000000000000000000000000000000..f303dcc3fc8593b20b0a0ab1a7f6d46a724edec1
--- /dev/null
+++ b/src/opticalattackmitigator/client/OpticalAttackMitigatorClient.py
@@ -0,0 +1,33 @@
+import grpc, logging
+from common.tools.client.RetryDecorator import retry, delay_exponential
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription, AttackResponse
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2_grpc import AttackMitigatorStub
+
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+
+class OpticalAttackMitigatorClient:
+    def __init__(self, address, port):
+        self.endpoint = '{:s}:{:s}'.format(str(address), str(port))
+        LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
+        self.channel = None
+        self.stub = None
+        self.connect()
+        LOGGER.debug('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = AttackMitigatorStub(self.channel)
+
+    def close(self):
+        if(self.channel is not None): self.channel.close()
+        self.channel = None
+        self.stub = None
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def NotifyAttack(self, request : AttackDescription) -> AttackResponse:
+        LOGGER.debug('NotifyAttack request: {:s}'.format(str(request)))
+        response = self.stub.NotifyAttack(request)
+        LOGGER.debug('NotifyAttack result: {:s}'.format(str(response)))
+        return response
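
A usage sketch for the client above (an assumption of typical wiring, not code from this patch): the port matches GRPC_SERVICE_PORT in the component's Config.py earlier in this diff, and the AttackDescription is left empty because its field layout is not shown in this section:

    from opticalattackmitigator.client.OpticalAttackMitigatorClient import OpticalAttackMitigatorClient
    from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription

    client = OpticalAttackMitigatorClient(address='localhost', port=10007)  # GRPC_SERVICE_PORT from Config.py
    attack = AttackDescription()            # fields omitted: the .proto definition is not part of this section
    response = client.NotifyAttack(attack)  # retried with exponential backoff by the decorator
    print(response)
    client.close()
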
diff --git a/src/opticalattackmitigator/client/__init__.py b/src/opticalattackmitigator/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalattackmitigator/genproto.sh b/src/opticalattackmitigator/genproto.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9f0651441172ee902f16faedf65c6c0c54fba9a7
--- /dev/null
+++ b/src/opticalattackmitigator/genproto.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -eu
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash -e
+
+# Make folder containing the script the root folder for its execution
+cd $(dirname $0)
+
+rm -rf proto/*.py
+rm -rf proto/__pycache__
+touch proto/__init__.py
+
+# building protos of services used
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto kpi_sample_types.proto
+
+rm proto/context_pb2_grpc.py
+rm proto/kpi_sample_types_pb2_grpc.py
+
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/kpi_sample_types_pb2.py
+
+# building current service protos
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto optical_attack_mitigator.proto
+
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/optical_attack_mitigator_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/optical_attack_mitigator_pb2_grpc.py
diff --git a/src/opticalattackmitigator/proto/__init__.py b/src/opticalattackmitigator/proto/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/centralizedattackdetector/proto/context_pb2.py b/src/opticalattackmitigator/proto/context_pb2.py
similarity index 82%
rename from src/centralizedattackdetector/proto/context_pb2.py
rename to src/opticalattackmitigator/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/centralizedattackdetector/proto/context_pb2.py
+++ b/src/opticalattackmitigator/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
diff --git a/src/opticalattackmitigator/proto/kpi_sample_types_pb2.py b/src/opticalattackmitigator/proto/kpi_sample_types_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490
--- /dev/null
+++ b/src/opticalattackmitigator/proto/kpi_sample_types_pb2.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: kpi_sample_types.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='kpi_sample_types.proto',
+  package='kpi_sample_types',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
+)
+
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
+
+
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2.py b/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..651c1b8e9cff9db06021a4b45934f3676a9f9f5e
--- /dev/null
+++ b/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2.py
@@ -0,0 +1,189 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: optical_attack_mitigator.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import context_pb2 as context__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='optical_attack_mitigator.proto',
+  package='optical_attack_mitigator',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x1eoptical_attack_mitigator.proto\x12\x18optical_attack_mitigator\x1a\rcontext.proto\"t\n\x11\x41ttackDescription\x12\x1c\n\x05\x63s_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\x12\x11\n\tattack_id\x18\x02 \x01(\x05\x12\x12\n\nconfidence\x18\x03 \x01(\x02\x12\x1a\n\x12\x61ttack_description\x18\x04 \x01(\t\"\xa2\x01\n\x0e\x41ttackResponse\x12\x1c\n\x05\x63s_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\x12\x11\n\tattack_id\x18\x02 \x01(\x05\x12\x1a\n\x12\x61ttack_description\x18\x03 \x01(\t\x12\x1c\n\x14response_strategy_id\x18\x04 \x01(\x05\x12%\n\x1dresponse_strategy_description\x18\x05 \x01(\t2z\n\x0f\x41ttackMitigator\x12g\n\x0cNotifyAttack\x12+.optical_attack_mitigator.AttackDescription\x1a(.optical_attack_mitigator.AttackResponse\"\x00\x62\x06proto3'
+  ,
+  dependencies=[context__pb2.DESCRIPTOR,])
+
+
+
+
+_ATTACKDESCRIPTION = _descriptor.Descriptor(
+  name='AttackDescription',
+  full_name='optical_attack_mitigator.AttackDescription',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='cs_id', full_name='optical_attack_mitigator.AttackDescription.cs_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='attack_id', full_name='optical_attack_mitigator.AttackDescription.attack_id', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='confidence', full_name='optical_attack_mitigator.AttackDescription.confidence', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='attack_description', full_name='optical_attack_mitigator.AttackDescription.attack_description', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=75,
+  serialized_end=191,
+)
+
+
+_ATTACKRESPONSE = _descriptor.Descriptor(
+  name='AttackResponse',
+  full_name='optical_attack_mitigator.AttackResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='cs_id', full_name='optical_attack_mitigator.AttackResponse.cs_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='attack_id', full_name='optical_attack_mitigator.AttackResponse.attack_id', index=1,
+      number=2, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='attack_description', full_name='optical_attack_mitigator.AttackResponse.attack_description', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='response_strategy_id', full_name='optical_attack_mitigator.AttackResponse.response_strategy_id', index=3,
+      number=4, type=5, cpp_type=1, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='response_strategy_description', full_name='optical_attack_mitigator.AttackResponse.response_strategy_description', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=194,
+  serialized_end=356,
+)
+
+_ATTACKDESCRIPTION.fields_by_name['cs_id'].message_type = context__pb2._UUID
+_ATTACKRESPONSE.fields_by_name['cs_id'].message_type = context__pb2._UUID
+DESCRIPTOR.message_types_by_name['AttackDescription'] = _ATTACKDESCRIPTION
+DESCRIPTOR.message_types_by_name['AttackResponse'] = _ATTACKRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+AttackDescription = _reflection.GeneratedProtocolMessageType('AttackDescription', (_message.Message,), {
+  'DESCRIPTOR' : _ATTACKDESCRIPTION,
+  '__module__' : 'optical_attack_mitigator_pb2'
+  # @@protoc_insertion_point(class_scope:optical_attack_mitigator.AttackDescription)
+  })
+_sym_db.RegisterMessage(AttackDescription)
+
+AttackResponse = _reflection.GeneratedProtocolMessageType('AttackResponse', (_message.Message,), {
+  'DESCRIPTOR' : _ATTACKRESPONSE,
+  '__module__' : 'optical_attack_mitigator_pb2'
+  # @@protoc_insertion_point(class_scope:optical_attack_mitigator.AttackResponse)
+  })
+_sym_db.RegisterMessage(AttackResponse)
+
+
+
+_ATTACKMITIGATOR = _descriptor.ServiceDescriptor(
+  name='AttackMitigator',
+  full_name='optical_attack_mitigator.AttackMitigator',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=358,
+  serialized_end=480,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='NotifyAttack',
+    full_name='optical_attack_mitigator.AttackMitigator.NotifyAttack',
+    index=0,
+    containing_service=None,
+    input_type=_ATTACKDESCRIPTION,
+    output_type=_ATTACKRESPONSE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_ATTACKMITIGATOR)
+
+DESCRIPTOR.services_by_name['AttackMitigator'] = _ATTACKMITIGATOR
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2_grpc.py b/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2_grpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f12816a1f909e073ece0ad5a4b3d8fda4235d89
--- /dev/null
+++ b/src/opticalattackmitigator/proto/optical_attack_mitigator_pb2_grpc.py
@@ -0,0 +1,66 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from . import optical_attack_mitigator_pb2 as optical__attack__mitigator__pb2
+
+
+class AttackMitigatorStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.NotifyAttack = channel.unary_unary(
+                '/optical_attack_mitigator.AttackMitigator/NotifyAttack',
+                request_serializer=optical__attack__mitigator__pb2.AttackDescription.SerializeToString,
+                response_deserializer=optical__attack__mitigator__pb2.AttackResponse.FromString,
+                )
+
+
+class AttackMitigatorServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def NotifyAttack(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_AttackMitigatorServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'NotifyAttack': grpc.unary_unary_rpc_method_handler(
+                    servicer.NotifyAttack,
+                    request_deserializer=optical__attack__mitigator__pb2.AttackDescription.FromString,
+                    response_serializer=optical__attack__mitigator__pb2.AttackResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'optical_attack_mitigator.AttackMitigator', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class AttackMitigator(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def NotifyAttack(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/optical_attack_mitigator.AttackMitigator/NotifyAttack',
+            optical__attack__mitigator__pb2.AttackDescription.SerializeToString,
+            optical__attack__mitigator__pb2.AttackResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
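
Editor's note (not part of the patch): the generated AttackMitigatorStub above can be driven directly, e.g. from a Python shell, which is handy when checking the service in isolation. The sketch below is illustrative only; the helper name, target address and field values are assumptions, and production code would normally go through the component's OpticalAttackMitigatorClient wrapper instead.

    import grpc

    from opticalattackmitigator.Config import GRPC_SERVICE_PORT
    from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription
    from opticalattackmitigator.proto.optical_attack_mitigator_pb2_grpc import AttackMitigatorStub

    def notify_attack_example(host='127.0.0.1', port=GRPC_SERVICE_PORT):
        # open a plain (insecure) channel towards the mitigator endpoint
        with grpc.insecure_channel('{:s}:{:s}'.format(str(host), str(port))) as channel:
            stub = AttackMitigatorStub(channel)
            request = AttackDescription()
            request.attack_id = 1                       # int32 attack identifier (illustrative)
            request.confidence = 0.9                    # float detection confidence (illustrative)
            request.attack_description = 'illustrative attack description'
            response = stub.NotifyAttack(request)       # unary-unary RPC, returns AttackResponse
            return response.response_strategy_description
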
diff --git a/src/opticalattackmitigator/requirements.in b/src/opticalattackmitigator/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..00acd77fe30ca10ffe3af04f7cf7ced2cb9256f8
--- /dev/null
+++ b/src/opticalattackmitigator/requirements.in
@@ -0,0 +1,9 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
+# from the monitoring component
+influxdb
+python-json-logger
diff --git a/src/opticalattackmitigator/service/OpticalAttackMitigatorService.py b/src/opticalattackmitigator/service/OpticalAttackMitigatorService.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc2e86067b437bb4facd1e6608f48439fbf2a03e
--- /dev/null
+++ b/src/opticalattackmitigator/service/OpticalAttackMitigatorService.py
@@ -0,0 +1,58 @@
+import grpc
+import logging
+from concurrent import futures
+from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
+from grpc_health.v1.health_pb2 import HealthCheckResponse
+from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2_grpc import (
+    add_AttackMitigatorServicer_to_server)
+from opticalattackmitigator.service.OpticalAttackMitigatorServiceServicerImpl import (
+    OpticalAttackMitigatorServiceServicerImpl)
+from opticalattackmitigator.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class OpticalAttackMitigatorService:
+    def __init__(
+        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        grace_period=GRPC_GRACE_PERIOD):
+
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.attack_mitigator_servicer = None
+        self.health_servicer = None
+        self.pool = None
+        self.server = None
+
+    def start(self):
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
+        LOGGER.debug('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.attack_mitigator_servicer = OpticalAttackMitigatorServiceServicerImpl()
+        add_AttackMitigatorServicer_to_server(self.attack_mitigator_servicer, self.server)
+
+        self.health_servicer = HealthServicer(
+            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
+        add_HealthServicer_to_server(self.health_servicer, self.server)
+
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        LOGGER.info('Listening on {:s}...'.format(self.endpoint))
+        self.server.start()
+        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
+
+        LOGGER.debug('Service started')
+
+    def stop(self):
+        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
+        self.health_servicer.enter_graceful_shutdown()
+        self.server.stop(self.grace_period)
+        LOGGER.debug('Service stopped')
diff --git a/src/opticalattackmitigator/service/OpticalAttackMitigatorServiceServicerImpl.py b/src/opticalattackmitigator/service/OpticalAttackMitigatorServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7cbc6b0956f26fb87e7d2567cc1c9065482cfd4
--- /dev/null
+++ b/src/opticalattackmitigator/service/OpticalAttackMitigatorServiceServicerImpl.py
@@ -0,0 +1,26 @@
+import os, grpc, logging, random
+from influxdb import InfluxDBClient
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2_grpc import (
+    AttackMitigatorServicer)
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription, AttackResponse
+
+LOGGER = logging.getLogger(__name__)
+
+SERVICE_NAME = 'OpticalAttackMitigator'
+METHOD_NAMES = ['NotifyAttack']
+METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+
+class OpticalAttackMitigatorServiceServicerImpl(AttackMitigatorServicer):
+
+    def __init__(self):
+        LOGGER.debug('Creating Servicer...')
+        LOGGER.debug('Servicer Created')
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def NotifyAttack(self, request : AttackDescription, context : grpc.ServicerContext) -> AttackResponse:
+        LOGGER.debug(f"NotifyAttack: {request}")
+        response: AttackResponse = AttackResponse()
+        response.response_strategy_description = 'The AttackMitigator has received the attack description.'
+        return response
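
Editor's note (not part of the patch): a servicer instance can also be exercised in-process for a quick smoke check, without starting the gRPC server. This sketch is an editorial assumption: it presumes the safe_and_metered_rpc_method wrapper only touches the gRPC context on the error path, so passing None is acceptable on the happy path; the unit test further below shows the intended path through the client and the running service.

    from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription
    from opticalattackmitigator.service.OpticalAttackMitigatorServiceServicerImpl import (
        OpticalAttackMitigatorServiceServicerImpl)

    servicer = OpticalAttackMitigatorServiceServicerImpl()
    # happy-path call; no gRPC context is passed here (see assumption above)
    reply = servicer.NotifyAttack(AttackDescription(), None)
    print(reply.response_strategy_description)
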
diff --git a/src/opticalattackmitigator/service/__init__.py b/src/opticalattackmitigator/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalattackmitigator/service/__main__.py b/src/opticalattackmitigator/service/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcbf4e3e4c9c74ef25e6d95f6d70edf5fba25c68
--- /dev/null
+++ b/src/opticalattackmitigator/service/__main__.py
@@ -0,0 +1,50 @@
+import os, logging, signal, sys, time, threading, multiprocessing
+from prometheus_client import start_http_server
+from common.Settings import get_setting
+from opticalattackmitigator.Config import (
+    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT)
+from opticalattackmitigator.service.OpticalAttackMitigatorService import OpticalAttackMitigatorService
+
+terminate = threading.Event()
+LOGGER = None
+
+def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
+    LOGGER.warning('Terminate signal received')
+    terminate.set()
+
+def main():
+    global LOGGER # pylint: disable=global-statement
+
+    service_port = get_setting('OPTICALATTACKMITIGATORSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
+    max_workers  = get_setting('MAX_WORKERS',                                     default=GRPC_MAX_WORKERS )
+    grace_period = get_setting('GRACE_PERIOD',                                    default=GRPC_GRACE_PERIOD)
+    log_level    = get_setting('LOG_LEVEL',                                       default=LOG_LEVEL        )
+    metrics_port = get_setting('METRICS_PORT',                                    default=METRICS_PORT     )
+
+    logging.basicConfig(level=log_level)
+    LOGGER = logging.getLogger(__name__)
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    LOGGER.info('Starting...')
+
+    # Start metrics server
+    start_http_server(metrics_port)
+
+    # Start the OpticalAttackMitigator gRPC service
+    grpc_service = OpticalAttackMitigatorService(
+        port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service.start()
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(timeout=0.1): pass
+
+    LOGGER.info('Terminating...')
+    grpc_service.stop()
+
+    LOGGER.info('Bye')
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/opticalattackmitigator/tests/__init__.py b/src/opticalattackmitigator/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalattackmitigator/tests/test_unitary.py b/src/opticalattackmitigator/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..afcb1a699186fb10408cfdc4309fc790fa91c198
--- /dev/null
+++ b/src/opticalattackmitigator/tests/test_unitary.py
@@ -0,0 +1,28 @@
+import logging, pytest
+from opticalattackmitigator.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from opticalattackmitigator.client.OpticalAttackMitigatorClient import OpticalAttackMitigatorClient
+from opticalattackmitigator.service.OpticalAttackMitigatorService import OpticalAttackMitigatorService
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription, AttackResponse
+
+port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+@pytest.fixture(scope='session')
+def optical_attack_mitigator_service():
+    _service = OpticalAttackMitigatorService(
+        port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def optical_attack_mitigator_client(optical_attack_mitigator_service):
+    _client = OpticalAttackMitigatorClient(address='127.0.0.1', port=port)
+    yield _client
+    _client.close()
+
+def test_call_service(optical_attack_mitigator_client: OpticalAttackMitigatorClient):
+    request = AttackDescription()
+    optical_attack_mitigator_client.NotifyAttack(request)
diff --git a/src/opticalcentralizedattackdetector/.gitlab-ci.yml b/src/opticalcentralizedattackdetector/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..11a2c97e6b1944584d5aa48774f7f7a85b8e902c
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/.gitlab-ci.yml
@@ -0,0 +1,83 @@
+# build, tag and push the Docker image to the gitlab registry
+build opticalcentralizedattackdetector:
+  variables:
+    IMAGE_NAME: 'opticalcentralizedattackdetector' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  # after_script:
+  #   - docker rmi $(docker images --quiet --filter=dangling=true)
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+# apply unit test to the opticalcentralizedattackdetector component
+unit test opticalcentralizedattackdetector:
+  variables:
+    IMAGE_NAME: 'opticalcentralizedattackdetector' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: unit_test
+  needs:
+    - build opticalcentralizedattackdetector
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker run --name $IMAGE_NAME -d -p 10005:10005 --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - sleep 5
+    - docker ps -a
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+  after_script:
+    #- docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - changes:
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
+      - .gitlab-ci.yml
+
+
+# Deployment of the opticalcentralizedattackdetector service in Kubernetes Cluster
+deploy opticalcentralizedattackdetector:
+  variables:
+    IMAGE_NAME: 'opticalcentralizedattackdetector' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: deploy
+  needs:
+    - unit test opticalcentralizedattackdetector
+    # - integ_test execute
+  script:
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+    - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
\ No newline at end of file
diff --git a/src/opticalcentralizedattackdetector/Config.py b/src/opticalcentralizedattackdetector/Config.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a6b7aa5651566fb2305f08baec7c6f33872fd36
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/Config.py
@@ -0,0 +1,26 @@
+import logging
+
+# General settings
+LOG_LEVEL = logging.DEBUG
+
+# gRPC settings
+GRPC_SERVICE_PORT = 10005
+GRPC_MAX_WORKERS  = 10
+GRPC_GRACE_PERIOD = 60
+
+# service settings
+MONITORING_INTERVAL = 2  # monitoring interval in seconds
+#TODO: adjust the addresses below for the specific case
+MONITORING_SERVICE_ADDRESS = 'monitoring'  # address/name of the monitoring service
+# MONITORING_SERVICE_ADDRESS = '10.99.41.20'  # address/name of the monitoring service
+CONTEXT_SERVICE_ADDRESS = 'contextservice'  # address/name of the context service
+# CONTEXT_SERVICE_ADDRESS = '10.107.199.65'  # address/name of the context service
+SERVICE_SERVICE_ADDRESS = 'serviceservice'  # address/name of the service service
+# SERVICE_SERVICE_ADDRESS = '10.99.234.88'  # address/name of the service service
+# INFERENCE_SERVICE_ADDRESS = '10.108.113.78'  # address/name of the inference service
+INFERENCE_SERVICE_ADDRESS = 'dbscanservingservice'  # address/name of the inference service
+# ATTACK_MITIGATOR_SERVICE_ADDRESS = '10.96.248.167'
+ATTACK_MITIGATOR_SERVICE_ADDRESS = 'opticalattackmitigatorservice'
+
+# Prometheus settings
+METRICS_PORT = 9192
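The addresses above are the in-cluster DNS names of the corresponding Services, with the commented IPs as one-off alternatives for a concrete deployment. A rough sketch of how a caller could resolve one of these endpoints at runtime, letting the environment override the default; the variable name and port value here are placeholders, not part of the component:

import os
from opticalcentralizedattackdetector.Config import INFERENCE_SERVICE_ADDRESS

# Placeholder environment variable name and port; adjust to the real deployment.
address  = os.environ.get('INFERENCE_SERVICE_ADDRESS', INFERENCE_SERVICE_ADDRESS)
port     = int(os.environ.get('INFERENCE_SERVICE_PORT_GRPC', '10006'))
endpoint = '{:s}:{:d}'.format(address, port)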
diff --git a/src/opticalcentralizedattackdetector/Dockerfile b/src/opticalcentralizedattackdetector/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..43f7b9457fe1413ca9d9dc520433f184717d0c2a
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/Dockerfile
@@ -0,0 +1,42 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=0
+
+# Download the gRPC health probe
+RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
+    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
+    chmod +x /bin/grpc_health_probe
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/opticalcentralizedattackdetector
+
+# Get Python packages per module
+COPY opticalcentralizedattackdetector/requirements.in opticalcentralizedattackdetector/requirements.in
+RUN pip-compile --output-file=opticalcentralizedattackdetector/requirements.txt opticalcentralizedattackdetector/requirements.in
+RUN python3 -m pip install -r opticalcentralizedattackdetector/requirements.txt
+
+# Add files into working directory
+COPY common/. common
+COPY context/. context
+COPY monitoring/. monitoring
+COPY service/. service
+COPY dbscanserving/. dbscanserving
+COPY opticalattackmitigator/. opticalattackmitigator
+COPY opticalcentralizedattackdetector/. opticalcentralizedattackdetector
+
+ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
+
+# Start opticalcentralizedattackdetector service
+ENTRYPOINT ["python", "-m", "opticalcentralizedattackdetector.service"]
diff --git a/src/opticalcentralizedattackdetector/__init__.py b/src/opticalcentralizedattackdetector/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/centralizedattackdetector/client/CentralizedAttackDetectorClient.py b/src/opticalcentralizedattackdetector/client/OpticalCentralizedAttackDetectorClient.py
similarity index 84%
rename from src/centralizedattackdetector/client/CentralizedAttackDetectorClient.py
rename to src/opticalcentralizedattackdetector/client/OpticalCentralizedAttackDetectorClient.py
index da367972af24601500bf9844e26891cbd7bcc35b..c28507581d99c1df47e92eead1fce6ab1c7db3a7 100644
--- a/src/centralizedattackdetector/client/CentralizedAttackDetectorClient.py
+++ b/src/opticalcentralizedattackdetector/client/OpticalCentralizedAttackDetectorClient.py
@@ -1,14 +1,14 @@
 import grpc, logging
 from common.tools.client.RetryDecorator import retry, delay_exponential
-from centralizedattackdetector.proto.context_pb2 import Empty, Service
-from centralizedattackdetector.proto.monitoring_pb2 import KpiList
-from centralizedattackdetector.proto.centralized_attack_detector_pb2_grpc import CentralizedAttackDetectorServiceStub
+from opticalcentralizedattackdetector.proto.context_pb2 import Empty, Service
+from opticalcentralizedattackdetector.proto.monitoring_pb2 import KpiList
+from opticalcentralizedattackdetector.proto.optical_centralized_attack_detector_pb2_grpc import OpticalCentralizedAttackDetectorServiceStub
 
 LOGGER = logging.getLogger(__name__)
 MAX_RETRIES = 15
 DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
 
-class CentralizedAttackDetectorClient:
+class OpticalCentralizedAttackDetectorClient:
     def __init__(self, address, port):
         self.endpoint = '{:s}:{:s}'.format(str(address), str(port))
         LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
@@ -19,7 +19,7 @@ class CentralizedAttackDetectorClient:
 
     def connect(self):
         self.channel = grpc.insecure_channel(self.endpoint)
-        self.stub = CentralizedAttackDetectorServiceStub(self.channel)
+        self.stub = OpticalCentralizedAttackDetectorServiceStub(self.channel)
 
     def close(self):
         if(self.channel is not None): self.channel.close()
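For orientation, the renamed client keeps the same lifecycle as before: construct it with an address and port, connect() to build the channel and the renamed stub, and close() when done. A minimal usage sketch, with the port taken from the detector's Config and the RPC wrapper methods themselves living in the unchanged remainder of this file:

from opticalcentralizedattackdetector.client.OpticalCentralizedAttackDetectorClient import (
    OpticalCentralizedAttackDetectorClient)

client = OpticalCentralizedAttackDetectorClient(address='127.0.0.1', port=10005)
client.connect()  # builds the insecure channel and OpticalCentralizedAttackDetectorServiceStub
# ... call the stub-backed methods defined further down in the file ...
client.close()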
diff --git a/src/opticalcentralizedattackdetector/client/__init__.py b/src/opticalcentralizedattackdetector/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/centralizedattackdetector/genproto.sh b/src/opticalcentralizedattackdetector/genproto.sh
similarity index 75%
rename from src/centralizedattackdetector/genproto.sh
rename to src/opticalcentralizedattackdetector/genproto.sh
index 548428f37721abde24d35780207343208aa8f4b9..76df9bf83af19ac8f1528c750b7d7f11e0a97cf3 100755
--- a/src/centralizedattackdetector/genproto.sh
+++ b/src/opticalcentralizedattackdetector/genproto.sh
@@ -27,17 +27,20 @@ touch proto/__init__.py
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto monitoring.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto kpi_sample_types.proto
 
 rm proto/context_pb2_grpc.py
 rm proto/service_pb2_grpc.py
 rm proto/monitoring_pb2_grpc.py
+rm proto/kpi_sample_types_pb2_grpc.py
 
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/monitoring_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/kpi_sample_types_pb2.py
 
 # building current service protos
-python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto centralized_attack_detector.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto optical_centralized_attack_detector.proto
 
-sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/centralized_attack_detector_pb2.py
-sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/centralized_attack_detector_pb2_grpc.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/optical_centralized_attack_detector_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/optical_centralized_attack_detector_pb2_grpc.py
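The sed expressions above turn the absolute imports emitted by grpc_tools.protoc into package-relative ones; this is what produces the "from . import kpi_sample_types_pb2 ..." line visible in the generated module below. The same substitution expressed in Python, for clarity:

import re

line  = 'import kpi_sample_types_pb2 as kpi__sample__types__pb2'
fixed = re.sub(r'(import .*)_pb2', r'from . \1_pb2', line)
print(fixed)  # from . import kpi_sample_types_pb2 as kpi__sample__types__pb2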
diff --git a/src/opticalcentralizedattackdetector/proto/__init__.py b/src/opticalcentralizedattackdetector/proto/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalcentralizedattackdetector/proto/context_pb2.py b/src/opticalcentralizedattackdetector/proto/context_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..68602b16f264ceac9acc3ef6669b09d5984e72c2
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/proto/context_pb2.py
@@ -0,0 +1,2662 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: context.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='context.proto',
+  package='context',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
+
+_EVENTTYPEENUM = _descriptor.EnumDescriptor(
+  name='EventTypeEnum',
+  full_name='context.EventTypeEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='EVENTTYPE_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='EVENTTYPE_CREATE', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='EVENTTYPE_UPDATE', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='EVENTTYPE_REMOVE', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3703,
+  serialized_end=3809,
+)
+_sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
+
+EventTypeEnum = enum_type_wrapper.EnumTypeWrapper(_EVENTTYPEENUM)
+_DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
+  name='DeviceDriverEnum',
+  full_name='context.DeviceDriverEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_OPENCONFIG', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_TRANSPORT_API', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_P4', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_IETF_NETWORK_TOPOLOGY', index=4, number=4,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_ONF_TR_352', index=5, number=5,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3812,
+  serialized_end=4009,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
+
+DeviceDriverEnum = enum_type_wrapper.EnumTypeWrapper(_DEVICEDRIVERENUM)
+_DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
+  name='DeviceOperationalStatusEnum',
+  full_name='context.DeviceOperationalStatusEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_DISABLED', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_ENABLED', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4012,
+  serialized_end=4155,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
+
+DeviceOperationalStatusEnum = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUSENUM)
+_SERVICETYPEENUM = _descriptor.EnumDescriptor(
+  name='ServiceTypeEnum',
+  full_name='context.ServiceTypeEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_L3NM', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_L2NM', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_TAPI_CONNECTIVITY_SERVICE', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4158,
+  serialized_end=4287,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
+
+ServiceTypeEnum = enum_type_wrapper.EnumTypeWrapper(_SERVICETYPEENUM)
+_SERVICESTATUSENUM = _descriptor.EnumDescriptor(
+  name='ServiceStatusEnum',
+  full_name='context.ServiceStatusEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_PLANNED', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_ACTIVE', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_PENDING_REMOVAL', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4290,
+  serialized_end=4426,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
+
+ServiceStatusEnum = enum_type_wrapper.EnumTypeWrapper(_SERVICESTATUSENUM)
+_CONFIGACTIONENUM = _descriptor.EnumDescriptor(
+  name='ConfigActionEnum',
+  full_name='context.ConfigActionEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_SET', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_DELETE', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4428,
+  serialized_end=4521,
+)
+_sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
+
+ConfigActionEnum = enum_type_wrapper.EnumTypeWrapper(_CONFIGACTIONENUM)
+EVENTTYPE_UNDEFINED = 0
+EVENTTYPE_CREATE = 1
+EVENTTYPE_UPDATE = 2
+EVENTTYPE_REMOVE = 3
+DEVICEDRIVER_UNDEFINED = 0
+DEVICEDRIVER_OPENCONFIG = 1
+DEVICEDRIVER_TRANSPORT_API = 2
+DEVICEDRIVER_P4 = 3
+DEVICEDRIVER_IETF_NETWORK_TOPOLOGY = 4
+DEVICEDRIVER_ONF_TR_352 = 5
+DEVICEOPERATIONALSTATUS_UNDEFINED = 0
+DEVICEOPERATIONALSTATUS_DISABLED = 1
+DEVICEOPERATIONALSTATUS_ENABLED = 2
+SERVICETYPE_UNKNOWN = 0
+SERVICETYPE_L3NM = 1
+SERVICETYPE_L2NM = 2
+SERVICETYPE_TAPI_CONNECTIVITY_SERVICE = 3
+SERVICESTATUS_UNDEFINED = 0
+SERVICESTATUS_PLANNED = 1
+SERVICESTATUS_ACTIVE = 2
+SERVICESTATUS_PENDING_REMOVAL = 3
+CONFIGACTION_UNDEFINED = 0
+CONFIGACTION_SET = 1
+CONFIGACTION_DELETE = 2
+
+
+
+_EMPTY = _descriptor.Descriptor(
+  name='Empty',
+  full_name='context.Empty',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=50,
+  serialized_end=57,
+)
+
+
+_UUID = _descriptor.Descriptor(
+  name='Uuid',
+  full_name='context.Uuid',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='uuid', full_name='context.Uuid.uuid', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=59,
+  serialized_end=79,
+)
+
+
+_EVENT = _descriptor.Descriptor(
+  name='Event',
+  full_name='context.Event',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='context.Event.timestamp', index=0,
+      number=1, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='event_type', full_name='context.Event.event_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=81,
+  serialized_end=151,
+)
+
+
+_CONTEXTID = _descriptor.Descriptor(
+  name='ContextId',
+  full_name='context.ContextId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_uuid', full_name='context.ContextId.context_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=153,
+  serialized_end=201,
+)
+
+
+_CONTEXT = _descriptor.Descriptor(
+  name='Context',
+  full_name='context.Context',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.Context.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topology_ids', full_name='context.Context.topology_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_ids', full_name='context.Context.service_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='controller', full_name='context.Context.controller', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=204,
+  serialized_end=386,
+)
+
+
+_CONTEXTIDLIST = _descriptor.Descriptor(
+  name='ContextIdList',
+  full_name='context.ContextIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_ids', full_name='context.ContextIdList.context_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=388,
+  serialized_end=444,
+)
+
+
+_CONTEXTLIST = _descriptor.Descriptor(
+  name='ContextList',
+  full_name='context.ContextList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contexts', full_name='context.ContextList.contexts', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=446,
+  serialized_end=495,
+)
+
+
+_CONTEXTEVENT = _descriptor.Descriptor(
+  name='ContextEvent',
+  full_name='context.ContextEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.ContextEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.ContextEvent.context_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=497,
+  serialized_end=582,
+)
+
+
+_TOPOLOGYID = _descriptor.Descriptor(
+  name='TopologyId',
+  full_name='context.TopologyId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.TopologyId.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topology_uuid', full_name='context.TopologyId.topology_uuid', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=584,
+  serialized_end=674,
+)
+
+
+_TOPOLOGY = _descriptor.Descriptor(
+  name='Topology',
+  full_name='context.Topology',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topology_id', full_name='context.Topology.topology_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_ids', full_name='context.Topology.device_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_ids', full_name='context.Topology.link_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=676,
+  serialized_end=802,
+)
+
+
+_TOPOLOGYIDLIST = _descriptor.Descriptor(
+  name='TopologyIdList',
+  full_name='context.TopologyIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topology_ids', full_name='context.TopologyIdList.topology_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=804,
+  serialized_end=863,
+)
+
+
+_TOPOLOGYLIST = _descriptor.Descriptor(
+  name='TopologyList',
+  full_name='context.TopologyList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topologies', full_name='context.TopologyList.topologies', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=865,
+  serialized_end=918,
+)
+
+
+_TOPOLOGYEVENT = _descriptor.Descriptor(
+  name='TopologyEvent',
+  full_name='context.TopologyEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.TopologyEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topology_id', full_name='context.TopologyEvent.topology_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=920,
+  serialized_end=1008,
+)
+
+
+_DEVICEID = _descriptor.Descriptor(
+  name='DeviceId',
+  full_name='context.DeviceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_uuid', full_name='context.DeviceId.device_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1010,
+  serialized_end=1056,
+)
+
+
+_DEVICE = _descriptor.Descriptor(
+  name='Device',
+  full_name='context.Device',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.Device.device_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_type', full_name='context.Device.device_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.Device.device_config', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_operational_status', full_name='context.Device.device_operational_status', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_drivers', full_name='context.Device.device_drivers', index=4,
+      number=5, type=14, cpp_type=8, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_endpoints', full_name='context.Device.device_endpoints', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1059,
+  serialized_end=1341,
+)
+
+
+_DEVICECONFIG = _descriptor.Descriptor(
+  name='DeviceConfig',
+  full_name='context.DeviceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='config_rules', full_name='context.DeviceConfig.config_rules', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1343,
+  serialized_end=1400,
+)
+
+
+_DEVICEIDLIST = _descriptor.Descriptor(
+  name='DeviceIdList',
+  full_name='context.DeviceIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_ids', full_name='context.DeviceIdList.device_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1402,
+  serialized_end=1455,
+)
+
+
+_DEVICELIST = _descriptor.Descriptor(
+  name='DeviceList',
+  full_name='context.DeviceList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='devices', full_name='context.DeviceList.devices', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1457,
+  serialized_end=1503,
+)
+
+
+_DEVICEEVENT = _descriptor.Descriptor(
+  name='DeviceEvent',
+  full_name='context.DeviceEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.DeviceEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.DeviceEvent.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1505,
+  serialized_end=1587,
+)
+
+
+_LINKID = _descriptor.Descriptor(
+  name='LinkId',
+  full_name='context.LinkId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_uuid', full_name='context.LinkId.link_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1589,
+  serialized_end=1631,
+)
+
+
+_LINK = _descriptor.Descriptor(
+  name='Link',
+  full_name='context.Link',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.Link.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_endpoint_ids', full_name='context.Link.link_endpoint_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1633,
+  serialized_end=1721,
+)
+
+
+_LINKIDLIST = _descriptor.Descriptor(
+  name='LinkIdList',
+  full_name='context.LinkIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_ids', full_name='context.LinkIdList.link_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1723,
+  serialized_end=1770,
+)
+
+
+_LINKLIST = _descriptor.Descriptor(
+  name='LinkList',
+  full_name='context.LinkList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='links', full_name='context.LinkList.links', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1772,
+  serialized_end=1812,
+)
+
+
+_LINKEVENT = _descriptor.Descriptor(
+  name='LinkEvent',
+  full_name='context.LinkEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.LinkEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.LinkEvent.link_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1814,
+  serialized_end=1890,
+)
+
+
+_SERVICEID = _descriptor.Descriptor(
+  name='ServiceId',
+  full_name='context.ServiceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.ServiceId.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_uuid', full_name='context.ServiceId.service_uuid', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1892,
+  serialized_end=1980,
+)
+
+
+_SERVICE = _descriptor.Descriptor(
+  name='Service',
+  full_name='context.Service',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_id', full_name='context.Service.service_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_type', full_name='context.Service.service_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_endpoint_ids', full_name='context.Service.service_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_constraints', full_name='context.Service.service_constraints', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_status', full_name='context.Service.service_status', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_config', full_name='context.Service.service_config', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1983,
+  serialized_end=2277,
+)
+
+
+_SERVICESTATUS = _descriptor.Descriptor(
+  name='ServiceStatus',
+  full_name='context.ServiceStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_status', full_name='context.ServiceStatus.service_status', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2279,
+  serialized_end=2346,
+)
+
+
+_SERVICECONFIG = _descriptor.Descriptor(
+  name='ServiceConfig',
+  full_name='context.ServiceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='config_rules', full_name='context.ServiceConfig.config_rules', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2348,
+  serialized_end=2406,
+)
+
+
+_SERVICEIDLIST = _descriptor.Descriptor(
+  name='ServiceIdList',
+  full_name='context.ServiceIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_ids', full_name='context.ServiceIdList.service_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2408,
+  serialized_end=2464,
+)
+
+
+_SERVICELIST = _descriptor.Descriptor(
+  name='ServiceList',
+  full_name='context.ServiceList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='services', full_name='context.ServiceList.services', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2466,
+  serialized_end=2515,
+)
+
+
+_SERVICEEVENT = _descriptor.Descriptor(
+  name='ServiceEvent',
+  full_name='context.ServiceEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.ServiceEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_id', full_name='context.ServiceEvent.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2517,
+  serialized_end=2602,
+)
+
+
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2604,
+  serialized_end=2658,
+)
+
+
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2661,
+  serialized_end=2857,
+)
+
+
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2926,
+  serialized_end=2984,
+)
+
+
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2986,
+  serialized_end=3080,
+)
+
+
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3083,
+  serialized_end=3213,
+)
+
+
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3216,
+  serialized_end=3350,
+)
+
+
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3352,
+  serialized_end=3453,
+)
+
+
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3455,
+  serialized_end=3518,
+)
+
+
+_TERAFLOWCONTROLLER = _descriptor.Descriptor(
+  name='TeraFlowController',
+  full_name='context.TeraFlowController',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.TeraFlowController.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ip_address', full_name='context.TeraFlowController.ip_address', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port', full_name='context.TeraFlowController.port', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3520,
+  serialized_end=3614,
+)
+
+
+_AUTHENTICATIONRESULT = _descriptor.Descriptor(
+  name='AuthenticationResult',
+  full_name='context.AuthenticationResult',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.AuthenticationResult.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='authenticated', full_name='context.AuthenticationResult.authenticated', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3616,
+  serialized_end=3701,
+)
+
+_EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
+_CONTEXTID.fields_by_name['context_uuid'].message_type = _UUID
+_CONTEXT.fields_by_name['context_id'].message_type = _CONTEXTID
+_CONTEXT.fields_by_name['topology_ids'].message_type = _TOPOLOGYID
+_CONTEXT.fields_by_name['service_ids'].message_type = _SERVICEID
+_CONTEXT.fields_by_name['controller'].message_type = _TERAFLOWCONTROLLER
+_CONTEXTIDLIST.fields_by_name['context_ids'].message_type = _CONTEXTID
+_CONTEXTLIST.fields_by_name['contexts'].message_type = _CONTEXT
+_CONTEXTEVENT.fields_by_name['event'].message_type = _EVENT
+_CONTEXTEVENT.fields_by_name['context_id'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['context_id'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['topology_uuid'].message_type = _UUID
+_TOPOLOGY.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_TOPOLOGY.fields_by_name['device_ids'].message_type = _DEVICEID
+_TOPOLOGY.fields_by_name['link_ids'].message_type = _LINKID
+_TOPOLOGYIDLIST.fields_by_name['topology_ids'].message_type = _TOPOLOGYID
+_TOPOLOGYLIST.fields_by_name['topologies'].message_type = _TOPOLOGY
+_TOPOLOGYEVENT.fields_by_name['event'].message_type = _EVENT
+_TOPOLOGYEVENT.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_DEVICEID.fields_by_name['device_uuid'].message_type = _UUID
+_DEVICE.fields_by_name['device_id'].message_type = _DEVICEID
+_DEVICE.fields_by_name['device_config'].message_type = _DEVICECONFIG
+_DEVICE.fields_by_name['device_operational_status'].enum_type = _DEVICEOPERATIONALSTATUSENUM
+_DEVICE.fields_by_name['device_drivers'].enum_type = _DEVICEDRIVERENUM
+_DEVICE.fields_by_name['device_endpoints'].message_type = _ENDPOINT
+_DEVICECONFIG.fields_by_name['config_rules'].message_type = _CONFIGRULE
+_DEVICEIDLIST.fields_by_name['device_ids'].message_type = _DEVICEID
+_DEVICELIST.fields_by_name['devices'].message_type = _DEVICE
+_DEVICEEVENT.fields_by_name['event'].message_type = _EVENT
+_DEVICEEVENT.fields_by_name['device_id'].message_type = _DEVICEID
+_LINKID.fields_by_name['link_uuid'].message_type = _UUID
+_LINK.fields_by_name['link_id'].message_type = _LINKID
+_LINK.fields_by_name['link_endpoint_ids'].message_type = _ENDPOINTID
+_LINKIDLIST.fields_by_name['link_ids'].message_type = _LINKID
+_LINKLIST.fields_by_name['links'].message_type = _LINK
+_LINKEVENT.fields_by_name['event'].message_type = _EVENT
+_LINKEVENT.fields_by_name['link_id'].message_type = _LINKID
+_SERVICEID.fields_by_name['context_id'].message_type = _CONTEXTID
+_SERVICEID.fields_by_name['service_uuid'].message_type = _UUID
+_SERVICE.fields_by_name['service_id'].message_type = _SERVICEID
+_SERVICE.fields_by_name['service_type'].enum_type = _SERVICETYPEENUM
+_SERVICE.fields_by_name['service_endpoint_ids'].message_type = _ENDPOINTID
+_SERVICE.fields_by_name['service_constraints'].message_type = _CONSTRAINT
+_SERVICE.fields_by_name['service_status'].message_type = _SERVICESTATUS
+_SERVICE.fields_by_name['service_config'].message_type = _SERVICECONFIG
+_SERVICESTATUS.fields_by_name['service_status'].enum_type = _SERVICESTATUSENUM
+_SERVICECONFIG.fields_by_name['config_rules'].message_type = _CONFIGRULE
+_SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
+_SERVICELIST.fields_by_name['services'].message_type = _SERVICE
+_SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
+_SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
+_ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
+_ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
+_CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
+_TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
+_AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
+DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
+DESCRIPTOR.message_types_by_name['Event'] = _EVENT
+DESCRIPTOR.message_types_by_name['ContextId'] = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Context'] = _CONTEXT
+DESCRIPTOR.message_types_by_name['ContextIdList'] = _CONTEXTIDLIST
+DESCRIPTOR.message_types_by_name['ContextList'] = _CONTEXTLIST
+DESCRIPTOR.message_types_by_name['ContextEvent'] = _CONTEXTEVENT
+DESCRIPTOR.message_types_by_name['TopologyId'] = _TOPOLOGYID
+DESCRIPTOR.message_types_by_name['Topology'] = _TOPOLOGY
+DESCRIPTOR.message_types_by_name['TopologyIdList'] = _TOPOLOGYIDLIST
+DESCRIPTOR.message_types_by_name['TopologyList'] = _TOPOLOGYLIST
+DESCRIPTOR.message_types_by_name['TopologyEvent'] = _TOPOLOGYEVENT
+DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
+DESCRIPTOR.message_types_by_name['Device'] = _DEVICE
+DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
+DESCRIPTOR.message_types_by_name['DeviceIdList'] = _DEVICEIDLIST
+DESCRIPTOR.message_types_by_name['DeviceList'] = _DEVICELIST
+DESCRIPTOR.message_types_by_name['DeviceEvent'] = _DEVICEEVENT
+DESCRIPTOR.message_types_by_name['LinkId'] = _LINKID
+DESCRIPTOR.message_types_by_name['Link'] = _LINK
+DESCRIPTOR.message_types_by_name['LinkIdList'] = _LINKIDLIST
+DESCRIPTOR.message_types_by_name['LinkList'] = _LINKLIST
+DESCRIPTOR.message_types_by_name['LinkEvent'] = _LINKEVENT
+DESCRIPTOR.message_types_by_name['ServiceId'] = _SERVICEID
+DESCRIPTOR.message_types_by_name['Service'] = _SERVICE
+DESCRIPTOR.message_types_by_name['ServiceStatus'] = _SERVICESTATUS
+DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
+DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
+DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
+DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
+DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
+DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
+DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
+DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
+DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
+DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
+DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
+DESCRIPTOR.enum_types_by_name['DeviceDriverEnum'] = _DEVICEDRIVERENUM
+DESCRIPTOR.enum_types_by_name['DeviceOperationalStatusEnum'] = _DEVICEOPERATIONALSTATUSENUM
+DESCRIPTOR.enum_types_by_name['ServiceTypeEnum'] = _SERVICETYPEENUM
+DESCRIPTOR.enum_types_by_name['ServiceStatusEnum'] = _SERVICESTATUSENUM
+DESCRIPTOR.enum_types_by_name['ConfigActionEnum'] = _CONFIGACTIONENUM
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
+  'DESCRIPTOR' : _EMPTY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Empty)
+  })
+_sym_db.RegisterMessage(Empty)
+
+Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
+  'DESCRIPTOR' : _UUID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Uuid)
+  })
+_sym_db.RegisterMessage(Uuid)
+
+Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), {
+  'DESCRIPTOR' : _EVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Event)
+  })
+_sym_db.RegisterMessage(Event)
+
+ContextId = _reflection.GeneratedProtocolMessageType('ContextId', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextId)
+  })
+_sym_db.RegisterMessage(ContextId)
+
+Context = _reflection.GeneratedProtocolMessageType('Context', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Context)
+  })
+_sym_db.RegisterMessage(Context)
+
+ContextIdList = _reflection.GeneratedProtocolMessageType('ContextIdList', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextIdList)
+  })
+_sym_db.RegisterMessage(ContextIdList)
+
+ContextList = _reflection.GeneratedProtocolMessageType('ContextList', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextList)
+  })
+_sym_db.RegisterMessage(ContextList)
+
+ContextEvent = _reflection.GeneratedProtocolMessageType('ContextEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextEvent)
+  })
+_sym_db.RegisterMessage(ContextEvent)
+
+TopologyId = _reflection.GeneratedProtocolMessageType('TopologyId', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyId)
+  })
+_sym_db.RegisterMessage(TopologyId)
+
+Topology = _reflection.GeneratedProtocolMessageType('Topology', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Topology)
+  })
+_sym_db.RegisterMessage(Topology)
+
+TopologyIdList = _reflection.GeneratedProtocolMessageType('TopologyIdList', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyIdList)
+  })
+_sym_db.RegisterMessage(TopologyIdList)
+
+TopologyList = _reflection.GeneratedProtocolMessageType('TopologyList', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyList)
+  })
+_sym_db.RegisterMessage(TopologyList)
+
+TopologyEvent = _reflection.GeneratedProtocolMessageType('TopologyEvent', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyEvent)
+  })
+_sym_db.RegisterMessage(TopologyEvent)
+
+DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceId)
+  })
+_sym_db.RegisterMessage(DeviceId)
+
+Device = _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Device)
+  })
+_sym_db.RegisterMessage(Device)
+
+DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICECONFIG,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceConfig)
+  })
+_sym_db.RegisterMessage(DeviceConfig)
+
+DeviceIdList = _reflection.GeneratedProtocolMessageType('DeviceIdList', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceIdList)
+  })
+_sym_db.RegisterMessage(DeviceIdList)
+
+DeviceList = _reflection.GeneratedProtocolMessageType('DeviceList', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICELIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceList)
+  })
+_sym_db.RegisterMessage(DeviceList)
+
+DeviceEvent = _reflection.GeneratedProtocolMessageType('DeviceEvent', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceEvent)
+  })
+_sym_db.RegisterMessage(DeviceEvent)
+
+LinkId = _reflection.GeneratedProtocolMessageType('LinkId', (_message.Message,), {
+  'DESCRIPTOR' : _LINKID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkId)
+  })
+_sym_db.RegisterMessage(LinkId)
+
+Link = _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), {
+  'DESCRIPTOR' : _LINK,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Link)
+  })
+_sym_db.RegisterMessage(Link)
+
+LinkIdList = _reflection.GeneratedProtocolMessageType('LinkIdList', (_message.Message,), {
+  'DESCRIPTOR' : _LINKIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkIdList)
+  })
+_sym_db.RegisterMessage(LinkIdList)
+
+LinkList = _reflection.GeneratedProtocolMessageType('LinkList', (_message.Message,), {
+  'DESCRIPTOR' : _LINKLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkList)
+  })
+_sym_db.RegisterMessage(LinkList)
+
+LinkEvent = _reflection.GeneratedProtocolMessageType('LinkEvent', (_message.Message,), {
+  'DESCRIPTOR' : _LINKEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkEvent)
+  })
+_sym_db.RegisterMessage(LinkEvent)
+
+ServiceId = _reflection.GeneratedProtocolMessageType('ServiceId', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceId)
+  })
+_sym_db.RegisterMessage(ServiceId)
+
+Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Service)
+  })
+_sym_db.RegisterMessage(Service)
+
+ServiceStatus = _reflection.GeneratedProtocolMessageType('ServiceStatus', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICESTATUS,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceStatus)
+  })
+_sym_db.RegisterMessage(ServiceStatus)
+
+ServiceConfig = _reflection.GeneratedProtocolMessageType('ServiceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICECONFIG,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceConfig)
+  })
+_sym_db.RegisterMessage(ServiceConfig)
+
+ServiceIdList = _reflection.GeneratedProtocolMessageType('ServiceIdList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceIdList)
+  })
+_sym_db.RegisterMessage(ServiceIdList)
+
+ServiceList = _reflection.GeneratedProtocolMessageType('ServiceList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICELIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceList)
+  })
+_sym_db.RegisterMessage(ServiceList)
+
+ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceEvent)
+  })
+_sym_db.RegisterMessage(ServiceEvent)
+
+ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionId)
+  })
+_sym_db.RegisterMessage(ConnectionId)
+
+Connection = _reflection.GeneratedProtocolMessageType('Connection', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTION,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Connection)
+  })
+_sym_db.RegisterMessage(Connection)
+
+ConnectionIdList = _reflection.GeneratedProtocolMessageType('ConnectionIdList', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionIdList)
+  })
+_sym_db.RegisterMessage(ConnectionIdList)
+
+ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionList)
+  })
+_sym_db.RegisterMessage(ConnectionList)
+
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
+TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
+  'DESCRIPTOR' : _TERAFLOWCONTROLLER,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TeraFlowController)
+  })
+_sym_db.RegisterMessage(TeraFlowController)
+
+AuthenticationResult = _reflection.GeneratedProtocolMessageType('AuthenticationResult', (_message.Message,), {
+  'DESCRIPTOR' : _AUTHENTICATIONRESULT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.AuthenticationResult)
+  })
+_sym_db.RegisterMessage(AuthenticationResult)
+
+
+
+_CONTEXTSERVICE = _descriptor.ServiceDescriptor(
+  name='ContextService',
+  full_name='context.ContextService',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=4524,
+  serialized_end=6617,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='ListContextIds',
+    full_name='context.ContextService.ListContextIds',
+    index=0,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONTEXTIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListContexts',
+    full_name='context.ContextService.ListContexts',
+    index=1,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONTEXTLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetContext',
+    full_name='context.ContextService.GetContext',
+    index=2,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_CONTEXT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetContext',
+    full_name='context.ContextService.SetContext',
+    index=3,
+    containing_service=None,
+    input_type=_CONTEXT,
+    output_type=_CONTEXTID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveContext',
+    full_name='context.ContextService.RemoveContext',
+    index=4,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetContextEvents',
+    full_name='context.ContextService.GetContextEvents',
+    index=5,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONTEXTEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListTopologyIds',
+    full_name='context.ContextService.ListTopologyIds',
+    index=6,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_TOPOLOGYIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListTopologies',
+    full_name='context.ContextService.ListTopologies',
+    index=7,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_TOPOLOGYLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetTopology',
+    full_name='context.ContextService.GetTopology',
+    index=8,
+    containing_service=None,
+    input_type=_TOPOLOGYID,
+    output_type=_TOPOLOGY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetTopology',
+    full_name='context.ContextService.SetTopology',
+    index=9,
+    containing_service=None,
+    input_type=_TOPOLOGY,
+    output_type=_TOPOLOGYID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveTopology',
+    full_name='context.ContextService.RemoveTopology',
+    index=10,
+    containing_service=None,
+    input_type=_TOPOLOGYID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetTopologyEvents',
+    full_name='context.ContextService.GetTopologyEvents',
+    index=11,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_TOPOLOGYEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListDeviceIds',
+    full_name='context.ContextService.ListDeviceIds',
+    index=12,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICEIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListDevices',
+    full_name='context.ContextService.ListDevices',
+    index=13,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICELIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetDevice',
+    full_name='context.ContextService.GetDevice',
+    index=14,
+    containing_service=None,
+    input_type=_DEVICEID,
+    output_type=_DEVICE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetDevice',
+    full_name='context.ContextService.SetDevice',
+    index=15,
+    containing_service=None,
+    input_type=_DEVICE,
+    output_type=_DEVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveDevice',
+    full_name='context.ContextService.RemoveDevice',
+    index=16,
+    containing_service=None,
+    input_type=_DEVICEID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetDeviceEvents',
+    full_name='context.ContextService.GetDeviceEvents',
+    index=17,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICEEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListLinkIds',
+    full_name='context.ContextService.ListLinkIds',
+    index=18,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListLinks',
+    full_name='context.ContextService.ListLinks',
+    index=19,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetLink',
+    full_name='context.ContextService.GetLink',
+    index=20,
+    containing_service=None,
+    input_type=_LINKID,
+    output_type=_LINK,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetLink',
+    full_name='context.ContextService.SetLink',
+    index=21,
+    containing_service=None,
+    input_type=_LINK,
+    output_type=_LINKID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveLink',
+    full_name='context.ContextService.RemoveLink',
+    index=22,
+    containing_service=None,
+    input_type=_LINKID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetLinkEvents',
+    full_name='context.ContextService.GetLinkEvents',
+    index=23,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListServiceIds',
+    full_name='context.ContextService.ListServiceIds',
+    index=24,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_SERVICEIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListServices',
+    full_name='context.ContextService.ListServices',
+    index=25,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_SERVICELIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetService',
+    full_name='context.ContextService.GetService',
+    index=26,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_SERVICE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetService',
+    full_name='context.ContextService.SetService',
+    index=27,
+    containing_service=None,
+    input_type=_SERVICE,
+    output_type=_SERVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveService',
+    full_name='context.ContextService.RemoveService',
+    index=28,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetServiceEvents',
+    full_name='context.ContextService.GetServiceEvents',
+    index=29,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_SERVICEEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
+
+DESCRIPTOR.services_by_name['ContextService'] = _CONTEXTSERVICE
+
+# @@protoc_insertion_point(module_scope)
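
# Illustrative sketch, not part of the patch: the ContextService registered above
# can be inspected through the standard protobuf descriptor API; the import path
# assumes the context_pb2 module generated in this component's proto package.
from opticalcentralizedattackdetector.proto import context_pb2

context_service = context_pb2.DESCRIPTOR.services_by_name['ContextService']
for method in context_service.methods:
    # prints e.g. "GetService context.ServiceId -> context.Service"
    print(method.name, method.input_type.full_name, '->', method.output_type.full_name)
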
diff --git a/src/opticalcentralizedattackdetector/proto/kpi_sample_types_pb2.py b/src/opticalcentralizedattackdetector/proto/kpi_sample_types_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/proto/kpi_sample_types_pb2.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: kpi_sample_types.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='kpi_sample_types.proto',
+  package='kpi_sample_types',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
+)
+
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
+
+
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+# @@protoc_insertion_point(module_scope)
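
# Illustrative sketch, not part of the patch: the EnumTypeWrapper exported above
# maps between enum names and numbers; the import path assumes this component's
# proto package.
from opticalcentralizedattackdetector.proto.kpi_sample_types_pb2 import KpiSampleType

assert KpiSampleType.Name(101) == 'KPISAMPLETYPE_PACKETS_TRANSMITTED'
assert KpiSampleType.Value('KPISAMPLETYPE_BYTES_RECEIVED') == 202
for name, number in KpiSampleType.items():
    print(name, number)
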
diff --git a/src/centralizedattackdetector/proto/monitoring_pb2.py b/src/opticalcentralizedattackdetector/proto/monitoring_pb2.py
similarity index 51%
rename from src/centralizedattackdetector/proto/monitoring_pb2.py
rename to src/opticalcentralizedattackdetector/proto/monitoring_pb2.py
index 0a5e4d990fb3ea82734080e4fde8086977bdedbb..b313ebb68f0da37a540898e8c362fd204a799076 100644
--- a/src/centralizedattackdetector/proto/monitoring_pb2.py
+++ b/src/opticalcentralizedattackdetector/proto/monitoring_pb2.py
@@ -2,7 +2,6 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: monitoring.proto
 """Generated protocol buffer code."""
-from google.protobuf.internal import enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -13,6 +12,7 @@ _sym_db = _symbol_database.Default()
 
 
 from . import context_pb2 as context__pb2
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -21,131 +21,53 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\"\x84\x01\n\x10\x43reateKpiRequest\x12\x16\n\x0ekpiDescription\x18\x01 \x01(\t\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x32\n\x0fkpi_sample_type\x18\x03 \x01(\x0e\x32\x19.monitoring.KpiSampleType\"h\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"i\n\x17MonitorDeviceKpiRequest\x12\x1c\n\x03kpi\x18\x01 \x01(\x0b\x32\x0f.monitoring.Kpi\x12\x18\n\x10\x63onnexion_time_s\x18\x02 \x01(\r\x12\x16\n\x0esample_rate_ms\x18\x03 \x01(\r\"s\n\x11IncludeKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x12\n\ntime_stamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x03 \x01(\x0b\x32\x14.monitoring.KpiValue\"\xd6\x01\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\x16\n\x0ekpiDescription\x18\x03 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\x12\x32\n\x0fkpi_sample_type\x18\x05 \x01(\x0e\x32\x19.monitoring.KpiSampleType\x12$\n\tdevice_id\x18\x06 \x01(\x0b\x32\x11.context.DeviceId\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"T\n\tKpiDevice\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"+\n\x07KpiList\x12 \n\x07kpiList\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi\"M\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x13\n\tstringVal\x18\x02 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x03 \x01(\x08H\x00\x42\x07\n\x05value*x\n\rKpiSampleType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x17\n\x13PACKETS_TRANSMITTED\x10\x65\x12\x14\n\x10PACKETS_RECEIVED\x10\x66\x12\x16\n\x11\x42YTES_TRANSMITTED\x10\xc9\x01\x12\x13\n\x0e\x42YTES_RECEIVED\x10\xca\x01\x32\x8b\x03\n\x11MonitoringService\x12>\n\tCreateKpi\x12\x1c.monitoring.CreateKpiRequest\x1a\x11.monitoring.KpiId\"\x00\x12=\n\nIncludeKpi\x12\x1d.monitoring.IncludeKpiRequest\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12I\n\x10MonitorDeviceKpi\x12#.monitoring.MonitorDeviceKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\x1a\x16kpi_sample_types.proto\"\xda\x01\n\rKpiDescriptor\x12\x17\n\x0fkpi_description\x18\x01 \x01(\t\x12\x38\n\x0fkpi_sample_type\x18\x02 \x01(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\x12$\n\tdevice_id\x18\x03 \x01(\x0b\x32\x11.context.DeviceId\x12(\n\x0b\x65ndpoint_id\x18\x04 \x01(\x0b\x32\x13.context.EndPointId\x12&\n\nservice_id\x18\x05 \x01(\x0b\x32\x12.context.ServiceId\"p\n\x11MonitorKpiRequest\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x1b\n\x13sampling_duration_s\x18\x02 \x01(\x02\x12\x1b\n\x13sampling_interval_s\x18\x03 \x01(\x02\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"d\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\"a\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x12\n\x08\x66loatVal\x18\x02 \x01(\x02H\x00\x12\x13\n\tstringVal\x18\x03 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x04 \x01(\x08H\x00\x42\x07\n\x05value\",\n\x07KpiList\x12!\n\x08kpi_list\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi2\xf3\x02\n\x11MonitoringService\x12;\n\tCreateKpi\x12\x19.monitoring.KpiDescriptor\x1a\x11.monitoring.KpiId\"\x00\x12\x42\n\x10GetKpiDescriptor\x12\x11.monitoring.KpiId\x1a\x19.monitoring.KpiDescriptor\"\x00\x12/\n\nIncludeKpi\x12\x0f.monitoring.Kpi\x1a\x0e.context.Empty\"\x00\x12=\n\nMonitorKpi\x12\x1d.monitoring.MonitorKpiRequest\x1a\x0e.context.Empty\"\x00\x12\x36\n\x0cGetStreamKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
   ,
-  dependencies=[context__pb2.DESCRIPTOR,])
+  dependencies=[context__pb2.DESCRIPTOR,kpi__sample__types__pb2.DESCRIPTOR,])
 
-_KPISAMPLETYPE = _descriptor.EnumDescriptor(
-  name='KpiSampleType',
-  full_name='monitoring.KpiSampleType',
-  filename=None,
-  file=DESCRIPTOR,
-  create_key=_descriptor._internal_create_key,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_TRANSMITTED', index=1, number=101,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='PACKETS_RECEIVED', index=2, number=102,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_TRANSMITTED', index=3, number=201,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES_RECEIVED', index=4, number=202,
-      serialized_options=None,
-      type=None,
-      create_key=_descriptor._internal_create_key),
-  ],
-  containing_type=None,
-  serialized_options=None,
-  serialized_start=979,
-  serialized_end=1099,
-)
-_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
 
-KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
-UNKNOWN = 0
-PACKETS_TRANSMITTED = 101
-PACKETS_RECEIVED = 102
-BYTES_TRANSMITTED = 201
-BYTES_RECEIVED = 202
 
 
-
-_CREATEKPIREQUEST = _descriptor.Descriptor(
-  name='CreateKpiRequest',
-  full_name='monitoring.CreateKpiRequest',
+_KPIDESCRIPTOR = _descriptor.Descriptor(
+  name='KpiDescriptor',
+  full_name='monitoring.KpiDescriptor',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.CreateKpiRequest.kpiDescription', index=0,
+      name='kpi_description', full_name='monitoring.KpiDescriptor.kpi_description', index=0,
       number=1, type=9, cpp_type=9, label=1,
       has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.CreateKpiRequest.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.CreateKpiRequest.kpi_sample_type', index=2,
-      number=3, type=14, cpp_type=8, label=1,
+      name='kpi_sample_type', full_name='monitoring.KpiDescriptor.kpi_sample_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
       has_default_value=False, default_value=0,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=48,
-  serialized_end=180,
-)
-
-
-_MONITORKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorKpiRequest',
-  full_name='monitoring.MonitorKpiRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
+      name='device_id', full_name='monitoring.KpiDescriptor.device_id', index=2,
+      number=3, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='endpoint_id', full_name='monitoring.KpiDescriptor.endpoint_id', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='service_id', full_name='monitoring.KpiDescriptor.service_id', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -161,37 +83,37 @@ _MONITORKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=182,
-  serialized_end=286,
+  serialized_start=72,
+  serialized_end=290,
 )
 
 
-_MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
-  name='MonitorDeviceKpiRequest',
-  full_name='monitoring.MonitorDeviceKpiRequest',
+_MONITORKPIREQUEST = _descriptor.Descriptor(
+  name='MonitorKpiRequest',
+  full_name='monitoring.MonitorKpiRequest',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpi', full_name='monitoring.MonitorDeviceKpiRequest.kpi', index=0,
+      name='kpi_id', full_name='monitoring.MonitorKpiRequest.kpi_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='connexion_time_s', full_name='monitoring.MonitorDeviceKpiRequest.connexion_time_s', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='sampling_duration_s', full_name='monitoring.MonitorKpiRequest.sampling_duration_s', index=1,
+      number=2, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='sample_rate_ms', full_name='monitoring.MonitorDeviceKpiRequest.sample_rate_ms', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
+      name='sampling_interval_s', full_name='monitoring.MonitorKpiRequest.sampling_interval_s', index=2,
+      number=3, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -207,40 +129,26 @@ _MONITORDEVICEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=288,
-  serialized_end=393,
+  serialized_start=292,
+  serialized_end=404,
 )
 
 
-_INCLUDEKPIREQUEST = _descriptor.Descriptor(
-  name='IncludeKpiRequest',
-  full_name='monitoring.IncludeKpiRequest',
+_KPIID = _descriptor.Descriptor(
+  name='KpiId',
+  full_name='monitoring.KpiId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.IncludeKpiRequest.kpi_id', index=0,
+      name='kpi_id', full_name='monitoring.KpiId.kpi_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='time_stamp', full_name='monitoring.IncludeKpiRequest.time_stamp', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.IncludeKpiRequest.kpi_value', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -253,8 +161,8 @@ _INCLUDEKPIREQUEST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=395,
-  serialized_end=510,
+  serialized_start=406,
+  serialized_end=444,
 )
 
 
@@ -281,33 +189,12 @@ _KPI = _descriptor.Descriptor(
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='kpiDescription', full_name='monitoring.Kpi.kpiDescription', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=3,
+      name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=2,
       number=4, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='kpi_sample_type', full_name='monitoring.Kpi.kpi_sample_type', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.Kpi.device_id', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -320,62 +207,44 @@ _KPI = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=513,
-  serialized_end=727,
+  serialized_start=446,
+  serialized_end=546,
 )
 
 
-_KPIID = _descriptor.Descriptor(
-  name='KpiId',
-  full_name='monitoring.KpiId',
+_KPIVALUE = _descriptor.Descriptor(
+  name='KpiValue',
+  full_name='monitoring.KpiValue',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.KpiId.kpi_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='intVal', full_name='monitoring.KpiValue.intVal', index=0,
+      number=1, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=729,
-  serialized_end=767,
-)
-
-
-_KPIDEVICE = _descriptor.Descriptor(
-  name='KpiDevice',
-  full_name='monitoring.KpiDevice',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
     _descriptor.FieldDescriptor(
-      name='kpi_id', full_name='monitoring.KpiDevice.kpi_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='floatVal', full_name='monitoring.KpiValue.floatVal', index=1,
+      number=2, type=2, cpp_type=6, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='monitoring.KpiDevice.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='stringVal', full_name='monitoring.KpiValue.stringVal', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='boolVal', full_name='monitoring.KpiValue.boolVal', index=3,
+      number=4, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -390,9 +259,14 @@ _KPIDEVICE = _descriptor.Descriptor(
   syntax='proto3',
   extension_ranges=[],
   oneofs=[
+    _descriptor.OneofDescriptor(
+      name='value', full_name='monitoring.KpiValue.value',
+      index=0, containing_type=None,
+      create_key=_descriptor._internal_create_key,
+    fields=[]),
   ],
-  serialized_start=769,
-  serialized_end=853,
+  serialized_start=548,
+  serialized_end=645,
 )
 
 
@@ -405,7 +279,7 @@ _KPILIST = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='kpiList', full_name='monitoring.KpiList.kpiList', index=0,
+      name='kpi_list', full_name='monitoring.KpiList.kpi_list', index=0,
       number=1, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
@@ -423,102 +297,45 @@ _KPILIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=855,
-  serialized_end=898,
+  serialized_start=647,
+  serialized_end=691,
 )
 
-
-_KPIVALUE = _descriptor.Descriptor(
-  name='KpiValue',
-  full_name='monitoring.KpiValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  create_key=_descriptor._internal_create_key,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='intVal', full_name='monitoring.KpiValue.intVal', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='stringVal', full_name='monitoring.KpiValue.stringVal', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='boolVal', full_name='monitoring.KpiValue.boolVal', index=2,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  serialized_options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='value', full_name='monitoring.KpiValue.value',
-      index=0, containing_type=None,
-      create_key=_descriptor._internal_create_key,
-    fields=[]),
-  ],
-  serialized_start=900,
-  serialized_end=977,
-)
-
-_CREATEKPIREQUEST.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
-_CREATEKPIREQUEST.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['kpi_sample_type'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
+_KPIDESCRIPTOR.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
+_KPIDESCRIPTOR.fields_by_name['endpoint_id'].message_type = context__pb2._ENDPOINTID
+_KPIDESCRIPTOR.fields_by_name['service_id'].message_type = context__pb2._SERVICEID
 _MONITORKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_MONITORDEVICEKPIREQUEST.fields_by_name['kpi'].message_type = _KPI
-_INCLUDEKPIREQUEST.fields_by_name['kpi_id'].message_type = _KPIID
-_INCLUDEKPIREQUEST.fields_by_name['kpi_value'].message_type = _KPIVALUE
+_KPIID.fields_by_name['kpi_id'].message_type = context__pb2._UUID
 _KPI.fields_by_name['kpi_id'].message_type = _KPIID
 _KPI.fields_by_name['kpi_value'].message_type = _KPIVALUE
-_KPI.fields_by_name['kpi_sample_type'].enum_type = _KPISAMPLETYPE
-_KPI.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
-_KPIID.fields_by_name['kpi_id'].message_type = context__pb2._UUID
-_KPIDEVICE.fields_by_name['kpi_id'].message_type = _KPIID
-_KPIDEVICE.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
-_KPILIST.fields_by_name['kpiList'].message_type = _KPI
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['intVal'])
 _KPIVALUE.fields_by_name['intVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
+_KPIVALUE.oneofs_by_name['value'].fields.append(
+  _KPIVALUE.fields_by_name['floatVal'])
+_KPIVALUE.fields_by_name['floatVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['stringVal'])
 _KPIVALUE.fields_by_name['stringVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
 _KPIVALUE.oneofs_by_name['value'].fields.append(
   _KPIVALUE.fields_by_name['boolVal'])
 _KPIVALUE.fields_by_name['boolVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
-DESCRIPTOR.message_types_by_name['CreateKpiRequest'] = _CREATEKPIREQUEST
+_KPILIST.fields_by_name['kpi_list'].message_type = _KPI
+DESCRIPTOR.message_types_by_name['KpiDescriptor'] = _KPIDESCRIPTOR
 DESCRIPTOR.message_types_by_name['MonitorKpiRequest'] = _MONITORKPIREQUEST
-DESCRIPTOR.message_types_by_name['MonitorDeviceKpiRequest'] = _MONITORDEVICEKPIREQUEST
-DESCRIPTOR.message_types_by_name['IncludeKpiRequest'] = _INCLUDEKPIREQUEST
-DESCRIPTOR.message_types_by_name['Kpi'] = _KPI
 DESCRIPTOR.message_types_by_name['KpiId'] = _KPIID
-DESCRIPTOR.message_types_by_name['KpiDevice'] = _KPIDEVICE
-DESCRIPTOR.message_types_by_name['KpiList'] = _KPILIST
+DESCRIPTOR.message_types_by_name['Kpi'] = _KPI
 DESCRIPTOR.message_types_by_name['KpiValue'] = _KPIVALUE
-DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
+DESCRIPTOR.message_types_by_name['KpiList'] = _KPILIST
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
-CreateKpiRequest = _reflection.GeneratedProtocolMessageType('CreateKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _CREATEKPIREQUEST,
+KpiDescriptor = _reflection.GeneratedProtocolMessageType('KpiDescriptor', (_message.Message,), {
+  'DESCRIPTOR' : _KPIDESCRIPTOR,
   '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.CreateKpiRequest)
+  # @@protoc_insertion_point(class_scope:monitoring.KpiDescriptor)
   })
-_sym_db.RegisterMessage(CreateKpiRequest)
+_sym_db.RegisterMessage(KpiDescriptor)
 
 MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest', (_message.Message,), {
   'DESCRIPTOR' : _MONITORKPIREQUEST,
@@ -527,19 +344,12 @@ MonitorKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorKpiRequest'
   })
 _sym_db.RegisterMessage(MonitorKpiRequest)
 
-MonitorDeviceKpiRequest = _reflection.GeneratedProtocolMessageType('MonitorDeviceKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _MONITORDEVICEKPIREQUEST,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.MonitorDeviceKpiRequest)
-  })
-_sym_db.RegisterMessage(MonitorDeviceKpiRequest)
-
-IncludeKpiRequest = _reflection.GeneratedProtocolMessageType('IncludeKpiRequest', (_message.Message,), {
-  'DESCRIPTOR' : _INCLUDEKPIREQUEST,
+KpiId = _reflection.GeneratedProtocolMessageType('KpiId', (_message.Message,), {
+  'DESCRIPTOR' : _KPIID,
   '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.IncludeKpiRequest)
+  # @@protoc_insertion_point(class_scope:monitoring.KpiId)
   })
-_sym_db.RegisterMessage(IncludeKpiRequest)
+_sym_db.RegisterMessage(KpiId)
 
 Kpi = _reflection.GeneratedProtocolMessageType('Kpi', (_message.Message,), {
   'DESCRIPTOR' : _KPI,
@@ -548,19 +358,12 @@ Kpi = _reflection.GeneratedProtocolMessageType('Kpi', (_message.Message,), {
   })
 _sym_db.RegisterMessage(Kpi)
 
-KpiId = _reflection.GeneratedProtocolMessageType('KpiId', (_message.Message,), {
-  'DESCRIPTOR' : _KPIID,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.KpiId)
-  })
-_sym_db.RegisterMessage(KpiId)
-
-KpiDevice = _reflection.GeneratedProtocolMessageType('KpiDevice', (_message.Message,), {
-  'DESCRIPTOR' : _KPIDEVICE,
+KpiValue = _reflection.GeneratedProtocolMessageType('KpiValue', (_message.Message,), {
+  'DESCRIPTOR' : _KPIVALUE,
   '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.KpiDevice)
+  # @@protoc_insertion_point(class_scope:monitoring.KpiValue)
   })
-_sym_db.RegisterMessage(KpiDevice)
+_sym_db.RegisterMessage(KpiValue)
 
 KpiList = _reflection.GeneratedProtocolMessageType('KpiList', (_message.Message,), {
   'DESCRIPTOR' : _KPILIST,
@@ -569,13 +372,6 @@ KpiList = _reflection.GeneratedProtocolMessageType('KpiList', (_message.Message,
   })
 _sym_db.RegisterMessage(KpiList)
 
-KpiValue = _reflection.GeneratedProtocolMessageType('KpiValue', (_message.Message,), {
-  'DESCRIPTOR' : _KPIVALUE,
-  '__module__' : 'monitoring_pb2'
-  # @@protoc_insertion_point(class_scope:monitoring.KpiValue)
-  })
-_sym_db.RegisterMessage(KpiValue)
-
 
 
 _MONITORINGSERVICE = _descriptor.ServiceDescriptor(
@@ -585,45 +381,45 @@ _MONITORINGSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=1102,
-  serialized_end=1497,
+  serialized_start=694,
+  serialized_end=1065,
   methods=[
   _descriptor.MethodDescriptor(
     name='CreateKpi',
     full_name='monitoring.MonitoringService.CreateKpi',
     index=0,
     containing_service=None,
-    input_type=_CREATEKPIREQUEST,
+    input_type=_KPIDESCRIPTOR,
     output_type=_KPIID,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='IncludeKpi',
-    full_name='monitoring.MonitoringService.IncludeKpi',
+    name='GetKpiDescriptor',
+    full_name='monitoring.MonitoringService.GetKpiDescriptor',
     index=1,
     containing_service=None,
-    input_type=_INCLUDEKPIREQUEST,
-    output_type=context__pb2._EMPTY,
+    input_type=_KPIID,
+    output_type=_KPIDESCRIPTOR,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorKpi',
-    full_name='monitoring.MonitoringService.MonitorKpi',
+    name='IncludeKpi',
+    full_name='monitoring.MonitoringService.IncludeKpi',
     index=2,
     containing_service=None,
-    input_type=_MONITORKPIREQUEST,
+    input_type=_KPI,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='MonitorDeviceKpi',
-    full_name='monitoring.MonitoringService.MonitorDeviceKpi',
+    name='MonitorKpi',
+    full_name='monitoring.MonitoringService.MonitorKpi',
     index=3,
     containing_service=None,
-    input_type=_MONITORDEVICEKPIREQUEST,
+    input_type=_MONITORKPIREQUEST,
     output_type=context__pb2._EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
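
# Illustrative sketch, not part of the patch: building the reshaped monitoring
# messages (KpiDescriptor, Kpi, KpiValue) defined above; the description string
# is a placeholder and the import paths assume this component's proto package.
from opticalcentralizedattackdetector.proto import monitoring_pb2, kpi_sample_types_pb2

descriptor = monitoring_pb2.KpiDescriptor()
descriptor.kpi_description = 'placeholder KPI description'
descriptor.kpi_sample_type = kpi_sample_types_pb2.KPISAMPLETYPE_BYTES_RECEIVED

kpi = monitoring_pb2.Kpi()
kpi.timestamp = '2021-01-01T00:00:00Z'
kpi.kpi_value.floatVal = 1.5  # assigning floatVal selects it within the 'value' oneof
assert kpi.kpi_value.WhichOneof('value') == 'floatVal'
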
diff --git a/src/centralizedattackdetector/proto/centralized_attack_detector_pb2.py b/src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2.py
similarity index 57%
rename from src/centralizedattackdetector/proto/centralized_attack_detector_pb2.py
rename to src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2.py
index a8a6d000f7627238b39986f0f7e9911f0d4cf1e7..b97a93fef290a5d27c2369d3b69d1405ea8a6442 100644
--- a/src/centralizedattackdetector/proto/centralized_attack_detector_pb2.py
+++ b/src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: centralized_attack_detector.proto
+# source: optical_centralized_attack_detector.proto
 """Generated protocol buffer code."""
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
@@ -16,12 +16,12 @@ from . import monitoring_pb2 as monitoring__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
-  name='centralized_attack_detector.proto',
+  name='optical_centralized_attack_detector.proto',
   package='centralized_attack_detector',
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n!centralized_attack_detector.proto\x12\x1b\x63\x65ntralized_attack_detector\x1a\rcontext.proto\x1a\x10monitoring.proto2\x81\x02\n CentralizedAttackDetectorService\x12\x39\n\x13NotifyServiceUpdate\x12\x10.context.Service\x1a\x0e.context.Empty\"\x00\x12\x30\n\x0c\x44\x65tectAttack\x12\x0e.context.Empty\x1a\x0e.context.Empty\"\x00\x12<\n\x13ReportSummarizedKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x12\x32\n\tReportKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
+  serialized_pb=b'\n)optical_centralized_attack_detector.proto\x12\x1b\x63\x65ntralized_attack_detector\x1a\rcontext.proto\x1a\x10monitoring.proto2\x88\x02\n\'OpticalCentralizedAttackDetectorService\x12\x39\n\x13NotifyServiceUpdate\x12\x10.context.Service\x1a\x0e.context.Empty\"\x00\x12\x30\n\x0c\x44\x65tectAttack\x12\x0e.context.Empty\x1a\x0e.context.Empty\"\x00\x12<\n\x13ReportSummarizedKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x12\x32\n\tReportKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
   ,
   dependencies=[context__pb2.DESCRIPTOR,monitoring__pb2.DESCRIPTOR,])
 
@@ -31,19 +31,19 @@ _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
 
 
-_CENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
-  name='CentralizedAttackDetectorService',
-  full_name='centralized_attack_detector.CentralizedAttackDetectorService',
+_OPTICALCENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
+  name='OpticalCentralizedAttackDetectorService',
+  full_name='centralized_attack_detector.OpticalCentralizedAttackDetectorService',
   file=DESCRIPTOR,
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=100,
-  serialized_end=357,
+  serialized_start=108,
+  serialized_end=372,
   methods=[
   _descriptor.MethodDescriptor(
     name='NotifyServiceUpdate',
-    full_name='centralized_attack_detector.CentralizedAttackDetectorService.NotifyServiceUpdate',
+    full_name='centralized_attack_detector.OpticalCentralizedAttackDetectorService.NotifyServiceUpdate',
     index=0,
     containing_service=None,
     input_type=context__pb2._SERVICE,
@@ -53,7 +53,7 @@ _CENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
   ),
   _descriptor.MethodDescriptor(
     name='DetectAttack',
-    full_name='centralized_attack_detector.CentralizedAttackDetectorService.DetectAttack',
+    full_name='centralized_attack_detector.OpticalCentralizedAttackDetectorService.DetectAttack',
     index=1,
     containing_service=None,
     input_type=context__pb2._EMPTY,
@@ -63,7 +63,7 @@ _CENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
   ),
   _descriptor.MethodDescriptor(
     name='ReportSummarizedKpi',
-    full_name='centralized_attack_detector.CentralizedAttackDetectorService.ReportSummarizedKpi',
+    full_name='centralized_attack_detector.OpticalCentralizedAttackDetectorService.ReportSummarizedKpi',
     index=2,
     containing_service=None,
     input_type=monitoring__pb2._KPILIST,
@@ -73,7 +73,7 @@ _CENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
   ),
   _descriptor.MethodDescriptor(
     name='ReportKpi',
-    full_name='centralized_attack_detector.CentralizedAttackDetectorService.ReportKpi',
+    full_name='centralized_attack_detector.OpticalCentralizedAttackDetectorService.ReportKpi',
     index=3,
     containing_service=None,
     input_type=monitoring__pb2._KPILIST,
@@ -82,8 +82,8 @@ _CENTRALIZEDATTACKDETECTORSERVICE = _descriptor.ServiceDescriptor(
     create_key=_descriptor._internal_create_key,
   ),
 ])
-_sym_db.RegisterServiceDescriptor(_CENTRALIZEDATTACKDETECTORSERVICE)
+_sym_db.RegisterServiceDescriptor(_OPTICALCENTRALIZEDATTACKDETECTORSERVICE)
 
-DESCRIPTOR.services_by_name['CentralizedAttackDetectorService'] = _CENTRALIZEDATTACKDETECTORSERVICE
+DESCRIPTOR.services_by_name['OpticalCentralizedAttackDetectorService'] = _OPTICALCENTRALIZEDATTACKDETECTORSERVICE
 
 # @@protoc_insertion_point(module_scope)
diff --git a/src/centralizedattackdetector/proto/centralized_attack_detector_pb2_grpc.py b/src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2_grpc.py
similarity index 84%
rename from src/centralizedattackdetector/proto/centralized_attack_detector_pb2_grpc.py
rename to src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2_grpc.py
index 418acbf19eb1a2a43889234626adb20d5dce1186..17b839fa3bbaafb8ecfa795db21ba6baba8cd28b 100644
--- a/src/centralizedattackdetector/proto/centralized_attack_detector_pb2_grpc.py
+++ b/src/opticalcentralizedattackdetector/proto/optical_centralized_attack_detector_pb2_grpc.py
@@ -6,7 +6,7 @@ from . import context_pb2 as context__pb2
 from . import monitoring_pb2 as monitoring__pb2
 
 
-class CentralizedAttackDetectorServiceStub(object):
+class OpticalCentralizedAttackDetectorServiceStub(object):
     """Missing associated documentation comment in .proto file."""
 
     def __init__(self, channel):
@@ -16,28 +16,28 @@ class CentralizedAttackDetectorServiceStub(object):
             channel: A grpc.Channel.
         """
         self.NotifyServiceUpdate = channel.unary_unary(
-                '/centralized_attack_detector.CentralizedAttackDetectorService/NotifyServiceUpdate',
+                '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/NotifyServiceUpdate',
                 request_serializer=context__pb2.Service.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
         self.DetectAttack = channel.unary_unary(
-                '/centralized_attack_detector.CentralizedAttackDetectorService/DetectAttack',
+                '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/DetectAttack',
                 request_serializer=context__pb2.Empty.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
         self.ReportSummarizedKpi = channel.unary_unary(
-                '/centralized_attack_detector.CentralizedAttackDetectorService/ReportSummarizedKpi',
+                '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/ReportSummarizedKpi',
                 request_serializer=monitoring__pb2.KpiList.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
         self.ReportKpi = channel.unary_unary(
-                '/centralized_attack_detector.CentralizedAttackDetectorService/ReportKpi',
+                '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/ReportKpi',
                 request_serializer=monitoring__pb2.KpiList.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
 
 
-class CentralizedAttackDetectorServiceServicer(object):
+class OpticalCentralizedAttackDetectorServiceServicer(object):
     """Missing associated documentation comment in .proto file."""
 
     def NotifyServiceUpdate(self, request, context):
@@ -67,7 +67,7 @@ class CentralizedAttackDetectorServiceServicer(object):
         raise NotImplementedError('Method not implemented!')
 
 
-def add_CentralizedAttackDetectorServiceServicer_to_server(servicer, server):
+def add_OpticalCentralizedAttackDetectorServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
             'NotifyServiceUpdate': grpc.unary_unary_rpc_method_handler(
                     servicer.NotifyServiceUpdate,
@@ -91,12 +91,12 @@ def add_CentralizedAttackDetectorServiceServicer_to_server(servicer, server):
             ),
     }
     generic_handler = grpc.method_handlers_generic_handler(
-            'centralized_attack_detector.CentralizedAttackDetectorService', rpc_method_handlers)
+            'centralized_attack_detector.OpticalCentralizedAttackDetectorService', rpc_method_handlers)
     server.add_generic_rpc_handlers((generic_handler,))
 
 
  # This class is part of an EXPERIMENTAL API.
-class CentralizedAttackDetectorService(object):
+class OpticalCentralizedAttackDetectorService(object):
     """Missing associated documentation comment in .proto file."""
 
     @staticmethod
@@ -110,7 +110,7 @@ class CentralizedAttackDetectorService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.CentralizedAttackDetectorService/NotifyServiceUpdate',
+        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/NotifyServiceUpdate',
             context__pb2.Service.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
@@ -127,7 +127,7 @@ class CentralizedAttackDetectorService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.CentralizedAttackDetectorService/DetectAttack',
+        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/DetectAttack',
             context__pb2.Empty.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
@@ -144,7 +144,7 @@ class CentralizedAttackDetectorService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.CentralizedAttackDetectorService/ReportSummarizedKpi',
+        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/ReportSummarizedKpi',
             monitoring__pb2.KpiList.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
@@ -161,7 +161,7 @@ class CentralizedAttackDetectorService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.CentralizedAttackDetectorService/ReportKpi',
+        return grpc.experimental.unary_unary(request, target, '/centralized_attack_detector.OpticalCentralizedAttackDetectorService/ReportKpi',
             monitoring__pb2.KpiList.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
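
# Illustrative sketch, not part of the patch: calling the renamed stub over an
# insecure channel; the target address and port are placeholders.
import grpc
from opticalcentralizedattackdetector.proto import context_pb2
from opticalcentralizedattackdetector.proto import optical_centralized_attack_detector_pb2_grpc as ocad_pb2_grpc

channel = grpc.insecure_channel('localhost:10005')  # placeholder endpoint
stub = ocad_pb2_grpc.OpticalCentralizedAttackDetectorServiceStub(channel)
stub.DetectAttack(context_pb2.Empty())  # unary-unary RPC declared in the stub above
channel.close()
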
diff --git a/src/centralizedattackdetector/proto/service_pb2.py b/src/opticalcentralizedattackdetector/proto/service_pb2.py
similarity index 100%
rename from src/centralizedattackdetector/proto/service_pb2.py
rename to src/opticalcentralizedattackdetector/proto/service_pb2.py
diff --git a/src/opticalcentralizedattackdetector/requirements.in b/src/opticalcentralizedattackdetector/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..00acd77fe30ca10ffe3af04f7cf7ced2cb9256f8
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/requirements.in
@@ -0,0 +1,9 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
+# from the monitoring component
+influxdb
+python-json-logger
diff --git a/src/opticalcentralizedattackdetector/requirements.txt b/src/opticalcentralizedattackdetector/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..fca44cbb133ae3c6d5ba2a6d12882ecad9a2c467
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/requirements.txt
@@ -0,0 +1,60 @@
+#
+# This file is autogenerated by pip-compile with python 3.9
+# To update, run:
+#
+#    pip-compile --output-file=opticalcentralizedattackdetector/requirements.txt opticalcentralizedattackdetector/requirements.in
+#
+attrs==21.2.0
+    # via pytest
+certifi==2021.10.8
+    # via influxdb-client
+grpcio==1.41.0
+    # via
+    #   -r opticalcentralizedattackdetector/requirements.in
+    #   grpcio-health-checking
+grpcio-health-checking==1.41.0
+    # via -r opticalcentralizedattackdetector/requirements.in
+influxdb-client==1.23.0
+    # via -r opticalcentralizedattackdetector/requirements.in
+iniconfig==1.1.1
+    # via pytest
+packaging==21.0
+    # via pytest
+pluggy==1.0.0
+    # via pytest
+prometheus-client==0.11.0
+    # via -r opticalcentralizedattackdetector/requirements.in
+protobuf==3.18.0
+    # via grpcio-health-checking
+py==1.10.0
+    # via pytest
+py-cpuinfo==8.0.0
+    # via pytest-benchmark
+pyparsing==2.4.7
+    # via packaging
+pytest==6.2.5
+    # via
+    #   -r opticalcentralizedattackdetector/requirements.in
+    #   pytest-benchmark
+pytest-benchmark==3.4.1
+    # via -r opticalcentralizedattackdetector/requirements.in
+python-dateutil==2.8.2
+    # via influxdb-client
+pytz==2021.3
+    # via influxdb-client
+redis==3.5.3
+    # via -r opticalcentralizedattackdetector/requirements.in
+rx==3.2.0
+    # via influxdb-client
+six==1.16.0
+    # via
+    #   grpcio
+    #   influxdb-client
+    #   python-dateutil
+toml==0.10.2
+    # via pytest
+urllib3==1.26.7
+    # via influxdb-client
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/src/centralizedattackdetector/service/CentralizedAttackDetectorService.py b/src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorService.py
similarity index 74%
rename from src/centralizedattackdetector/service/CentralizedAttackDetectorService.py
rename to src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorService.py
index 16c8841c9e09325e21f092ccece5aad84e7f53d6..a017292da6228bdf1ba67b48c1b1dcba5dff6ee8 100644
--- a/src/centralizedattackdetector/service/CentralizedAttackDetectorService.py
+++ b/src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorService.py
@@ -4,16 +4,16 @@ from concurrent import futures
 from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
 from grpc_health.v1.health_pb2 import HealthCheckResponse
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
-from centralizedattackdetector.proto.centralized_attack_detector_pb2_grpc import (
-    add_CentralizedAttackDetectorServiceServicer_to_server)
-from centralizedattackdetector.service.CentralizedAttackDetectorServiceServicerImpl import (
-    CentralizedAttackDetectorServiceServicerImpl)
-from centralizedattackdetector.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from opticalcentralizedattackdetector.proto.optical_centralized_attack_detector_pb2_grpc import (
+    add_OpticalCentralizedAttackDetectorServiceServicer_to_server)
+from opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl import (
+    OpticalCentralizedAttackDetectorServiceServicerImpl)
+from opticalcentralizedattackdetector.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
-class CentralizedAttackDetectorService:
+class OpticalCentralizedAttackDetectorService:
     def __init__(
         self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
         grace_period=GRPC_GRACE_PERIOD):
@@ -36,8 +36,8 @@ class CentralizedAttackDetectorService:
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
 
-        self.centralized_attack_detector_servicer = CentralizedAttackDetectorServiceServicerImpl()
-        add_CentralizedAttackDetectorServiceServicer_to_server(self.centralized_attack_detector_servicer, self.server)
+        self.centralized_attack_detector_servicer = OpticalCentralizedAttackDetectorServiceServicerImpl()
+        add_OpticalCentralizedAttackDetectorServiceServicer_to_server(self.centralized_attack_detector_servicer, self.server)
 
         self.health_servicer = HealthServicer(
             experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
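
# Illustrative sketch, not part of the patch: the wiring performed by the class
# above, stripped down for a local experiment. Note that importing the servicer
# module added below also instantiates clients towards the context, monitoring,
# service, dbscanserving and opticalattackmitigator components (plus an InfluxDB
# client), so those endpoints must be reachable.
import grpc
from concurrent import futures
from opticalcentralizedattackdetector.Config import GRPC_SERVICE_PORT
from opticalcentralizedattackdetector.proto.optical_centralized_attack_detector_pb2_grpc import (
    add_OpticalCentralizedAttackDetectorServiceServicer_to_server)
from opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl import (
    OpticalCentralizedAttackDetectorServiceServicerImpl)

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
add_OpticalCentralizedAttackDetectorServiceServicer_to_server(
    OpticalCentralizedAttackDetectorServiceServicerImpl(), server)
server.add_insecure_port('0.0.0.0:{}'.format(GRPC_SERVICE_PORT))
server.start()
server.wait_for_termination()
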
diff --git a/src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorServiceServicerImpl.py b/src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..48e72a72d2d68e4107957228fca04c9deb55ae4b
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/service/OpticalCentralizedAttackDetectorServiceServicerImpl.py
@@ -0,0 +1,126 @@
+import os, grpc, logging, random
+from influxdb import InfluxDBClient
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+from context.client.ContextClient import ContextClient
+from context.Config import GRPC_SERVICE_PORT as CONTEXT_GRPC_SERVICE_PORT
+from monitoring.client.monitoring_client import MonitoringClient
+from monitoring.Config import GRPC_SERVICE_PORT as MONITORING_GRPC_SERVICE_PORT
+from service.client.ServiceClient import ServiceClient
+from service.Config import GRPC_SERVICE_PORT as SERVICE_GRPC_SERVICE_PORT
+from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse, Sample
+from dbscanserving.client.DbscanServingClient import DbscanServingClient
+from dbscanserving.Config import GRPC_SERVICE_PORT as DBSCANSERVING_GRPC_SERVICE_PORT
+from opticalattackmitigator.client.OpticalAttackMitigatorClient import OpticalAttackMitigatorClient
+from opticalattackmitigator.proto.optical_attack_mitigator_pb2 import AttackDescription, AttackResponse
+from opticalattackmitigator.Config import GRPC_SERVICE_PORT as ATTACK_MITIGATOR_GRPC_SERVICE_PORT
+from opticalcentralizedattackdetector.proto.context_pb2 import (Empty,
+    Context,  ContextId,  ContextIdList,  ContextList,
+    Service,  ServiceId,  ServiceIdList,  ServiceList
+)
+from opticalcentralizedattackdetector.proto.monitoring_pb2 import KpiList
+from opticalcentralizedattackdetector.proto.optical_centralized_attack_detector_pb2_grpc import (
+    OpticalCentralizedAttackDetectorServiceServicer)
+from opticalcentralizedattackdetector.Config import (
+    CONTEXT_SERVICE_ADDRESS, SERVICE_SERVICE_ADDRESS, INFERENCE_SERVICE_ADDRESS, MONITORING_SERVICE_ADDRESS,
+    ATTACK_MITIGATOR_SERVICE_ADDRESS)
+
+
+LOGGER = logging.getLogger(__name__)
+
+SERVICE_NAME = 'OpticalCentralizedAttackDetector'
+METHOD_NAMES = ['NotifyServiceUpdate', 'DetectAttack', 'ReportSummarizedKpi', 'ReportKpi']
+METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+INFLUXDB_HOSTNAME = os.environ.get("INFLUXDB_HOSTNAME")
+INFLUXDB_USER = os.environ.get("INFLUXDB_USER")
+INFLUXDB_PASSWORD = os.environ.get("INFLUXDB_PASSWORD")
+INFLUXDB_DATABASE = os.environ.get("INFLUXDB_DATABASE")
+context_client: ContextClient = ContextClient(address=CONTEXT_SERVICE_ADDRESS, port=CONTEXT_GRPC_SERVICE_PORT)
+influxdb_client: InfluxDBClient = InfluxDBClient(host=MONITORING_SERVICE_ADDRESS, port=8086, username=INFLUXDB_USER, password=INFLUXDB_PASSWORD, database=INFLUXDB_DATABASE)
+monitoring_client: MonitoringClient = MonitoringClient(server=MONITORING_SERVICE_ADDRESS, port=MONITORING_GRPC_SERVICE_PORT)
+dbscanserving_client: DbscanServingClient = DbscanServingClient(address=INFERENCE_SERVICE_ADDRESS, port=DBSCANSERVING_GRPC_SERVICE_PORT)
+service_client: ServiceClient = ServiceClient(SERVICE_SERVICE_ADDRESS, SERVICE_GRPC_SERVICE_PORT)
+attack_mitigator_client: OpticalAttackMitigatorClient = OpticalAttackMitigatorClient(address=ATTACK_MITIGATOR_SERVICE_ADDRESS, port=ATTACK_MITIGATOR_GRPC_SERVICE_PORT)
+
+
+class OpticalCentralizedAttackDetectorServiceServicerImpl(OpticalCentralizedAttackDetectorServiceServicer):
+
+    def __init__(self):
+        LOGGER.debug('Creating Servicer...')
+        LOGGER.debug('Servicer Created')
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def NotifyServiceUpdate(self, request : Service, context : grpc.ServicerContext) -> Empty:
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def DetectAttack(self, request : Empty, context : grpc.ServicerContext) -> Empty:
+
+        # retrieve the list of current contexts
+        context_ids: ContextIdList = context_client.ListContextIds(Empty())
+
+        # for each context, retrieve list of current services
+        services = []
+        for context_id in context_ids.context_ids:
+
+            context_services: ServiceList = context_client.ListServices(context_id)
+            for service in context_services.services:
+                services.append(service)
+
+        # retrieve all monitoring samples from InfluxDB (filtered per device endpoint below)
+        results = influxdb_client.query('select * from samples;')
+
+        for service in services:
+            for endpoint in service.service_endpoint_ids:
+                # get instant KPI for this endpoint
+                LOGGER.warning(f'service: {service.service_id.service_uuid.uuid}\t endpoint: {endpoint.endpoint_uuid.uuid}\tdevice: {endpoint.device_id.device_uuid.uuid}')
+                # how to get all KPIs for a particular device?
+                points = results.get_points(tags={'device_id': endpoint.device_id.device_uuid.uuid})
+                LOGGER.debug('points: {}'.format(points))
+                for point in points:
+                    LOGGER.debug('\tpoint: {}'.format(point))
+
+                # run attack detection for this endpoint using randomly generated samples (placeholder data)
+                request: DetectionRequest = DetectionRequest()
+
+                request.num_samples = 310
+                request.num_features = 100
+                request.eps = 100.5
+                request.min_samples = 50
+
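+                # three groups of synthetic samples are generated below; the last, small group lies far
+                # from the others and is expected to be labelled as noise (cluster index -1) by DBSCAN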
+                for _ in range(200):
+                    grpc_sample = Sample()
+                    for __ in range(100):
+                        grpc_sample.features.append(random.uniform(0., 10.))
+                    request.samples.append(grpc_sample)
+                    
+                for _ in range(100):
+                    grpc_sample = Sample()
+                    for __ in range(100):
+                        grpc_sample.features.append(random.uniform(50., 60.))
+                    request.samples.append(grpc_sample)
+                    
+                for _ in range(10):
+                    grpc_sample = Sample()
+                    for __ in range(100):
+                        grpc_sample.features.append(random.uniform(5000., 6000.))
+                    request.samples.append(grpc_sample)
+
+                response: DetectionResponse = dbscanserving_client.Detect(request)
+
+                if -1 in response.cluster_indices:
+                    # attack detected: notify the attack mitigator about the affected service
+                    attack = AttackDescription()
+                    attack.cs_id.uuid = service.service_id.service_uuid.uuid
+                    response: AttackResponse = attack_mitigator_client.NotifyAttack(attack)
+
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ReportSummarizedKpi(self, request : KpiList, context : grpc.ServicerContext) -> Empty:
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ReportKpi(self, request : KpiList, context : grpc.ServicerContext) -> Empty:
+        return Empty()
diff --git a/src/opticalcentralizedattackdetector/service/__init__.py b/src/opticalcentralizedattackdetector/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalcentralizedattackdetector/service/__main__.py b/src/opticalcentralizedattackdetector/service/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..37cff6b7dec1554b69d93148307e074240f2dca2
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/service/__main__.py
@@ -0,0 +1,80 @@
+import os, logging, signal, sys, time, threading, multiprocessing
+from prometheus_client import start_http_server
+
+from common.Settings import get_setting
+from opticalcentralizedattackdetector.Config import (
+    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT,
+    MONITORING_INTERVAL)
+from opticalcentralizedattackdetector.proto.context_pb2 import Empty
+from opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorService import OpticalCentralizedAttackDetectorService
+from opticalcentralizedattackdetector.client.OpticalCentralizedAttackDetectorClient import OpticalCentralizedAttackDetectorClient
+
+terminate = threading.Event()
+LOGGER = None
+
+client: OpticalCentralizedAttackDetectorClient = None
+
+def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
+    LOGGER.warning('Terminate signal received')
+    terminate.set()
+
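+# periodically invokes the DetectAttack RPC on the local gRPC server until the terminate event is set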
+def detect_attack(service_port, monitoring_interval):
+    time.sleep(10)  # wait for the service to start
+    LOGGER.info("Starting the attack detection loop")
+    client = OpticalCentralizedAttackDetectorClient(address='localhost', port=service_port)
+    client.connect()
+    while True:  # loop until the terminate event is set
+        if terminate.is_set():
+            LOGGER.warning("Stopping execution...")
+            client.close()
+            break
+        client.DetectAttack(Empty())
+        LOGGER.debug("Sleeping for {} seconds...".format(monitoring_interval))
+        time.sleep(monitoring_interval)
+
+def main():
+    global LOGGER # pylint: disable=global-statement
+
+    service_port = get_setting('OPTICALCENTRALIZEDATTACKDETECTORSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
+    max_workers  = get_setting('MAX_WORKERS',                                               default=GRPC_MAX_WORKERS )
+    grace_period = get_setting('GRACE_PERIOD',                                              default=GRPC_GRACE_PERIOD)
+    log_level    = get_setting('LOG_LEVEL',                                                 default=LOG_LEVEL        )
+    metrics_port = get_setting('METRICS_PORT',                                              default=METRICS_PORT     )
+    monitoring_interval = get_setting('MONITORING_INTERVAL', default=MONITORING_INTERVAL)
+
+    logging.basicConfig(level=log_level)
+    LOGGER = logging.getLogger(__name__)
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    LOGGER.info('Starting...')
+
+    # Start metrics server
+    start_http_server(metrics_port)
+
+    # Starting the Optical Centralized Attack Detector service
+    grpc_service = OpticalCentralizedAttackDetectorService(
+        port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service.start()
+
+    # p = multiprocessing.Process(target=detect_attack, args=(service_port, monitoring_interval, ))
+    # p.start()
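+    # run the detection loop in the main thread; it returns only when the terminate event is set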
+    detect_attack(service_port, monitoring_interval)
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(timeout=0.1): pass
+
+    LOGGER.info('Terminating...')
+    grpc_service.stop()
+    # p.kill()
+
+    LOGGER.info('Bye')
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/opticalcentralizedattackdetector/tests/__init__.py b/src/opticalcentralizedattackdetector/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/opticalcentralizedattackdetector/tests/example_objects.py b/src/opticalcentralizedattackdetector/tests/example_objects.py
new file mode 100644
index 0000000000000000000000000000000000000000..206d17cb70869a66cf1739f54a83598b1e031edb
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/tests/example_objects.py
@@ -0,0 +1,201 @@
+from copy import deepcopy
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from context.proto.context_pb2 import (
+    ConfigActionEnum, DeviceDriverEnum, DeviceOperationalStatusEnum, ServiceStatusEnum, ServiceTypeEnum)
+
+# Some example objects to be used by the tests
+
+# Helper methods
+def config_rule(action, resource_key, resource_value):
+    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
+
+def endpoint_id(topology_id, device_id, endpoint_uuid):
+    return {'topology_id': deepcopy(topology_id), 'device_id': deepcopy(device_id),
+            'endpoint_uuid': {'uuid': endpoint_uuid}}
+
+def endpoint(topology_id, device_id, endpoint_uuid, endpoint_type):
+    return {'endpoint_id': endpoint_id(topology_id, device_id, endpoint_uuid), 'endpoint_type': endpoint_type}
+
+# use "deepcopy" to prevent modifications made during the tests from propagating across these shared objects
+CONTEXT_ID = {'context_uuid': {'uuid': DEFAULT_CONTEXT_UUID}}
+CONTEXT = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_ids': [],
+    'service_ids': [],
+}
+
+CONTEXT_ID_2 = {'context_uuid': {'uuid': 'test'}}
+CONTEXT_2 = {
+    'context_id': deepcopy(CONTEXT_ID_2),
+    'topology_ids': [],
+    'service_ids': [],
+}
+
+TOPOLOGY_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_uuid': {'uuid': DEFAULT_TOPOLOGY_UUID},
+}
+TOPOLOGY = {
+    'topology_id': deepcopy(TOPOLOGY_ID),
+    'device_ids': [],
+    'link_ids': [],
+}
+
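+# three packet routers (DEV1, DEV2 and DEV3) interconnected in a full mesh by the links and services below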
+DEVICE1_UUID = 'DEV1'
+DEVICE1_ID = {'device_uuid': {'uuid': DEVICE1_UUID}}
+DEVICE1 = {
+    'device_id': deepcopy(DEVICE1_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value1'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value2'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value3'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP2', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP3', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+DEVICE2_UUID = 'DEV2'
+DEVICE2_ID = {'device_uuid': {'uuid': DEVICE2_UUID}}
+DEVICE2 = {
+    'device_id': deepcopy(DEVICE2_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP1', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP3', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+DEVICE3_UUID = 'DEV3'
+DEVICE3_ID = {'device_uuid': {'uuid': DEVICE3_UUID}}
+DEVICE3 = {
+    'device_id': deepcopy(DEVICE3_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP1', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP2', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+LINK_DEV1_DEV2_UUID = 'DEV1/EP2 ==> DEV2/EP1'
+LINK_DEV1_DEV2_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV2_UUID}}
+LINK_DEV1_DEV2 = {
+    'link_id': deepcopy(LINK_DEV1_DEV2_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP2'),
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP1'),
+    ]
+}
+
+LINK_DEV2_DEV3_UUID = 'DEV2/EP3 ==> DEV3/EP2'
+LINK_DEV2_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV2_DEV3_UUID}}
+LINK_DEV2_DEV3 = {
+    'link_id': deepcopy(LINK_DEV2_DEV3_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP3'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP2'),
+    ]
+}
+
+LINK_DEV1_DEV3_UUID = 'DEV1/EP3 ==> DEV3/EP1'
+LINK_DEV1_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV3_UUID}}
+LINK_DEV1_DEV3 = {
+    'link_id': deepcopy(LINK_DEV1_DEV3_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP3'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP1'),
+    ]
+}
+
+SERVICE_DEV1_DEV2_UUID = 'SVC:DEV1/EP100-DEV2/EP100'
+SERVICE_DEV1_DEV2_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV1_DEV2_UUID},
+}
+SERVICE_DEV1_DEV2 = {
+    'service_id': deepcopy(SERVICE_DEV1_DEV2_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '15.2'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '1.2'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
+
+SERVICE_DEV1_DEV3_UUID = 'SVC:DEV1/EP100-DEV3/EP100'
+SERVICE_DEV1_DEV3_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV1_DEV3_UUID},
+}
+SERVICE_DEV1_DEV3 = {
+    'service_id': deepcopy(SERVICE_DEV1_DEV3_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '5.8'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '0.1'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
+
+SERVICE_DEV2_DEV3_UUID = 'SVC:DEV2/EP100-DEV3/EP100'
+SERVICE_DEV2_DEV3_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV2_DEV3_UUID},
+}
+SERVICE_DEV2_DEV3 = {
+    'service_id': deepcopy(SERVICE_DEV2_DEV3_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '23.1'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '3.4'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
diff --git a/src/opticalcentralizedattackdetector/tests/test_unitary.py b/src/opticalcentralizedattackdetector/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..a04afdf2379569ec4d723fa9c5c0895a0d9c47f8
--- /dev/null
+++ b/src/opticalcentralizedattackdetector/tests/test_unitary.py
@@ -0,0 +1,176 @@
+import logging, pytest
+from unittest.mock import patch
+from opticalcentralizedattackdetector.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from opticalcentralizedattackdetector.client.OpticalCentralizedAttackDetectorClient import OpticalCentralizedAttackDetectorClient
+from opticalcentralizedattackdetector.proto.context_pb2 import ContextIdList, ContextId, Empty, Service, ServiceList
+from opticalcentralizedattackdetector.proto.monitoring_pb2 import Kpi, KpiList
+from opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorService import OpticalCentralizedAttackDetectorService
+from .example_objects import CONTEXT_ID, CONTEXT_ID_2, SERVICE_DEV1_DEV2
+
+port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
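+
+# NOTE: the tests below patch the module-level clients of the servicer implementation,
+# so they do not require any other micro-service to be running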
+
+@pytest.fixture(scope='session')
+def optical_centralized_attack_detector_service():
+    _service = OpticalCentralizedAttackDetectorService(
+        port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+    # mocker_context_client = mock.patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client')
+    # mocker_context_client.start()
+
+    # mocker_influx_db = mock.patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client')
+    # mocker_influx_db.start()
+
+    _service.start()
+    yield _service
+    _service.stop()
+    # mocker_context_client.stop()
+    # mocker_influx_db.stop()
+
+@pytest.fixture(scope='session')
+def optical_centralized_attack_detector_client(optical_centralized_attack_detector_service):
+    _client = OpticalCentralizedAttackDetectorClient(address='127.0.0.1', port=port)
+    yield _client
+    _client.close()
+
+def test_notify_service_update(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient):
+    service = Service()
+    optical_centralized_attack_detector_client.NotifyServiceUpdate(service)
+
+def test_detect_attack_no_contexts(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb:
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+        context.ListContextIds.assert_called_once()
+        influxdb.query.assert_called_once()
+        context.ListServices.assert_not_called()
+
+def test_detect_attack_with_context(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient,):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb:
+        # setting up the mock
+        cid_list = ContextIdList()
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID))
+        context.ListContextIds.return_value = cid_list
+
+        # running the test
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+
+        # checking behavior
+        context.ListContextIds.assert_called_once()
+        context.ListServices.assert_called_with(cid_list.context_ids[0])
+        influxdb.query.assert_called_once()
+
+def test_detect_attack_with_contexts(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient,):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb:
+        # setting up the mock
+        cid_list = ContextIdList()
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID))
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID_2))
+        context.ListContextIds.return_value = cid_list
+
+        # running the test
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+
+        # checking behavior
+        context.ListContextIds.assert_called_once()
+        context.ListServices.assert_any_call(cid_list.context_ids[0])
+        context.ListServices.assert_any_call(cid_list.context_ids[1])
+        influxdb.query.assert_called_once()
+
+def test_detect_attack_with_service(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient,):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.dbscanserving_client') as dbscan:
+
+        # setting up the mock
+        cid_list = ContextIdList()
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID))
+        context.ListContextIds.return_value = cid_list
+
+        service_list = ServiceList()
+        service_list.services.append(Service(**SERVICE_DEV1_DEV2))
+        context.ListServices.return_value = service_list
+
+        influxdb.query.return_value.get_points.return_value = [(1, 2), (3, 4)]
+
+        # running the test
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+
+        # checking behavior
+        context.ListContextIds.assert_called_once()
+        context.ListServices.assert_called_with(cid_list.context_ids[0])
+        influxdb.query.assert_called_once()
+        dbscan.Detect.assert_called()
+
+def test_detect_attack_no_attack(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient,):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.dbscanserving_client') as dbscan, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.attack_mitigator_client') as mitigator:
+
+        # setting up the mock
+        cid_list = ContextIdList()
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID))
+        context.ListContextIds.return_value = cid_list
+
+        service_list = ServiceList()
+        service_list.services.append(Service(**SERVICE_DEV1_DEV2))
+        context.ListServices.return_value = service_list
+
+        # dbscan.Detect.return_value = object()
+        dbscan.Detect.return_value.cluster_indices = [0, 1, 2, 3, 4, 5]
+
+        # running the test
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+
+        # checking behavior
+        context.ListContextIds.assert_called_once()
+        context.ListServices.assert_called_with(cid_list.context_ids[0])
+        influxdb.query.assert_called_once()
+        dbscan.Detect.assert_called()
+        mitigator.NotifyAttack.assert_not_called()
+
+def test_detect_attack_with_attack(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient,):
+    with patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.context_client') as context, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.influxdb_client') as influxdb, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.dbscanserving_client') as dbscan, \
+         patch('opticalcentralizedattackdetector.service.OpticalCentralizedAttackDetectorServiceServicerImpl.attack_mitigator_client') as mitigator:
+
+        # setting up the mock
+        cid_list = ContextIdList()
+        cid_list.context_ids.append(ContextId(**CONTEXT_ID))
+        context.ListContextIds.return_value = cid_list
+
+        service_list = ServiceList()
+        service_list.services.append(Service(**SERVICE_DEV1_DEV2))
+        context.ListServices.return_value = service_list
+
+        # dbscan.Detect.return_value = object()
+        dbscan.Detect.return_value.cluster_indices = [0, 1, 2, 3, 4, -1]
+
+        # running the test
+        request = Empty()
+        optical_centralized_attack_detector_client.DetectAttack(request)
+
+        # checking behavior
+        context.ListContextIds.assert_called_once()
+        context.ListServices.assert_called_with(cid_list.context_ids[0])
+        influxdb.query.assert_called_once()
+        dbscan.Detect.assert_called()
+        mitigator.NotifyAttack.assert_called()
+
+def test_report_summarized_kpi(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient):
+    kpi_list = KpiList()
+    optical_centralized_attack_detector_client.ReportSummarizedKpi(kpi_list)
+
+def test_report_kpi(optical_centralized_attack_detector_client: OpticalCentralizedAttackDetectorClient):
+    kpi_list = KpiList()
+    optical_centralized_attack_detector_client.ReportKpi(kpi_list)
diff --git a/src/policy/.gitlab-ci.yml b/src/policy/.gitlab-ci.yml
index 18ab5531ebf756cf01d9eab39d06374510c361df..13a071124e2cac693401e155412dee48e1f36b56 100644
--- a/src/policy/.gitlab-ci.yml
+++ b/src/policy/.gitlab-ci.yml
@@ -33,7 +33,7 @@ unit_test policy:
         - src/$IMAGE_NAME/**
         - .gitlab-ci.yml
 
-# Deployment of automation service in Kubernetes Cluster
+# Deployment of policy service in Kubernetes Cluster
 deploy policy:
   stage: deploy
   needs:
diff --git a/src/policy/README.md b/src/policy/README.md
index a32311b6845bed01b74207fbe9c0866c098831ab..4268343577871cf98b9f701a32cd8a1ff4d9a72a 100644
--- a/src/policy/README.md
+++ b/src/policy/README.md
@@ -1,5 +1,6 @@
-# How to run locally the policy service (tested in Ubuntu 20.04)
+# Policy Management TeraFlow OS service
 
+The Policy Management service is tested on Ubuntu 20.04. Follow the instructions below to build, test, and run this service in your local environment.
 
 ## Compile code
 
diff --git a/src/service/.gitlab-ci.yml b/src/service/.gitlab-ci.yml
index edff5aad2ab260310668f60e04fd7e64dc2d0154..e4da10db792f6782cdeb80e631c17271a657e962 100644
--- a/src/service/.gitlab-ci.yml
+++ b/src/service/.gitlab-ci.yml
@@ -1,8 +1,7 @@
-# Build, tag, and push the Docker images to the GitLab Docker registry
+# Build, tag and push the Docker image to the GitLab registry
 build service:
   variables:
     IMAGE_NAME: 'service' # name of the microservice
-    IMAGE_NAME_TEST: 'service-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -11,50 +10,79 @@ build service:
     - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
 
-# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-unit_test service:
+# Run unit tests on the component
+unit test service:
   variables:
     IMAGE_NAME: 'service' # name of the microservice
-    IMAGE_NAME_TEST: 'service-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
     - build service
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 3030:3030 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
-    - docker ps -a
+    - docker run --name $IMAGE_NAME -d -p 3030:3030 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; coverage report --include='${IMAGE_NAME}/*' --show-missing"
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
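+  # the regex above extracts the overall percentage from the "TOTAL" line of the coverage report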
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker rm -f $IMAGE_NAME
+    - docker network rm teraflowbridge
   rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
     - changes:
-      - src/$IMAGE_NAME/**
+      - src/$IMAGE_NAME/**/*.{py,in,yml}
+      - src/$IMAGE_NAME/Dockerfile
+      - src/$IMAGE_NAME/tests/*.py
+      - src/$IMAGE_NAME/tests/Dockerfile
+      - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml
+  artifacts:
+    when: always
+    reports:
+      junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+      cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
 
 # Deployment of the service in Kubernetes Cluster
 deploy service:
+  variables:
+    IMAGE_NAME: 'service' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: deploy
   needs:
-    - build service
-    - unit_test service
-    - dependencies all
-    - integ_test execute
+    - unit test service
+    # - integ_test execute
   script:
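+    # pin the image tag referenced in the manifest to $IMAGE_TAG before applying it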
+    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
     - kubectl version
     - kubectl get all
-    - kubectl apply -f "manifests/serviceservice.yaml"
-    - kubectl delete pods --selector app=serviceservice
+    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
     - kubectl get all
+  # environment:
+  #   name: test
+  #   url: https://example.com
+  #   kubernetes:
+  #     namespace: test
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+      when: manual    
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+      when: manual
diff --git a/src/service/Config.py b/src/service/Config.py
index 592392a1bde5757f83fd67589a7b7b7d6cc3e6c0..79ca2232c1ee273dbda9d3c6642ded229b32ab6f 100644
--- a/src/service/Config.py
+++ b/src/service/Config.py
@@ -10,3 +10,10 @@ GRPC_GRACE_PERIOD = 60
 
 # Prometheus settings
 METRICS_PORT = 9192
+
+# Dependency micro-service connection settings
+CONTEXT_SERVICE_HOST = '127.0.0.1'
+CONTEXT_SERVICE_PORT = 1010
+
+DEVICE_SERVICE_HOST = '127.0.0.1'
+DEVICE_SERVICE_PORT = 2020
diff --git a/src/service/Dockerfile b/src/service/Dockerfile
index 306379a06d33c67c082540e19f3b1ca349becff2..3e8dcaa31acdc2c97f71fbd12a3ca730e93a7677 100644
--- a/src/service/Dockerfile
+++ b/src/service/Dockerfile
@@ -29,6 +29,9 @@ RUN python3 -m pip install -r service/requirements.in
 
 # Add files into working directory
 COPY common/. common
+COPY context/. context
+COPY device/. device
+COPY monitoring/. monitoring
 COPY service/. service
 
 # Start service service
diff --git a/src/service/_old_code_backup/ServiceServiceServicerImpl.py b/src/service/_old_code_backup/ServiceServiceServicerImpl.py
deleted file mode 100644
index b1f370abc1d3eed1b3b756bf31b95c01209539fd..0000000000000000000000000000000000000000
--- a/src/service/_old_code_backup/ServiceServiceServicerImpl.py
+++ /dev/null
@@ -1,277 +0,0 @@
-from typing import Dict
-import grpc, logging
-from prometheus_client import Counter, Histogram
-from common.database.api.Database import Database
-from common.exceptions.ServiceException import ServiceException
-from service.proto.context_pb2 import Empty
-from service.proto.service_pb2 import ConnectionList, Service, ServiceId, ServiceList
-from service.proto.service_pb2_grpc import ServiceServiceServicer
-from service.service.Tools import check_service_id_request, check_service_request
-
-LOGGER = logging.getLogger(__name__)
-
-GETSERVICELIST_COUNTER_STARTED    = Counter  ('service_getservicelist_counter_started',
-                                              'Service:GetServiceList counter of requests started'  )
-GETSERVICELIST_COUNTER_COMPLETED  = Counter  ('service_getservicelist_counter_completed',
-                                              'Service:GetServiceList counter of requests completed')
-GETSERVICELIST_COUNTER_FAILED     = Counter  ('service_getservicelist_counter_failed',
-                                              'Service:GetServiceList counter of requests failed'   )
-GETSERVICELIST_HISTOGRAM_DURATION = Histogram('service_getservicelist_histogram_duration',
-                                              'Service:GetServiceList histogram of request duration')
-
-CREATESERVICE_COUNTER_STARTED    = Counter  ('service_createservice_counter_started',
-                                             'Service:CreateService counter of requests started'  )
-CREATESERVICE_COUNTER_COMPLETED  = Counter  ('service_createservice_counter_completed',
-                                             'Service:CreateService counter of requests completed')
-CREATESERVICE_COUNTER_FAILED     = Counter  ('service_createservice_counter_failed',
-                                             'Service:CreateService counter of requests failed'   )
-CREATESERVICE_HISTOGRAM_DURATION = Histogram('service_createservice_histogram_duration',
-                                             'Service:CreateService histogram of request duration')
-
-UPDATESERVICE_COUNTER_STARTED    = Counter  ('service_updateservice_counter_started',
-                                             'Service:UpdateService counter of requests started'  )
-UPDATESERVICE_COUNTER_COMPLETED  = Counter  ('service_updateservice_counter_completed',
-                                             'Service:UpdateService counter of requests completed')
-UPDATESERVICE_COUNTER_FAILED     = Counter  ('service_updateservice_counter_failed',
-                                             'Service:UpdateService counter of requests failed'   )
-UPDATESERVICE_HISTOGRAM_DURATION = Histogram('service_updateservice_histogram_duration',
-                                             'Service:UpdateService histogram of request duration')
-
-DELETESERVICE_COUNTER_STARTED    = Counter  ('service_deleteservice_counter_started',
-                                             'Service:DeleteService counter of requests started'  )
-DELETESERVICE_COUNTER_COMPLETED  = Counter  ('service_deleteservice_counter_completed',
-                                             'Service:DeleteService counter of requests completed')
-DELETESERVICE_COUNTER_FAILED     = Counter  ('service_deleteservice_counter_failed',
-                                             'Service:DeleteService counter of requests failed'   )
-DELETESERVICE_HISTOGRAM_DURATION = Histogram('service_deleteservice_histogram_duration',
-                                             'Service:DeleteService histogram of request duration')
-
-GETSERVICEBYID_COUNTER_STARTED    = Counter  ('service_getservicebyid_counter_started',
-                                              'Service:GetServiceById counter of requests started'  )
-GETSERVICEBYID_COUNTER_COMPLETED  = Counter  ('service_getservicebyid_counter_completed',
-                                              'Service:GetServiceById counter of requests completed')
-GETSERVICEBYID_COUNTER_FAILED     = Counter  ('service_getservicebyid_counter_failed',
-                                              'Service:GetServiceById counter of requests failed'   )
-GETSERVICEBYID_HISTOGRAM_DURATION = Histogram('service_getservicebyid_histogram_duration',
-                                              'Service:GetServiceById histogram of request duration')
-
-GETCONNECTIONLIST_COUNTER_STARTED    = Counter  ('service_getconnectionlist_counter_started',
-                                                 'Service:GetConnectionList counter of requests started'  )
-GETCONNECTIONLIST_COUNTER_COMPLETED  = Counter  ('service_getconnectionlist_counter_completed',
-                                                 'Service:GetConnectionList counter of requests completed')
-GETCONNECTIONLIST_COUNTER_FAILED     = Counter  ('service_getconnectionlist_counter_failed',
-                                                 'Service:GetConnectionList counter of requests failed'   )
-GETCONNECTIONLIST_HISTOGRAM_DURATION = Histogram('service_getconnectionlist_histogram_duration',
-                                                 'Service:GetConnectionList histogram of request duration')
-
-class ServiceServiceServicerImpl(ServiceServiceServicer):
-    def __init__(self, database : Database):
-        LOGGER.debug('Creating Servicer...')
-        self.database = database
-        LOGGER.debug('Servicer Created')
-
-    @GETSERVICELIST_HISTOGRAM_DURATION.time()
-    def GetServiceList(self, request : Empty, grpc_context : grpc.ServicerContext) -> ServiceList:
-        GETSERVICELIST_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('GetServiceList request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-
-            # ----- Retrieve data from the database --------------------------------------------------------------------
-            db_context_uuids = self.database.contexts.get()
-            json_services = []
-            for db_context_uuid in db_context_uuids:
-                db_context = self.database.context(db_context_uuid)
-                json_services.extend(db_context.dump_services())
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = ServiceList(cs=json_services)
-            LOGGER.debug('GetServiceList reply: {}'.format(str(reply)))
-            GETSERVICELIST_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:                               # pragma: no cover (ServiceException not thrown)
-            LOGGER.exception('GetServiceList exception')
-            GETSERVICELIST_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('GetServiceList exception')
-            GETSERVICELIST_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @CREATESERVICE_HISTOGRAM_DURATION.time()
-    def CreateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId:
-        CREATESERVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('CreateService request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-                check_service_request('CreateService', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(context_id)
-            db_service = db_context.service(service_id)
-            db_service.create(service_type, service_config, service_state)
-
-            for db_endpoint in db_endpoints:
-                service_endpoint_id = '{}:{}/{}'.format(
-                    db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-                db_service.endpoint(service_endpoint_id).create(db_endpoint)
-
-            for cons_type,cons_value in constraint_tuples: db_service.constraint(cons_type).create(cons_value)
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = ServiceId(**db_service.dump_id())
-            LOGGER.debug('CreateService reply: {}'.format(str(reply)))
-            CREATESERVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('CreateService exception')
-            CREATESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('CreateService exception')
-            CREATESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @UPDATESERVICE_HISTOGRAM_DURATION.time()
-    def UpdateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId:
-        UPDATESERVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('UpdateService request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-                check_service_request('UpdateService', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(context_id)
-            db_service = db_context.service(service_id)
-
-            # Update service attributes
-            db_service.update(update_attributes={
-                'service_type'  : service_type,
-                'service_config': service_config,
-                'service_state' : service_state,
-            })
-
-            # Update service constraints; first add missing, then remove existing, but not added to Service
-            db_service_constraint_types = set(db_service.constraints.get())
-            for constraint_type,constraint_value in constraint_tuples:
-                if constraint_type in db_service_constraint_types:
-                    db_service.constraint(constraint_type).update(update_attributes={
-                        'constraint_value': constraint_value
-                    })
-                else:
-                    db_service.constraint(constraint_type).create(constraint_value)
-                db_service_constraint_types.discard(constraint_type)
-
-            for constraint_type in db_service_constraint_types:
-                db_service.constraint(constraint_type).delete()
-
-            # Update service endpoints; first add missing, then remove existing, but not added to Service
-            db_service_endpoint_uuids = set(db_service.endpoints.get())
-            for db_endpoint in db_endpoints:
-                service_endpoint_id = '{}:{}/{}'.format(
-                    db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-                if service_endpoint_id not in db_service_endpoint_uuids:
-                    db_service.endpoint(service_endpoint_id).create(db_endpoint)
-                db_service_endpoint_uuids.discard(service_endpoint_id)
-
-            for db_service_endpoint_uuid in db_service_endpoint_uuids:
-                db_service.endpoint(db_service_endpoint_uuid).delete()
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = ServiceId(**db_service.dump_id())
-            LOGGER.debug('UpdateService reply: {}'.format(str(reply)))
-            UPDATESERVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('UpdateService exception')
-            UPDATESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('UpdateService exception')
-            UPDATESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @DELETESERVICE_HISTOGRAM_DURATION.time()
-    def DeleteService(self, request : ServiceId, grpc_context : grpc.ServicerContext) -> Empty:
-        DELETESERVICE_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('DeleteService request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            context_id, service_id = check_service_id_request('DeleteService', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(context_id)
-            db_service = db_context.service(service_id)
-            db_service.delete()
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = Empty()
-            LOGGER.debug('DeleteService reply: {}'.format(str(reply)))
-            DELETESERVICE_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('DeleteService exception')
-            DELETESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('DeleteService exception')
-            DELETESERVICE_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @GETSERVICEBYID_HISTOGRAM_DURATION.time()
-    def GetServiceById(self, request : ServiceId, grpc_context : grpc.ServicerContext) -> Service:
-        GETSERVICEBYID_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('GetServiceById request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            context_id, service_id = check_service_id_request('GetServiceById', request, self.database, LOGGER)
-
-            # ----- Retrieve data from the database --------------------------------------------------------------------
-            db_context = self.database.context(context_id)
-            db_service = db_context.service(service_id)
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = Service(**db_service.dump())
-            LOGGER.debug('GetServiceById reply: {}'.format(str(reply)))
-            GETSERVICEBYID_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('GetServiceById exception')
-            GETSERVICEBYID_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('GetServiceById exception')
-            GETSERVICEBYID_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @GETCONNECTIONLIST_HISTOGRAM_DURATION.time()
-    def GetConnectionList(self, request : Empty, grpc_context : grpc.ServicerContext) -> ConnectionList:
-        GETCONNECTIONLIST_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('GetConnectionList request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-
-            # ----- Retrieve data from the database --------------------------------------------------------------------
-            raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, 'RPC GetConnectionList() not implemented')
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            #reply = ConnectionList()
-            #LOGGER.debug('GetConnectionList reply: {}'.format(str(reply)))
-            #GETCONNECTIONLIST_COUNTER_COMPLETED.inc()
-            #return reply
-        except ServiceException as e:
-            LOGGER.exception('GetConnectionList exception')
-            GETCONNECTIONLIST_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('GetConnectionList exception')
-            GETCONNECTIONLIST_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/service/_old_code_backup/Tools.py b/src/service/_old_code_backup/Tools.py
deleted file mode 100644
index 62d602c058a55cdd229050cf3ba6301b4519fdb3..0000000000000000000000000000000000000000
--- a/src/service/_old_code_backup/Tools.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import grpc, logging
-from typing import Dict, List, Set, Tuple
-from common.Checkers import chk_options, chk_string
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_TOPOLOGY_ID
-from common.database.api.context.topology.device.Endpoint import Endpoint
-from common.database.api.context.service.ServiceState import ServiceState, servicestate_enum_values, \
-    to_servicestate_enum
-from common.database.api.context.service.ServiceType import ServiceType, servicetype_enum_values, to_servicetype_enum
-from common.exceptions.ServiceException import ServiceException
-from common.tools.service.DeviceCheckers import check_device_endpoint_exists
-from common.tools.service.EndpointIdCheckers import check_endpoint_id
-from common.tools.service.EnumCheckers import check_enum
-from common.tools.service.ServiceCheckers import check_service_exists, check_service_not_exists
-from service.proto.context_pb2 import Constraint
-from service.proto.service_pb2 import Service, ServiceId
-
-# For each method name, define acceptable service types. Empty set means accept all.
-ACCEPTED_SERVICE_TYPES : Dict[str, Set[ServiceType]] = {
-    'CreateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
-    'UpdateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
-}
-
-# For each method name, define acceptable service states. Empty set means accept all.
-ACCEPTED_SERVICE_STATES : Dict[str, Set[ServiceState]] = {
-    'CreateService': set([ServiceState.PLANNED]),
-    'UpdateService': set([ServiceState.PLANNED, ServiceState.ACTIVE, ServiceState.PENDING_REMOVAL]),
-}
-
-def _check_service_exists(method_name : str, database : Database, context_id : str, service_id : str):
-    if method_name in ['CreateService']:
-        check_service_not_exists(database, context_id, service_id)
-    elif method_name in ['UpdateService', 'DeleteService', 'GetServiceById']:
-        check_service_exists(database, context_id, service_id)
-    else:                                       # pragma: no cover (test requires malforming the code)
-        msg = 'Unexpected condition [_check_service_exists(method_name={}, context_id={}, service_id={})]'
-        msg = msg.format(str(method_name), str(context_id), str(service_id))
-        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
-
-def check_service_type(method_name : str, value : str) -> ServiceType:
-    return check_enum('ServiceType', method_name, value, to_servicetype_enum, ACCEPTED_SERVICE_TYPES)
-
-def check_service_state(method_name : str, value : str) -> ServiceState:
-    return check_enum('ServiceState', method_name, value, to_servicestate_enum, ACCEPTED_SERVICE_STATES)
-
-def check_service_constraint(
-    logger : logging.Logger, constraint_number : int, parent_name : str, constraint : Constraint,
-    add_constraints : Dict[str, Dict[str, Set[str]]]) -> Tuple[str, str]:
-
-    try:
-        constraint_type  = chk_string('constraint[#{}].constraint_type'.format(constraint_number),
-                                      constraint.constraint_type,
-                                      allow_empty=False)
-        constraint_value = chk_string('constraint[#{}].constraint_value'.format(constraint_number),
-                                      constraint.constraint_value,
-                                      allow_empty=False)
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    if constraint_type in add_constraints:
-        msg = 'Duplicated ConstraintType({}) in {}.'
-        msg = msg.format(constraint_type, parent_name)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    add_constraints[constraint_type] = constraint_value
-    return constraint_type, constraint_value
-
-def check_service_request(
-    method_name : str, request : Service, database : Database, logger : logging.Logger
-    ) -> Tuple[str, str, ServiceType, str, ServiceState, List[Endpoint], List[Tuple[str, str]]]:
-
-    # ----- Parse attributes -------------------------------------------------------------------------------------------
-    try:
-        context_id     = chk_string ('service.cs_id.contextId.contextUuid.uuid',
-                                    request.cs_id.contextId.contextUuid.uuid,
-                                    allow_empty=False)
-        service_id     = chk_string ('service.cs_id.cs_id.uuid',
-                                    request.cs_id.cs_id.uuid,
-                                    allow_empty=False)
-        service_type   = chk_options('service.serviceType',
-                                    request.serviceType,
-                                    servicetype_enum_values())
-        service_config = chk_string ('service.serviceConfig.serviceConfig',
-                                    request.serviceConfig.serviceConfig,
-                                    allow_empty=True)
-        service_state  = chk_options('service.serviceState.serviceState',
-                                    request.serviceState.serviceState,
-                                    servicestate_enum_values())
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    service_type = check_service_type(method_name, service_type)
-    service_state = check_service_state(method_name, service_state)
-
-    # ----- Check if service exists in database ------------------------------------------------------------------------
-    _check_service_exists(method_name, database, context_id, service_id)
-
-    # ----- Parse constraints ------------------------------------------------------------------------------------------
-    add_constraints : Dict[str, str] = {}
-    constraint_tuples : List[Tuple[str, str]] = []
-    for constraint_number,constraint in enumerate(request.constraint):
-        parent_name = 'Constraint(#{}) of Context({})/Service({})'.format(constraint_number, context_id, service_id)
-        constraint_type, constraint_value = check_service_constraint(
-            logger, constraint_number, parent_name, constraint, add_constraints)
-        constraint_tuples.append((constraint_type, constraint_value))
-
-    # ----- Parse endpoints and check if they exist in the database as device endpoints --------------------------------
-    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
-    db_endpoints : List[Endpoint] = []
-    for endpoint_number,endpoint_id in enumerate(request.endpointList):
-        parent_name = 'Endpoint(#{}) of Context({})/Service({})'.format(endpoint_number, context_id, service_id)
-
-        ep_topology_id, ep_device_id, ep_port_id = check_endpoint_id(
-            logger, endpoint_number, parent_name, endpoint_id, add_topology_devices_endpoints,
-            predefined_context_id=context_id, acceptable_context_ids=set([context_id]))
-
-        db_endpoint = check_device_endpoint_exists(
-            database, parent_name, context_id, ep_topology_id, ep_device_id, ep_port_id)
-        db_endpoints.append(db_endpoint)
-
-    return context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples
-
-def check_service_id_request(
-    method_name : str, request : ServiceId, database : Database, logger : logging.Logger) -> Tuple[str, str]:
-
-    # ----- Parse attributes -------------------------------------------------------------------------------------------
-    try:
-        context_id     = chk_string ('service_id.contextId.contextUuid.uuid',
-                                    request.contextId.contextUuid.uuid,
-                                    allow_empty=False)
-        service_id     = chk_string ('service_id.cs_id.uuid',
-                                    request.cs_id.uuid,
-                                    allow_empty=False)
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    # ----- Check if service exists in database ------------------------------------------------------------------------
-    _check_service_exists(method_name, database, context_id, service_id)
-
-    return context_id, service_id
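# Illustrative sketch (not part of the patch): the removed Checkers module gated each RPC
# method with a generic check_enum() helper driven by ACCEPTED_SERVICE_TYPES /
# ACCEPTED_SERVICE_STATES. The stand-in below only reproduces the behaviour implied by the
# removed unit tests (INVALID_ARGUMENT with a "Method(X) does not accept ..." message);
# the helper name, signature and message layout are assumptions, not the real
# common.tools.service API.
import grpc
from enum import Enum
from typing import Callable, Dict, Optional, Set

class ServiceException(Exception):  # simplified stand-in for the removed exception class
    def __init__(self, code: grpc.StatusCode, details: str):
        super().__init__(details)
        self.code, self.details = code, details

def check_enum(enum_name: str, method_name: str, value,
               to_enum: Callable[[object], Optional[Enum]],
               accepted: Dict[str, Set[Enum]]) -> Enum:
    enum_value = to_enum(value)                          # None means the value is unknown/unparseable
    accepted_values = accepted.get(method_name, set())   # empty set means "accept everything"
    if enum_value is None or (accepted_values and enum_value not in accepted_values):
        msg = 'Method({}) does not accept {}({}). Permitted values for Method({}) are {}({}).'.format(
            method_name, enum_name, getattr(enum_value, 'name', value), method_name, enum_name,
            sorted(member.name for member in accepted_values))
        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
    return enum_value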
diff --git a/src/service/_old_code_backup/test_unitary.py b/src/service/_old_code_backup/test_unitary.py
deleted file mode 100644
index fb7d1465d3308261e0f2bdc5bc534f67d89fae1e..0000000000000000000000000000000000000000
--- a/src/service/_old_code_backup/test_unitary.py
+++ /dev/null
@@ -1,364 +0,0 @@
-import copy, grpc, logging, pytest
-from google.protobuf.json_format import MessageToDict
-from common.database.Factory import get_database, DatabaseEngineEnum
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.tests.script import populate_example
-from common.tests.Assertions import validate_empty, validate_service, validate_service_id, \
-    validate_service_list_is_empty, validate_service_list_is_not_empty
-from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
-from service.client.ServiceClient import ServiceClient
-from service.proto.context_pb2 import Empty
-from service.proto.service_pb2 import Service, ServiceId, ServiceStateEnum, ServiceType
-from service.service.ServiceService import ServiceService
-
-port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
-
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
-
-# use "copy.deepcopy" to prevent propagating forced changes during tests
-CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
-TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
-SERVICE_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'cs_id': {'uuid': 'DEV1'}}
-SERVICE = {
-    'cs_id': copy.deepcopy(SERVICE_ID),
-    'serviceType': ServiceType.L3NM,
-    'serviceConfig': {'serviceConfig': '<config/>'},
-    'serviceState': {'serviceState': ServiceStateEnum.PLANNED},
-    'constraint': [
-        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
-        {'constraint_type': 'hops', 'constraint_value': '5'},
-    ],
-    'endpointList' : [
-        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP5'}},
-        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP5'}},
-        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV3'}}, 'port_id': {'uuid' : 'EP5'}},
-    ]
-}
-
-@pytest.fixture(scope='session')
-def database():
-    _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
-    populate_example(_database, add_services=False)
-    return _database
-
-@pytest.fixture(scope='session')
-def service_service(database):
-    _service = ServiceService(
-        database, port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
-    _service.start()
-    yield _service
-    _service.stop()
-
-@pytest.fixture(scope='session')
-def service_client(service_service):
-    _client = ServiceClient(address='127.0.0.1', port=port)
-    yield _client
-    _client.close()
-
-def test_get_services_empty(service_client : ServiceClient):
-    # should work
-    validate_service_list_is_empty(MessageToDict(
-        service_client.GetServiceList(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_create_service_wrong_service_attributes(service_client : ServiceClient):
-    # should fail with wrong service context
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'service.cs_id.contextId.contextUuid.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with service context does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(wrong-context) does not exist in the database.'
-    assert e.value.details() == msg
-
-    # should fail with wrong service id
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['cs_id']['cs_id']['uuid'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'service.cs_id.cs_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with wrong service type
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['serviceType'] = ServiceType.UNKNOWN
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Method(CreateService) does not accept ServiceType(UNKNOWN). '\
-          'Permitted values for Method(CreateService) are '\
-          'ServiceType([\'L2NM\', \'L3NM\', \'TAPI_CONNECTIVITY_SERVICE\']).'
-    assert e.value.details() == msg
-
-    # should fail with wrong service state
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['serviceState']['serviceState'] = ServiceStateEnum.PENDING_REMOVAL
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Method(CreateService) does not accept ServiceState(PENDING_REMOVAL). '\
-          'Permitted values for Method(CreateService) are '\
-          'ServiceState([\'PLANNED\']).'
-    assert e.value.details() == msg
-
-def test_create_service_wrong_constraint(service_client : ServiceClient):
-    # should fail with wrong constraint type
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['constraint'][0]['constraint_type'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'constraint[#0].constraint_type() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with wrong constraint value
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['constraint'][0]['constraint_value'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'constraint[#0].constraint_value() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with duplicated constraint type
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['constraint'][1] = copy_service['constraint'][0]
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Duplicated ConstraintType(latency_ms) in Constraint(#1) of Context(admin)/Service(DEV1).'
-    assert e.value.details() == msg
-
-def test_create_service_wrong_endpoint(service_client : ServiceClient, database : Database):
-    # should fail with wrong endpoint context
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-        print(copy_service)
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Context(wrong-context) in Endpoint(#0) of '\
-          'Context(admin)/Service(DEV1) mismatches acceptable Contexts({\'admin\'}). '\
-          'Optionally, leave field empty to use predefined Context(admin).'
-    assert e.value.details() == msg
-
-    # should fail with wrong endpoint topology
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Context(admin)/Topology(wrong-topo) in Endpoint(#0) of '\
-          'Context(admin)/Service(DEV1) mismatches acceptable Topologies({\'admin\'}). '\
-          'Optionally, leave field empty to use predefined Topology(admin).'
-    assert e.value.details() == msg
-
-    # should fail with endpoint device is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'endpoint_id[#0].dev_id.device_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with endpoint device not found
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0) of '\
-          'Context(admin)/Service(DEV1) does not exist in the database.'
-    assert e.value.details() == msg
-
-    # should fail with endpoint device duplicated
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][1] = copy_service['endpointList'][0]
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Duplicated Context(admin)/Topology(admin)/Device(DEV1) in Endpoint(#1) of '\
-          'Context(admin)/Service(DEV1).'
-    assert e.value.details() == msg
-
-    # should fail with endpoint port is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['port_id']['uuid'] = ''
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'endpoint_id[#0].port_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with endpoint port not found
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service = copy.deepcopy(SERVICE)
-        copy_service['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
-        service_client.CreateService(Service(**copy_service))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0) of '\
-          'Context(admin)/Service(DEV1) does not exist in the database.'
-    assert e.value.details() == msg
-
-def test_get_service_does_not_exist(service_client : ServiceClient):
-    # should fail with service context does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service_id = copy.deepcopy(SERVICE_ID)
-        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-        service_client.GetServiceById(ServiceId(**copy_service_id))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(wrong-context) does not exist in the database.'
-    assert e.value.details() == msg
-
-    # should fail with service does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        service_client.GetServiceById(ServiceId(**SERVICE_ID))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-    assert e.value.details() == msg
-
-def test_update_service_does_not_exist(service_client : ServiceClient):
-    # should fail with service does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        service_client.UpdateService(Service(**SERVICE))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-    assert e.value.details() == msg
-
-def test_create_service(service_client : ServiceClient):
-    # should work
-    validate_service_id(MessageToDict(
-        service_client.CreateService(Service(**SERVICE)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_create_service_already_exists(service_client : ServiceClient):
-    # should fail with service already exists
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        service_client.CreateService(Service(**SERVICE))
-    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    msg = 'Context(admin)/Service(DEV1) already exists in the database.'
-    assert e.value.details() == msg
-
-def test_get_service(service_client : ServiceClient):
-    # should work
-    validate_service(MessageToDict(
-        service_client.GetServiceById(ServiceId(**SERVICE_ID)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_update_service(service_client : ServiceClient):
-    # should work
-    copy_service = copy.deepcopy(SERVICE)
-    copy_service['serviceConfig']['serviceConfig'] = '<newconfig/>'
-    copy_service['serviceState']['serviceState'] = ServiceStateEnum.ACTIVE
-    copy_service['constraint'] = [
-        {'constraint_type': 'latency_ms', 'constraint_value': '200'},
-        {'constraint_type': 'bandwidth_gbps', 'constraint_value': '100'},
-    ]
-    copy_service['endpointList'] = [
-        {
-            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-            'dev_id': {'device_id': {'uuid': 'DEV1'}},
-            'port_id': {'uuid' : 'EP5'}
-        },
-        {
-            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-            'dev_id': {'device_id': {'uuid': 'DEV2'}},
-            'port_id': {'uuid' : 'EP6'}
-        },
-    ]
-    validate_service_id(MessageToDict(
-        service_client.UpdateService(Service(**copy_service)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_delete_service_wrong_service_id(service_client : ServiceClient):
-    # should fail with service context is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service_id = copy.deepcopy(SERVICE_ID)
-        copy_service_id['contextId']['contextUuid']['uuid'] = ''
-        service_client.DeleteService(ServiceId(**copy_service_id))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'service_id.contextId.contextUuid.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with service context does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service_id = copy.deepcopy(SERVICE_ID)
-        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-        service_client.DeleteService(ServiceId(**copy_service_id))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(wrong-context) does not exist in the database.'
-    assert e.value.details() == msg
-
-    # should fail with service id is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service_id = copy.deepcopy(SERVICE_ID)
-        copy_service_id['cs_id']['uuid'] = ''
-        service_client.DeleteService(ServiceId(**copy_service_id))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'service_id.cs_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
-
-    # should fail with service does not exist
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_service_id = copy.deepcopy(SERVICE_ID)
-        copy_service_id['cs_id']['uuid'] = 'wrong-service'
-        service_client.DeleteService(ServiceId(**copy_service_id))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Service(wrong-service) does not exist in the database.'
-    assert e.value.details() == msg
-
-def test_delete_service(service_client : ServiceClient):
-    # should work
-    validate_empty(MessageToDict(
-        service_client.DeleteService(ServiceId(**SERVICE_ID)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_get_services_empty_2(service_client : ServiceClient):
-    # should work
-    validate_service_list_is_empty(MessageToDict(
-        service_client.GetServiceList(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_create_service_empty_endpoints(service_client : ServiceClient):
-    # should work
-    copy_service = copy.deepcopy(SERVICE)
-    copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
-    copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = ''
-    validate_service_id(MessageToDict(
-        service_client.CreateService(Service(**copy_service)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_get_services_full(service_client : ServiceClient):
-    # should work
-    validate_service_list_is_not_empty(MessageToDict(
-        service_client.GetServiceList(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
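# Minimal usage sketch (illustrative, not part of the patch), based on how the removed tests
# above drive the client: ServiceClient(address, port), one RPC, then close(). The address
# and port values are placeholders; try/finally ensures the gRPC channel is closed even if
# the RPC raises.
from service.client.ServiceClient import ServiceClient
from service.proto.context_pb2 import Empty

client = ServiceClient(address='127.0.0.1', port=10000)
try:
    service_list = client.GetServiceList(Empty())  # same RPC the removed tests validate
finally:
    client.close()  # safe to call more than once: the channel is set to None after closing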
diff --git a/src/service/client/ServiceClient.py b/src/service/client/ServiceClient.py
index 958e066238446d9642ba38c8a907f32df1dcf030..b9d123b88b1c004192098caccedf4b67dc92ac2b 100644
--- a/src/service/client/ServiceClient.py
+++ b/src/service/client/ServiceClient.py
@@ -21,7 +21,7 @@ class ServiceClient:
         self.stub = ServiceServiceStub(self.channel)
 
     def close(self):
-        if(self.channel is not None): self.channel.close()
+        if self.channel is not None: self.channel.close()
         self.channel = None
         self.stub = None
 
diff --git a/src/service/genproto.sh b/src/service/genproto.sh
index 9b6387a8a0739ce1cbf2cb75f1e178880a5dbc8e..7ea496d6fc49bccbd57acfea9c2ac4dce6ae1fa1 100755
--- a/src/service/genproto.sh
+++ b/src/service/genproto.sh
@@ -25,9 +25,12 @@ touch proto/__init__.py
 
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto kpi_sample_types.proto
 
 rm proto/context_pb2_grpc.py
+rm proto/kpi_sample_types_pb2_grpc.py
 
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2_grpc.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/kpi_sample_types_pb2.py
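# Illustrative sketch (not part of the patch): what the added sed line in genproto.sh does to
# the protoc output. protoc emits absolute imports between generated modules, which do not
# resolve once the files live inside the 'proto' package; the substitution rewrites them as
# relative imports. The regex below mirrors the sed expression; the sample line is the one
# protoc is assumed to emit for context_pb2.py.
import re

generated = "import kpi_sample_types_pb2 as kpi__sample__types__pb2"
rewritten = re.sub(r'(import .*)_pb2', r'from . \1_pb2', generated)
print(rewritten)  # -> "from . import kpi_sample_types_pb2 as kpi__sample__types__pb2"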
diff --git a/src/service/proto/context_pb2.py b/src/service/proto/context_pb2.py
index 8b4848bc33bfb0eba76590c8a3a627b2db84ca9f..68602b16f264ceac9acc3ef6669b09d5984e72c2 100644
--- a/src/service/proto/context_pb2.py
+++ b/src/service/proto/context_pb2.py
@@ -12,6 +12,7 @@ from google.protobuf import symbol_database as _symbol_database
 _sym_db = _symbol_database.Default()
 
 
+from . import kpi_sample_types_pb2 as kpi__sample__types__pb2
 
 
 DESCRIPTOR = _descriptor.FileDescriptor(
@@ -20,8 +21,9 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
-)
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\x1a\x16kpi_sample_types.proto\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 \x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 
\x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 \x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xc4\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12\x33\n\x16path_hops_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12+\n\x0fsub_service_ids\x18\x04 \x03(\x0b\x32\x12.context.ServiceId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x0f\x43onnectionEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12,\n\rconnection_id\x18\x02 \x01(\x0b\x32\x15.context.ConnectionId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"\x86\x01\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\x12\x39\n\x10kpi_sample_types\x18\x03 \x03(\x0e\x32\x1f.kpi_sample_types.KpiSampleType\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n 
DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xad\x10\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x12\x44\n\x11ListConnectionIds\x12\x12.context.ServiceId\x1a\x19.context.ConnectionIdList\"\x00\x12@\n\x0fListConnections\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x12=\n\rGetConnection\x12\x15.context.ConnectionId\x1a\x13.context.Connection\"\x00\x12=\n\rSetConnection\x12\x13.context.Connection\x1a\x15.context.ConnectionId\"\x00\x12;\n\x10RemoveConnection\x12\x15.context.ConnectionId\x1a\x0e.contex
t.Empty\"\x00\x12\x43\n\x13GetConnectionEvents\x12\x0e.context.Empty\x1a\x18.context.ConnectionEvent\"\x00\x30\x01\x62\x06proto3'
+  ,
+  dependencies=[kpi__sample__types__pb2.DESCRIPTOR,])
 
 _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   name='EventTypeEnum',
@@ -53,8 +55,8 @@ _EVENTTYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3468,
-  serialized_end=3574,
+  serialized_start=3703,
+  serialized_end=3809,
 )
 _sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
 
@@ -99,8 +101,8 @@ _DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3577,
-  serialized_end=3774,
+  serialized_start=3812,
+  serialized_end=4009,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
 
@@ -130,8 +132,8 @@ _DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3777,
-  serialized_end=3920,
+  serialized_start=4012,
+  serialized_end=4155,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
 
@@ -166,8 +168,8 @@ _SERVICETYPEENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=3923,
-  serialized_end=4052,
+  serialized_start=4158,
+  serialized_end=4287,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
 
@@ -202,8 +204,8 @@ _SERVICESTATUSENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4055,
-  serialized_end=4191,
+  serialized_start=4290,
+  serialized_end=4426,
 )
 _sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
 
@@ -233,8 +235,8 @@ _CONFIGACTIONENUM = _descriptor.EnumDescriptor(
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=4193,
-  serialized_end=4286,
+  serialized_start=4428,
+  serialized_end=4521,
 )
 _sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
@@ -286,8 +288,8 @@ _EMPTY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=26,
-  serialized_end=33,
+  serialized_start=50,
+  serialized_end=57,
 )
 
 
@@ -318,8 +320,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=55,
+  serialized_start=59,
+  serialized_end=79,
 )
 
 
@@ -357,8 +359,8 @@ _EVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=57,
-  serialized_end=127,
+  serialized_start=81,
+  serialized_end=151,
 )
 
 
@@ -389,8 +391,8 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=129,
-  serialized_end=177,
+  serialized_start=153,
+  serialized_end=201,
 )
 
 
@@ -442,8 +444,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=180,
-  serialized_end=362,
+  serialized_start=204,
+  serialized_end=386,
 )
 
 
@@ -474,8 +476,8 @@ _CONTEXTIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=364,
-  serialized_end=420,
+  serialized_start=388,
+  serialized_end=444,
 )
 
 
@@ -506,8 +508,8 @@ _CONTEXTLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=422,
-  serialized_end=471,
+  serialized_start=446,
+  serialized_end=495,
 )
 
 
@@ -545,8 +547,8 @@ _CONTEXTEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=473,
-  serialized_end=558,
+  serialized_start=497,
+  serialized_end=582,
 )
 
 
@@ -584,8 +586,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=560,
-  serialized_end=650,
+  serialized_start=584,
+  serialized_end=674,
 )
 
 
@@ -630,8 +632,8 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=652,
-  serialized_end=778,
+  serialized_start=676,
+  serialized_end=802,
 )
 
 
@@ -662,8 +664,8 @@ _TOPOLOGYIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=839,
+  serialized_start=804,
+  serialized_end=863,
 )
 
 
@@ -694,8 +696,8 @@ _TOPOLOGYLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=841,
-  serialized_end=894,
+  serialized_start=865,
+  serialized_end=918,
 )
 
 
@@ -733,8 +735,8 @@ _TOPOLOGYEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=896,
-  serialized_end=984,
+  serialized_start=920,
+  serialized_end=1008,
 )
 
 
@@ -765,8 +767,8 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=986,
-  serialized_end=1032,
+  serialized_start=1010,
+  serialized_end=1056,
 )
 
 
@@ -832,8 +834,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1317,
+  serialized_start=1059,
+  serialized_end=1341,
 )
 
 
@@ -864,8 +866,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1319,
-  serialized_end=1376,
+  serialized_start=1343,
+  serialized_end=1400,
 )
 
 
@@ -896,8 +898,8 @@ _DEVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1378,
-  serialized_end=1431,
+  serialized_start=1402,
+  serialized_end=1455,
 )
 
 
@@ -928,8 +930,8 @@ _DEVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1433,
-  serialized_end=1479,
+  serialized_start=1457,
+  serialized_end=1503,
 )
 
 
@@ -967,8 +969,8 @@ _DEVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1481,
-  serialized_end=1563,
+  serialized_start=1505,
+  serialized_end=1587,
 )
 
 
@@ -999,8 +1001,8 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1565,
-  serialized_end=1607,
+  serialized_start=1589,
+  serialized_end=1631,
 )
 
 
@@ -1038,8 +1040,8 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1609,
-  serialized_end=1697,
+  serialized_start=1633,
+  serialized_end=1721,
 )
 
 
@@ -1070,8 +1072,8 @@ _LINKIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1699,
-  serialized_end=1746,
+  serialized_start=1723,
+  serialized_end=1770,
 )
 
 
@@ -1102,8 +1104,8 @@ _LINKLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1748,
-  serialized_end=1788,
+  serialized_start=1772,
+  serialized_end=1812,
 )
 
 
@@ -1141,8 +1143,8 @@ _LINKEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1790,
-  serialized_end=1866,
+  serialized_start=1814,
+  serialized_end=1890,
 )
 
 
@@ -1180,8 +1182,8 @@ _SERVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1868,
-  serialized_end=1956,
+  serialized_start=1892,
+  serialized_end=1980,
 )
 
 
@@ -1247,8 +1249,8 @@ _SERVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1959,
-  serialized_end=2253,
+  serialized_start=1983,
+  serialized_end=2277,
 )
 
 
@@ -1279,8 +1281,8 @@ _SERVICESTATUS = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2255,
-  serialized_end=2322,
+  serialized_start=2279,
+  serialized_end=2346,
 )
 
 
@@ -1311,8 +1313,8 @@ _SERVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2324,
-  serialized_end=2382,
+  serialized_start=2348,
+  serialized_end=2406,
 )
 
 
@@ -1343,8 +1345,8 @@ _SERVICEIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2384,
-  serialized_end=2440,
+  serialized_start=2408,
+  serialized_end=2464,
 )
 
 
@@ -1375,8 +1377,8 @@ _SERVICELIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2442,
-  serialized_end=2491,
+  serialized_start=2466,
+  serialized_end=2515,
 )
 
 
@@ -1414,40 +1416,26 @@ _SERVICEEVENT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2493,
-  serialized_end=2578,
+  serialized_start=2517,
+  serialized_end=2602,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.EndPointId.device_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1460,30 +1448,44 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2581,
-  serialized_end=2711,
+  serialized_start=2604,
+  serialized_end=2658,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='service_id', full_name='context.Connection.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path_hops_endpoint_ids', full_name='context.Connection.path_hops_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='sub_service_ids', full_name='context.Connection.sub_service_ids', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1499,37 +1501,55 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2713,
-  serialized_end=2788,
+  serialized_start=2661,
+  serialized_end=2857,
 )
 
 
-_CONFIGRULE = _descriptor.Descriptor(
-  name='ConfigRule',
-  full_name='context.ConfigRule',
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='action', full_name='context.ConfigRule.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2859,
+  serialized_end=2924,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1545,30 +1565,30 @@ _CONFIGRULE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2790,
-  serialized_end=2891,
+  serialized_start=2926,
+  serialized_end=2984,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONNECTIONEVENT = _descriptor.Descriptor(
+  name='ConnectionEvent',
+  full_name='context.ConnectionEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ConnectionEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='connection_id', full_name='context.ConnectionEvent.connection_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1584,26 +1604,40 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2893,
-  serialized_end=2956,
+  serialized_start=2986,
+  serialized_end=3080,
 )
 
 
-_CONNECTIONID = _descriptor.Descriptor(
-  name='ConnectionId',
-  full_name='context.ConnectionId',
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -1616,36 +1650,36 @@ _CONNECTIONID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=2958,
-  serialized_end=3012,
+  serialized_start=3083,
+  serialized_end=3213,
 )
 
 
-_CONNECTION = _descriptor.Descriptor(
-  name='Connection',
-  full_name='context.Connection',
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='path', full_name='context.Connection.path', index=2,
-      number=3, type=11, cpp_type=10, label=3,
+      name='kpi_sample_types', full_name='context.EndPoint.kpi_sample_types', index=2,
+      number=3, type=14, cpp_type=8, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -1662,23 +1696,37 @@ _CONNECTION = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3015,
-  serialized_end=3156,
+  serialized_start=3216,
+  serialized_end=3350,
 )
 
 
-_CONNECTIONIDLIST = _descriptor.Descriptor(
-  name='ConnectionIdList',
-  full_name='context.ConnectionIdList',
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1694,23 +1742,30 @@ _CONNECTIONIDLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3158,
-  serialized_end=3223,
+  serialized_start=3352,
+  serialized_end=3453,
 )
 
 
-_CONNECTIONLIST = _descriptor.Descriptor(
-  name='ConnectionList',
-  full_name='context.ConnectionList',
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='connections', full_name='context.ConnectionList.connections', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -1726,8 +1781,8 @@ _CONNECTIONLIST = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3225,
-  serialized_end=3283,
+  serialized_start=3455,
+  serialized_end=3518,
 )
 
 
@@ -1772,8 +1827,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3285,
-  serialized_end=3379,
+  serialized_start=3520,
+  serialized_end=3614,
 )
 
 
@@ -1811,8 +1866,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=3381,
-  serialized_end=3466,
+  serialized_start=3616,
+  serialized_end=3701,
 )
 
 _EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
@@ -1866,17 +1921,21 @@ _SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
 _SERVICELIST.fields_by_name['services'].message_type = _SERVICE
 _SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
 _SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path_hops_endpoint_ids'].message_type = _ENDPOINTID
+_CONNECTION.fields_by_name['sub_service_ids'].message_type = _SERVICEID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_CONNECTIONEVENT.fields_by_name['event'].message_type = _EVENT
+_CONNECTIONEVENT.fields_by_name['connection_id'].message_type = _CONNECTIONID
 _ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
 _ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_ENDPOINT.fields_by_name['kpi_sample_types'].enum_type = kpi__sample__types__pb2._KPISAMPLETYPE
 _CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
-_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
-_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
-_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
-_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
-_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
-_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
 _TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -1910,14 +1969,15 @@ DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
 DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
 DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
 DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
 DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
 DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
 DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['ConnectionEvent'] = _CONNECTIONEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
 DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
@@ -2145,34 +2205,6 @@ ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_messag
   })
 _sym_db.RegisterMessage(ServiceEvent)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
-  })
-_sym_db.RegisterMessage(EndPointId)
-
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
-  })
-_sym_db.RegisterMessage(EndPoint)
-
-ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
-  'DESCRIPTOR' : _CONFIGRULE,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ConfigRule)
-  })
-_sym_db.RegisterMessage(ConfigRule)
-
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
-  })
-_sym_db.RegisterMessage(Constraint)
-
 ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
   'DESCRIPTOR' : _CONNECTIONID,
   '__module__' : 'context_pb2'
@@ -2201,6 +2233,41 @@ ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_me
   })
 _sym_db.RegisterMessage(ConnectionList)
 
+ConnectionEvent = _reflection.GeneratedProtocolMessageType('ConnectionEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionEvent)
+  })
+_sym_db.RegisterMessage(ConnectionEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
   '__module__' : 'context_pb2'
@@ -2224,8 +2291,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=4289,
-  serialized_end=5990,
+  serialized_start=4524,
+  serialized_end=6617,
   methods=[
   _descriptor.MethodDescriptor(
     name='ListContextIds',
@@ -2527,6 +2594,66 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='ListConnectionIds',
+    full_name='context.ContextService.ListConnectionIds',
+    index=30,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListConnections',
+    full_name='context.ContextService.ListConnections',
+    index=31,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnection',
+    full_name='context.ContextService.GetConnection',
+    index=32,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_CONNECTION,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetConnection',
+    full_name='context.ContextService.SetConnection',
+    index=33,
+    containing_service=None,
+    input_type=_CONNECTION,
+    output_type=_CONNECTIONID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveConnection',
+    full_name='context.ContextService.RemoveConnection',
+    index=34,
+    containing_service=None,
+    input_type=_CONNECTIONID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionEvents',
+    full_name='context.ContextService.GetConnectionEvents',
+    index=35,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONNECTIONEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
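
The regenerated context_pb2.py above reshapes the Connection model (ConnectionId, Connection with service_id, path_hops_endpoint_ids and sub_service_ids, ConnectionIdList, ConnectionList, ConnectionEvent) and adds six Connection RPCs to ContextService. The snippet below is a minimal client-side sketch, not part of the patch; it assumes the matching context_pb2_grpc stubs are regenerated alongside this module, and the address and UUIDs are hypothetical.

# Illustrative sketch: calling the new Connection RPCs through the protoc-generated stub.
import grpc
from service.proto import context_pb2, context_pb2_grpc  # context_pb2_grpc assumed regenerated too

channel = grpc.insecure_channel('localhost:1010')         # hypothetical Context endpoint
stub = context_pb2_grpc.ContextServiceStub(channel)

service_id = context_pb2.ServiceId()
service_id.context_id.context_uuid.uuid = 'admin'         # hypothetical context UUID
service_id.service_uuid.uuid = 'svc-1'                    # hypothetical service UUID

# ListConnections takes a ServiceId and returns the ConnectionList established for it.
for connection in stub.ListConnections(service_id).connections:
    print(connection.connection_id.connection_uuid.uuid,
          [ep.endpoint_uuid.uuid for ep in connection.path_hops_endpoint_ids])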
 
diff --git a/src/service/proto/kpi_sample_types_pb2.py b/src/service/proto/kpi_sample_types_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea7fd2f82757d4c3db02d7e2c7817e2787b0b490
--- /dev/null
+++ b/src/service/proto/kpi_sample_types_pb2.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: kpi_sample_types.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='kpi_sample_types.proto',
+  package='kpi_sample_types',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
+)
+
+_KPISAMPLETYPE = _descriptor.EnumDescriptor(
+  name='KpiSampleType',
+  full_name='kpi_sample_types.KpiSampleType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=45,
+  serialized_end=235,
+)
+_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
+
+KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
+KPISAMPLETYPE_UNKNOWN = 0
+KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
+KPISAMPLETYPE_PACKETS_RECEIVED = 102
+KPISAMPLETYPE_BYTES_TRANSMITTED = 201
+KPISAMPLETYPE_BYTES_RECEIVED = 202
+
+
+DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+# @@protoc_insertion_point(module_scope)
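
The new kpi_sample_types_pb2.py only defines the KpiSampleType enum, which the regenerated EndPoint message references through its repeated kpi_sample_types field. Below is a small illustrative sketch of the enum wrapper in use; the names and numbers come from the descriptor above, while the EndPoint values are hypothetical.

# Illustrative sketch: symbolic <-> numeric conversions and a repeated enum field.
from service.proto import context_pb2
from service.proto.kpi_sample_types_pb2 import KpiSampleType

assert KpiSampleType.Value('KPISAMPLETYPE_PACKETS_RECEIVED') == 102
assert KpiSampleType.Name(201) == 'KPISAMPLETYPE_BYTES_TRANSMITTED'

endpoint = context_pb2.EndPoint()
endpoint.endpoint_id.endpoint_uuid.uuid = 'eth0'   # hypothetical endpoint UUID
endpoint.endpoint_type = 'copper'                  # hypothetical endpoint type
endpoint.kpi_sample_types.extend([                 # repeated enum field (type=14, label=3 above)
    KpiSampleType.Value('KPISAMPLETYPE_PACKETS_TRANSMITTED'),
    KpiSampleType.Value('KPISAMPLETYPE_PACKETS_RECEIVED'),
])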
diff --git a/src/service/requirements.in b/src/service/requirements.in
index 25abdad1b5767117956a88b816399635348884c7..eb922871fa487db730696063a02083722ee7cfc3 100644
--- a/src/service/requirements.in
+++ b/src/service/requirements.in
@@ -1,6 +1,18 @@
+anytree
+apscheduler
+fastcache
+flask-restful
 grpcio-health-checking
 grpcio
+Jinja2
+netconf-client #1.7.3
 prometheus-client
 pytest
 pytest-benchmark
+python-json-logger
+pytz
 redis
+requests
+xmltodict
+p4runtime==1.3.0
+coverage
diff --git a/src/service/service/ServiceService.py b/src/service/service/ServiceService.py
index 6280af0b70eb37b8cf9c27954bf4db4690743198..bff0d7b2f32bf0498adaa834a49226efb373b786 100644
--- a/src/service/service/ServiceService.py
+++ b/src/service/service/ServiceService.py
@@ -1,21 +1,31 @@
-import grpc
-import logging
+import grpc, logging
 from concurrent import futures
 from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
 from grpc_health.v1.health_pb2 import HealthCheckResponse
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from common.orm.backend.BackendEnum import BackendEnum
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
 from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server
 from .ServiceServiceServicerImpl import ServiceServiceServicerImpl
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
 
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
 class ServiceService:
     def __init__(
-        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        self, context_client : ContextClient, device_client : DeviceClient,
+        service_handler_factory : ServiceHandlerFactory,
+        address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
         grace_period=GRPC_GRACE_PERIOD):
 
+        self.context_client = context_client
+        self.device_client = device_client
+        self.service_handler_factory = service_handler_factory
         self.address = address
         self.port = port
         self.endpoint = None
@@ -26,6 +36,8 @@ class ServiceService:
         self.pool = None
         self.server = None
 
+        self.database = Database(get_database_backend(backend=BackendEnum.INMEMORY))
+
     def start(self):
         self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
         LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
@@ -34,7 +46,8 @@ class ServiceService:
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
 
-        self.service_servicer = ServiceServiceServicerImpl()
+        self.service_servicer = ServiceServiceServicerImpl(
+            self.context_client, self.device_client, self.database, self.service_handler_factory)
         add_ServiceServiceServicer_to_server(self.service_servicer, self.server)
 
         self.health_servicer = HealthServicer(
diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index e400d0b6e4e6741d387eef7c156eedd65ffb76da..ca93c098010031823fcd38059a624e7b0c62a5bb 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -1,10 +1,30 @@
-#from typing import Dict
-import grpc, logging
-#from common.exceptions.ServiceException import ServiceException
+import grpc, json, logging
+from typing import Any, List, Optional, Tuple
+from google.protobuf.json_format import MessageToDict
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
 from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
-from service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId #, ServiceList
+from common.rpc_method_wrapper.ServiceExceptions import (
+    InvalidArgumentException, NotFoundException, OperationFailedException)
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId
 from service.proto.service_pb2_grpc import ServiceServiceServicer
-#from service.service.Tools import check_service_id_request, check_service_request
+from .database.ConfigModel import ConfigModel, ConfigRuleModel
+from .database.ConstraintModel import ConstraintModel, ConstraintsModel
+from .database.DatabaseServiceTools import (
+    delete_service_from_context, sync_service_from_context, sync_service_to_context, update_service_in_local_database)
+from .database.RelationModels import ServiceEndPointModel
+from .database.ServiceModel import ServiceModel
+from .service_handler_api._ServiceHandler import _ServiceHandler
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from .service_handler_api.Tools import (
+    check_errors_deleteconfig, check_errors_deleteconstraint, check_errors_deleteendpoint, check_errors_setconfig,
+    check_errors_setconstraint, check_errors_setendpoint)
+from .Tools import (
+    classify_config_rules, classify_constraints, classify_endpointids, get_service_handler_class,
+    sync_devices_from_context)
 
 LOGGER = logging.getLogger(__name__)
 
@@ -13,86 +33,173 @@ METHOD_NAMES = ['CreateService', 'UpdateService', 'DeleteService',  'GetConnecti
 METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
 
 class ServiceServiceServicerImpl(ServiceServiceServicer):
-    def __init__(self):
+    def __init__(
+        self, context_client : ContextClient, device_client : DeviceClient, database : Database,
+        service_handler_factory : ServiceHandlerFactory):
+
         LOGGER.debug('Creating Servicer...')
+        self.context_client = context_client
+        self.device_client = device_client
+        self.database = database
+        self.service_handler_factory = service_handler_factory
         LOGGER.debug('Servicer Created')
 
-    #@safe_and_metered_rpc_method(METRICS, LOGGER)
-    #def GetServiceList(self, request : Empty, context : grpc.ServicerContext) -> ServiceList:
-    #    db_context_uuids = self.database.contexts.get()
-    #    json_services = []
-    #    for db_context_uuid in db_context_uuids:
-    #        db_context = self.database.context(db_context_uuid)
-    #        json_services.extend(db_context.dump_services())
-    #    return ServiceList(cs=json_services)
-
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
-        #context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-        #    check_service_request('CreateService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.create(service_type, service_config, service_state)
-        #for db_endpoint in db_endpoints:
-        #    service_endpoint_id = '{}:{}/{}'.format(
-        #        db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-        #    db_service.endpoint(service_endpoint_id).create(db_endpoint)
-        #for cons_type,cons_value in constraint_tuples: db_service.constraint(cons_type).create(cons_value)
-        #return ServiceId(**db_service.dump_id())
-        return ServiceId()
+        service_id = request.service_id
+        service_uuid = service_id.service_uuid.uuid
+        service_context_uuid = service_id.context_id.context_uuid.uuid
+
+        if len(request.service_endpoint_ids) > 0:
+            unexpected_endpoints = []
+            for service_endpoint_id in request.service_endpoint_ids:
+                unexpected_endpoints.append(MessageToDict(
+                    service_endpoint_id, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
+            str_unexpected_endpoints = json.dumps(unexpected_endpoints, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_endpoint_ids', str_unexpected_endpoints,
+                extra_details='RPC method CreateService does not accept Endpoints. '\
+                              'Endpoints should be configured after creating the service.')
+
+        if len(request.service_constraints) > 0:
+            unexpected_constraints = []
+            for service_constraint in request.service_constraints:
+                unexpected_constraints.append(MessageToDict(
+                    service_constraint, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
+            str_unexpected_constraints = json.dumps(unexpected_constraints, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_constraints', str_unexpected_constraints,
+                extra_details='RPC method CreateService does not accept Constraints. '\
+                              'Constraints should be configured after creating the service.')
+
+        if len(request.service_config.config_rules) > 0:
+            unexpected_config_rules = MessageToDict(
+                request.service_config, including_default_value_fields=True,
+                preserving_proto_field_name=True, use_integers_for_enums=True)
+            unexpected_config_rules = unexpected_config_rules['config_rules']
+            str_unexpected_config_rules = json.dumps(unexpected_config_rules, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_config.config_rules', str_unexpected_config_rules,
+                extra_details='RPC method CreateService does not accept Config Rules. '\
+                              'Config Rules should be configured after creating the service.')
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        db_service,_ = update_service_in_local_database(self.database, request)
+
+        LOGGER.info('[CreateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        sync_service_to_context(db_service, self.context_client)
+        return ServiceId(**db_service.dump_id())
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
-        #context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-        #    check_service_request('UpdateService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.update(update_attributes={
-        #    'service_type'  : service_type,
-        #    'service_config': service_config,
-        #    'service_state' : service_state,
-        #})
-        ## Update service constraints; first add missing, then remove existing, but not added to Service
-        #db_service_constraint_types = set(db_service.constraints.get())
-        #for constraint_type,constraint_value in constraint_tuples:
-        #    if constraint_type in db_service_constraint_types:
-        #        db_service.constraint(constraint_type).update(update_attributes={
-        #            'constraint_value': constraint_value
-        #        })
-        #    else:
-        #        db_service.constraint(constraint_type).create(constraint_value)
-        #    db_service_constraint_types.discard(constraint_type)
-        #for constraint_type in db_service_constraint_types:
-        #    db_service.constraint(constraint_type).delete()
-        ## Update service endpoints; first add missing, then remove existing, but not added to Service
-        #db_service_endpoint_uuids = set(db_service.endpoints.get())
-        #for db_endpoint in db_endpoints:
-        #    service_endpoint_id = '{}:{}/{}'.format(
-        #        db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-        #    if service_endpoint_id not in db_service_endpoint_uuids:
-        #        db_service.endpoint(service_endpoint_id).create(db_endpoint)
-        #    db_service_endpoint_uuids.discard(service_endpoint_id)
-        #for db_service_endpoint_uuid in db_service_endpoint_uuids:
-        #    db_service.endpoint(db_service_endpoint_uuid).delete()
-        #return ServiceId(**db_service.dump_id())
-        return ServiceId()
+        service_id = request.service_id
+        service_uuid = service_id.service_uuid.uuid
+        service_context_uuid = service_id.context_id.context_uuid.uuid
+        str_service_key = key_to_str([service_context_uuid, service_uuid])
+
+        # Sync before updating the service to ensure we have the devices, endpoints, constraints, and config rules
+        # to be set/deleted before actually modifying them in the local in-memory database.
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        db_service = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False)
+        if db_service is None: raise NotFoundException('Service', str_service_key)
+        LOGGER.info('[UpdateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        db_devices = sync_devices_from_context(self.context_client, db_service, request.service_endpoint_ids)
+
+        resources_to_set    : List[Tuple[str, Any]] = [] # resource_key, resource_value
+        resources_to_delete : List[Tuple[str, Any]] = [] # resource_key, resource_value
+        classify_config_rules(db_service, request.service_config.config_rules, resources_to_set, resources_to_delete)
+
+        constraints_to_set    : List[Tuple[str, str]] = [] # constraint_type, constraint_value
+        constraints_to_delete : List[Tuple[str, str]] = [] # constraint_type, constraint_value
+        classify_constraints(db_service, request.service_constraints, constraints_to_set, constraints_to_delete)
+
+        endpointids_to_set    : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid
+        endpointids_to_delete : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid
+        classify_endpointids(db_service, request.service_endpoint_ids, endpointids_to_set, endpointids_to_delete)
+
+        service_handler_class = get_service_handler_class(self.service_handler_factory, db_service, db_devices)
+        service_handler_settings = {}
+        service_handler : _ServiceHandler = service_handler_class(
+            db_service, self.database, self.context_client, self.device_client, **service_handler_settings)
+
+        errors = []
+
+        if len(errors) == 0:
+            results_deleteendpoint = service_handler.DeleteEndpoint(endpointids_to_delete)
+            errors.extend(check_errors_deleteendpoint(endpointids_to_delete, results_deleteendpoint))
+
+        if len(errors) == 0:
+            results_deleteconstraint = service_handler.DeleteConstraint(constraints_to_delete)
+            errors.extend(check_errors_deleteconstraint(constraints_to_delete, results_deleteconstraint))
+
+        if len(errors) == 0:
+            results_deleteconfig = service_handler.DeleteConfig(resources_to_delete)
+            errors.extend(check_errors_deleteconfig(resources_to_delete, results_deleteconfig))
+
+        if len(errors) == 0:
+            results_setconfig = service_handler.SetConfig(resources_to_set)
+            errors.extend(check_errors_setconfig(resources_to_set, results_setconfig))
+
+        if len(errors) == 0:
+            results_setconstraint = service_handler.SetConstraint(constraints_to_set)
+            errors.extend(check_errors_setconstraint(constraints_to_set, results_setconstraint))
+
+        if len(errors) == 0:
+            results_setendpoint = service_handler.SetEndpoint(endpointids_to_set)
+            errors.extend(check_errors_setendpoint(endpointids_to_set, results_setendpoint))
+
+        if len(errors) > 0:
+            raise OperationFailedException('UpdateService', extra_details=errors)
+
+        db_service,_ = update_service_in_local_database(self.database, request)
+        LOGGER.info('[UpdateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        #db_entries = self.database.dump()
+        #LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+        #for db_entry in db_entries:
+        #    LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+        #LOGGER.info('-----------------------------------------------------------')
+
+        sync_service_to_context(db_service, self.context_client)
+        return ServiceId(**db_service.dump_id())
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
-        #context_id, service_id = check_service_id_request('DeleteService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.delete()
-        return Empty()
+        service_uuid = request.service_uuid.uuid
+        service_context_uuid = request.context_id.context_uuid.uuid
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        str_service_key = key_to_str([service_context_uuid, service_uuid])
+        db_service : ServiceModel = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False)
+        if db_service is None: return Empty()
+
+        delete_service_from_context(db_service, self.context_client)
 
-    #@safe_and_metered_rpc_method(METRICS, LOGGER)
-    #def GetServiceById(self, request : ServiceId, context : grpc.ServicerContext) -> Service:
-    #    context_id, service_id = check_service_id_request('GetServiceById', request, self.database, LOGGER)
-    #    db_context = self.database.context(context_id)
-    #    db_service = db_context.service(service_id)
-    #    return Service(**db_service.dump())
+        for db_service_endpoint_pk,_ in db_service.references(ServiceEndPointModel):
+            ServiceEndPointModel(self.database, db_service_endpoint_pk).delete()
+
+        db_running_config = ConfigModel(self.database, db_service.service_config_fk)
+        for db_config_rule_pk,_ in db_running_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_running_constraints = ConstraintsModel(self.database, db_service.service_constraints_fk)
+        for db_constraint_pk,_ in db_running_constraints.references(ConstraintModel):
+            ConstraintModel(self.database, db_constraint_pk).delete()
+
+        db_service.delete()
+        db_running_config.delete()
+        db_running_constraints.delete()
+        return Empty()
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def GetConnectionList(self, request : Empty, context : grpc.ServicerContext) -> ConnectionList:
+    def GetConnectionList(self, request : ServiceId, context : grpc.ServicerContext) -> ConnectionList:
         #raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, 'RPC GetConnectionList() not implemented')
         return ConnectionList()
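
As implemented above, CreateService deliberately rejects requests carrying endpoints, constraints, or config rules; they are meant to be supplied afterwards through UpdateService, which synchronizes the service from Context, classifies the deltas, and drives the selected service handler. The snippet below is a caller-side sketch of that two-step flow, not part of the patch; the gRPC address, UUIDs and constraint values are hypothetical, and it assumes the usual service_pb2_grpc stubs generated for ServiceService.

# Illustrative sketch: create a bare service, then attach endpoints/constraints via UpdateService.
import grpc
from service.proto import context_pb2, service_pb2_grpc

channel = grpc.insecure_channel('localhost:3030')            # hypothetical Service endpoint
stub = service_pb2_grpc.ServiceServiceStub(channel)

service = context_pb2.Service()
service.service_id.context_id.context_uuid.uuid = 'admin'    # hypothetical context UUID
service.service_id.service_uuid.uuid = 'svc-1'               # hypothetical service UUID
stub.CreateService(service)                                  # no endpoints/constraints/config rules here

endpoint_id = service.service_endpoint_ids.add()
endpoint_id.device_id.device_uuid.uuid = 'dev-1'             # hypothetical device UUID
endpoint_id.endpoint_uuid.uuid = 'eth0'                      # hypothetical endpoint UUID
constraint = service.service_constraints.add()
constraint.constraint_type = 'latency_ms'                    # hypothetical constraint
constraint.constraint_value = '10'
stub.UpdateService(service)                                  # handler SetConfig/SetConstraint/SetEndpoint run here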
diff --git a/src/service/service/Tools.py b/src/service/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..80084f363231ee18c1fe75559b5633f0b5b02e68
--- /dev/null
+++ b/src/service/service/Tools.py
@@ -0,0 +1,160 @@
+import logging
+from typing import Any, Dict, List, Optional, Tuple
+from common.orm.HighLevel import get_object, get_related_objects
+from common.rpc_method_wrapper.ServiceExceptions import NotFoundException
+from context.client.ContextClient import ContextClient
+from service.proto.context_pb2 import ConfigRule, Constraint, EndPointId
+from service.service.database.ConstraintModel import get_constraints, grpc_constraints_to_raw
+from service.service.database.DatabaseDeviceTools import sync_device_from_context
+from service.service.database.EndPointModel import EndPointModel, grpc_endpointids_to_raw
+from .database.ConfigModel import ORM_ConfigActionEnum, get_config_rules, grpc_config_rules_to_raw
+from .database.DeviceModel import DeviceModel, DriverModel
+from .database.RelationModels import ServiceEndPointModel
+from .database.ServiceModel import ServiceModel
+from .service_handler_api._ServiceHandler import _ServiceHandler
+from .service_handler_api.FilterFields import FilterFieldEnum
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+
+LOGGER = logging.getLogger(__name__)
+
+def sync_devices_from_context(
+    context_client : ContextClient, db_service : ServiceModel, service_endpoint_ids : List[EndPointId]
+    ) -> Dict[str, DeviceModel]:
+
+    database = db_service.database
+
+    required_device_uuids = set()
+    db_endpoints = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk')
+    for db_endpoint in db_endpoints:
+        db_device = DeviceModel(database, db_endpoint.device_fk)
+        required_device_uuids.add(db_device.device_uuid)
+
+    for endpoint_id in service_endpoint_ids:
+        required_device_uuids.add(endpoint_id.device_id.device_uuid.uuid)
+
+    db_devices = {}
+    devices_not_found = set()
+    for device_uuid in required_device_uuids:
+        sync_device_from_context(device_uuid, context_client, database)
+        db_device = get_object(database, DeviceModel, device_uuid, raise_if_not_found=False)
+        if db_device is None:
+            devices_not_found.add(device_uuid)
+        else:
+            db_devices[device_uuid] = db_device
+
+    if len(devices_not_found) > 0:
+        extra_details = ['Devices({:s}) cannot be retrieved from Context'.format(str(devices_not_found))]
+        raise NotFoundException('Device', '...', extra_details=extra_details)
+
+    return db_devices
+
+def classify_config_rules(
+    db_service : ServiceModel, service_config_rules : List[ConfigRule],
+    resources_to_set: List[Tuple[str, Any]], resources_to_delete : List[Tuple[str, Any]]):
+
+    context_config_rules = get_config_rules(db_service.database, db_service.pk, 'running')
+    context_config_rules = {config_rule[1]: config_rule[2] for config_rule in context_config_rules}
+    #LOGGER.info('[classify_config_rules] context_config_rules = {:s}'.format(str(context_config_rules)))
+
+    request_config_rules = grpc_config_rules_to_raw(service_config_rules)
+    #LOGGER.info('[classify_config_rules] request_config_rules = {:s}'.format(str(request_config_rules)))
+
+    for config_rule in request_config_rules:
+        action, key, value = config_rule
+        if action == ORM_ConfigActionEnum.SET:
+            if (key not in context_config_rules) or (context_config_rules[key] != value):
+                resources_to_set.append((key, value))
+        elif action == ORM_ConfigActionEnum.DELETE:
+            if key in context_config_rules:
+                resources_to_delete.append((key, value))
+
+    #LOGGER.info('[classify_config_rules] resources_to_set = {:s}'.format(str(resources_to_set)))
+    #LOGGER.info('[classify_config_rules] resources_to_delete = {:s}'.format(str(resources_to_delete)))
+
+def classify_constraints(
+    db_service : ServiceModel, service_constraints : List[Constraint],
+    constraints_to_set: List[Tuple[str, str]], constraints_to_delete : List[Tuple[str, str]]):
+
+    context_constraints = get_constraints(db_service.database, db_service.pk, 'running')
+    context_constraints = {constraint[0]: constraint[1] for constraint in context_constraints}
+    #LOGGER.info('[classify_constraints] context_constraints = {:s}'.format(str(context_constraints)))
+
+    request_constraints = grpc_constraints_to_raw(service_constraints)
+    #LOGGER.info('[classify_constraints] request_constraints = {:s}'.format(str(request_constraints)))
+
+    for constraint in request_constraints:
+        constraint_type, constraint_value = constraint
+        if constraint_type in context_constraints:
+            if context_constraints[constraint_type] != constraint_value:
+                constraints_to_set.append(constraint)
+        else:
+            constraints_to_set.append(constraint)
+        context_constraints.pop(constraint_type, None)
+
+    for constraint_type, constraint_value in context_constraints.items():
+        constraints_to_delete.append((constraint_type, constraint_value))
+
+    #LOGGER.info('[classify_constraints] constraints_to_set = {:s}'.format(str(constraints_to_set)))
+    #LOGGER.info('[classify_constraints] constraints_to_delete = {:s}'.format(str(constraints_to_delete)))
+
+def get_service_endpointids(db_service : ServiceModel) -> List[Tuple[str, str, Optional[str]]]:
+    db_endpoints : List[EndPointModel] = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk')
+    endpoint_ids = [db_endpoint.dump_id() for db_endpoint in db_endpoints]
+    return [
+        (endpoint_id['device_id']['device_uuid']['uuid'], endpoint_id['endpoint_uuid']['uuid'],
+            endpoint_id.get('topology_id', {}).get('topology_uuid', {}).get('uuid', None))
+        for endpoint_id in endpoint_ids
+    ]
+
+def classify_endpointids(
+    db_service : ServiceModel, service_endpoint_ids : List[EndPointId],
+    endpointids_to_set: List[Tuple[str, str, Optional[str]]],
+    endpointids_to_delete : List[Tuple[str, str, Optional[str]]]):
+
+    context_endpoint_ids = get_service_endpointids(db_service)
+    #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids)))
+    context_endpoint_ids = set(context_endpoint_ids)
+    #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids)))
+
+    request_endpoint_ids = grpc_endpointids_to_raw(service_endpoint_ids)
+    #LOGGER.info('[classify_endpointids] request_endpoint_ids = {:s}'.format(str(request_endpoint_ids)))
+
+    for endpoint_id in request_endpoint_ids:
+        if endpoint_id not in context_endpoint_ids:
+            endpointids_to_set.append(endpoint_id)
+        context_endpoint_ids.discard(endpoint_id)
+
+    for endpoint_id in context_endpoint_ids:
+        endpointids_to_delete.append(endpoint_id)
+
+    #LOGGER.info('[classify_endpointids] endpointids_to_set = {:s}'.format(str(endpointids_to_set)))
+    #LOGGER.info('[classify_endpointids] endpointids_to_delete = {:s}'.format(str(endpointids_to_delete)))
+
+def get_service_handler_class(
+    service_handler_factory : ServiceHandlerFactory, db_service : ServiceModel, db_devices : Dict[str, DeviceModel]
+    ) -> Optional[_ServiceHandler]:
+
+    str_service_key = db_service.pk
+    database = db_service.database
+
+    # Assume all devices involved in the service must support at least one driver in common
+    device_drivers = None
+    for _,db_device in db_devices.items():
+        db_driver_pks = db_device.references(DriverModel)
+        db_driver_names = [DriverModel(database, pk).driver.value for pk,_ in db_driver_pks]
+        if device_drivers is None:
+            device_drivers = set(db_driver_names)
+        else:
+            device_drivers.intersection_update(db_driver_names)
+
+    filter_fields = {
+        FilterFieldEnum.SERVICE_TYPE.value  : db_service.service_type.value,    # must be supported
+        FilterFieldEnum.DEVICE_DRIVER.value : device_drivers,                   # at least one must be supported
+    }
+
+    msg = 'Selecting service handler for service({:s}) with filter_fields({:s})...'
+    LOGGER.info(msg.format(str(str_service_key), str(filter_fields)))
+    service_handler_class = service_handler_factory.get_service_handler_class(**filter_fields)
+    msg = 'ServiceHandler({:s}) selected for service({:s}) with filter_fields({:s})...'
+    LOGGER.info(msg.format(str(service_handler_class.__name__), str(str_service_key), str(filter_fields)))
+    return service_handler_class
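
get_service_handler_class above selects a handler by the service type plus the set of drivers common to every device the service touches. A tiny illustrative reduction of that intersection step outside the ORM (driver names are hypothetical):

# Each device advertises the drivers it supports; the chosen handler must work through a
# driver shared by all of them, hence the running intersection.
device_drivers_by_uuid = {
    'dev-1': {'openconfig', 'p4'},        # hypothetical
    'dev-2': {'openconfig', 'emulated'},  # hypothetical
}
common_drivers = None
for drivers in device_drivers_by_uuid.values():
    common_drivers = set(drivers) if common_drivers is None else (common_drivers & drivers)
print(common_drivers)  # {'openconfig'}: at least one shared driver must remain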
diff --git a/src/service/service/__main__.py b/src/service/service/__main__.py
index 7de072b007d16fbd1c3274ee6a1ba04a5e0e56e5..d6a0e9fd32c4e707dd731e5185aa7751e8bd65ee 100644
--- a/src/service/service/__main__.py
+++ b/src/service/service/__main__.py
@@ -1,8 +1,14 @@
 import logging, signal, sys, threading
 from prometheus_client import start_http_server
 from common.Settings import get_setting
-from service.service.ServiceService import ServiceService
-from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.Config import (
+    CONTEXT_SERVICE_HOST, CONTEXT_SERVICE_PORT, DEVICE_SERVICE_HOST, DEVICE_SERVICE_PORT, GRPC_SERVICE_PORT,
+    GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT)
+from .ServiceService import ServiceService
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from .service_handlers import SERVICE_HANDLERS
 
 terminate = threading.Event()
 LOGGER = None
@@ -14,11 +20,15 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
 def main():
     global LOGGER # pylint: disable=global-statement
 
-    service_port = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
-    max_workers  = get_setting('MAX_WORKERS',                      default=GRPC_MAX_WORKERS )
-    grace_period = get_setting('GRACE_PERIOD',                     default=GRPC_GRACE_PERIOD)
-    log_level    = get_setting('LOG_LEVEL',                        default=LOG_LEVEL        )
-    metrics_port = get_setting('METRICS_PORT',                     default=METRICS_PORT     )
+    grpc_service_port    = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT   )
+    max_workers          = get_setting('MAX_WORKERS',                      default=GRPC_MAX_WORKERS    )
+    grace_period         = get_setting('GRACE_PERIOD',                     default=GRPC_GRACE_PERIOD   )
+    log_level            = get_setting('LOG_LEVEL',                        default=LOG_LEVEL           )
+    metrics_port         = get_setting('METRICS_PORT',                     default=METRICS_PORT        )
+    context_service_host = get_setting('CONTEXTSERVICE_SERVICE_HOST',      default=CONTEXT_SERVICE_HOST)
+    context_service_port = get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC', default=CONTEXT_SERVICE_PORT)
+    device_service_host  = get_setting('DEVICESERVICE_SERVICE_HOST',       default=DEVICE_SERVICE_HOST )
+    device_service_port  = get_setting('DEVICESERVICE_SERVICE_PORT_GRPC',  default=DEVICE_SERVICE_PORT )
 
     logging.basicConfig(level=log_level)
     LOGGER = logging.getLogger(__name__)
@@ -31,8 +41,24 @@ def main():
     # Start metrics server
     start_http_server(metrics_port)
 
+    # Initialize Context Client
+    if context_service_host is None or context_service_port is None:
+        raise Exception('Wrong address({:s}):port({:s}) of Context component'.format(
+            str(context_service_host), str(context_service_port)))
+    context_client = ContextClient(context_service_host, context_service_port)
+
+    # Initialize Device Client
+    if device_service_host is None or device_service_port is None:
+        raise Exception('Wrong address({:s}):port({:s}) of Device component'.format(
+            str(device_service_host), str(device_service_port)))
+    device_client = DeviceClient(device_service_host, device_service_port)
+
+    service_handler_factory = ServiceHandlerFactory(SERVICE_HANDLERS)
+
     # Starting service service
-    grpc_service = ServiceService(port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service = ServiceService(
+        context_client, device_client, service_handler_factory, port=grpc_service_port, max_workers=max_workers,
+        grace_period=grace_period)
     grpc_service.start()
 
     # Wait for Ctrl+C or termination signal
diff --git a/src/service/service/database/ConfigModel.py b/src/service/service/database/ConfigModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..39a2b5a794fa88d2700603975f36a3fa4ef90450
--- /dev/null
+++ b/src/service/service/database/ConfigModel.py
@@ -0,0 +1,98 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.proto.context_pb2 import ConfigActionEnum
+from .Tools import fast_hasher, grpc_to_enum, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ConfigActionEnum(Enum):
+    UNDEFINED = ConfigActionEnum.CONFIGACTION_UNDEFINED
+    SET       = ConfigActionEnum.CONFIGACTION_SET
+    DELETE    = ConfigActionEnum.CONFIGACTION_DELETE
+
+grpc_to_enum__config_action = functools.partial(
+    grpc_to_enum, ConfigActionEnum, ORM_ConfigActionEnum)
+
+class ConfigModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_config_rule_pks = self.references(ConfigRuleModel)
+        config_rules = [ConfigRuleModel(self.database, pk).dump(include_position=True) for pk,_ in db_config_rule_pks]
+        config_rules = sorted(config_rules, key=operator.itemgetter('position'))
+        return [remove_dict_key(config_rule, 'position') for config_rule in config_rules]
+
+class ConfigRuleModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    config_fk = ForeignKeyField(ConfigModel)
+    position = IntegerField(min_value=0, required=True)
+    action = EnumeratedField(ORM_ConfigActionEnum, required=True)
+    key = StringField(required=True, allow_empty=False)
+    value = StringField(required=False, allow_empty=True)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'action': self.action.value,
+            'resource_key': self.key,
+            'resource_value': self.value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def delete_all_config_rules(database : Database, db_parent_pk : str, config_name : str) -> None:
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    db_config : ConfigModel = get_object(database, ConfigModel, str_config_key, raise_if_not_found=False)
+    if db_config is None: return
+    db_config_rule_pks = db_config.references(ConfigRuleModel)
+    for pk,_ in db_config_rule_pks: ConfigRuleModel(database, pk).delete()
+
+def grpc_config_rules_to_raw(grpc_config_rules) -> List[Tuple[ORM_ConfigActionEnum, str, str]]:
+    def translate(grpc_config_rule):
+        action = grpc_to_enum__config_action(grpc_config_rule.action)
+        return action, grpc_config_rule.resource_key, grpc_config_rule.resource_value
+    return [translate(grpc_config_rule) for grpc_config_rule in grpc_config_rules]
+
+def get_config_rules(
+    database : Database, db_parent_pk : str, config_name : str
+    ) -> List[Tuple[ORM_ConfigActionEnum, str, str]]:
+
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    db_config = get_object(database, ConfigModel, str_config_key, raise_if_not_found=False)
+    return [] if db_config is None else [
+        (ORM_ConfigActionEnum._value2member_map_.get(config_rule['action']),
+            config_rule['resource_key'], config_rule['resource_value'])
+        for config_rule in db_config.dump()
+    ]
+
+def update_config(
+    database : Database, db_parent_pk : str, config_name : str,
+    raw_config_rules : List[Tuple[ORM_ConfigActionEnum, str, str]]
+    ) -> List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]]:
+
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    result : Tuple[ConfigModel, bool] = get_or_create_object(database, ConfigModel, str_config_key)
+    db_config, created = result
+
+    db_objects : List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]] = [(db_config, created)]
+
+    for position,(action, resource_key, resource_value) in enumerate(raw_config_rules):
+        str_rule_key_hash = fast_hasher(resource_key)
+        str_config_rule_key = key_to_str([db_config.pk, str_rule_key_hash], separator=':')
+        result : Tuple[ConfigRuleModel, bool] = update_or_create_object(
+            database, ConfigRuleModel, str_config_rule_key, {
+                'config_fk': db_config, 'position': position, 'action': action, 'key': resource_key,
+                'value': resource_value})
+        db_config_rule, updated = result
+        db_objects.append((db_config_rule, updated))
+
+    return db_objects
diff --git a/src/service/service/database/ConstraintModel.py b/src/service/service/database/ConstraintModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2a4933e4caa9697a41947e4a9266b1a0d6ee5a7
--- /dev/null
+++ b/src/service/service/database/ConstraintModel.py
@@ -0,0 +1,82 @@
+import logging, operator
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.service.database.Tools import fast_hasher, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ConstraintsModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_constraint_pks = self.references(ConstraintModel)
+        constraints = [ConstraintModel(self.database, pk).dump(include_position=True) for pk,_ in db_constraint_pks]
+        constraints = sorted(constraints, key=operator.itemgetter('position'))
+        return [remove_dict_key(constraint, 'position') for constraint in constraints]
+
+class ConstraintModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    constraints_fk = ForeignKeyField(ConstraintsModel)
+    position = IntegerField(min_value=0, required=True)
+    constraint_type = StringField(required=True, allow_empty=False)
+    constraint_value = StringField(required=True, allow_empty=False)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'constraint_type': self.constraint_type,
+            'constraint_value': self.constraint_value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def delete_all_constraints(database : Database, db_parent_pk : str, constraints_name : str) -> None:
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    db_constraints : ConstraintsModel = get_object(
+        database, ConstraintsModel, str_constraints_key, raise_if_not_found=False)
+    if db_constraints is None: return
+    db_constraint_pks = db_constraints.references(ConstraintModel)
+    for pk,_ in db_constraint_pks: ConstraintModel(database, pk).delete()
+
+def grpc_constraints_to_raw(grpc_constraints) -> List[Tuple[str, str]]:
+    return [
+        (grpc_constraint.constraint_type, grpc_constraint.constraint_value)
+        for grpc_constraint in grpc_constraints
+    ]
+
+def get_constraints(database : Database, db_parent_pk : str, constraints_name : str) -> List[Tuple[str, str]]:
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    db_constraints : ConstraintsModel = get_object(
+        database, ConstraintsModel, str_constraints_key, raise_if_not_found=False)
+    return [] if db_constraints is None else [
+        (constraint['constraint_type'], constraint['constraint_value'])
+        for constraint in db_constraints.dump()
+    ]
+
+def update_constraints(
+    database : Database, db_parent_pk : str, constraints_name : str, raw_constraints : List[Tuple[str, str]]
+    ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
+
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
+    db_constraints, created = result
+
+    db_objects : List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]] = [(db_constraints, created)]
+
+    for position,(constraint_type, constraint_value) in enumerate(raw_constraints):
+        str_constraint_key_hash = fast_hasher(constraint_type)
+        str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
+        result : Tuple[ConstraintModel, bool] = update_or_create_object(
+            database, ConstraintModel, str_constraint_key, {
+                'constraints_fk': db_constraints, 'position': position, 'constraint_type': constraint_type,
+                'constraint_value': constraint_value})
+        db_constraints_rule, updated = result
+        db_objects.append((db_constraints_rule, updated))
+
+    return db_objects
diff --git a/src/service/service/database/ContextModel.py b/src/service/service/database/ContextModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..74e577e5ba5ab366cd7c3ca07c8730d21d5e8ec9
--- /dev/null
+++ b/src/service/service/database/ContextModel.py
@@ -0,0 +1,30 @@
+import logging
+from typing import Dict #, List
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+
+LOGGER = logging.getLogger(__name__)
+
+class ContextModel(Model):
+    pk = PrimaryKeyField()
+    context_uuid = StringField(required=True, allow_empty=False)
+
+    def dump_id(self) -> Dict:
+        return {'context_uuid': {'uuid': self.context_uuid}}
+
+#    def dump_service_ids(self) -> List[Dict]:
+#        from .ServiceModel import ServiceModel # pylint: disable=import-outside-toplevel
+#        db_service_pks = self.references(ServiceModel)
+#        return [ServiceModel(self.database, pk).dump_id() for pk,_ in db_service_pks]
+#
+#    def dump_topology_ids(self) -> List[Dict]:
+#        from .TopologyModel import TopologyModel # pylint: disable=import-outside-toplevel
+#        db_topology_pks = self.references(TopologyModel)
+#        return [TopologyModel(self.database, pk).dump_id() for pk,_ in db_topology_pks]
+#
+#    def dump(self, include_services=True, include_topologies=True) -> Dict: # pylint: disable=arguments-differ
+#        result = {'context_id': self.dump_id()}
+#        if include_services: result['service_ids'] = self.dump_service_ids()
+#        if include_topologies: result['topology_ids'] = self.dump_topology_ids()
+#        return result
diff --git a/src/service/service/database/DatabaseDeviceTools.py b/src/service/service/database/DatabaseDeviceTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cf0252831921b07abeba629bf154dc2c8b475da
--- /dev/null
+++ b/src/service/service/database/DatabaseDeviceTools.py
@@ -0,0 +1,87 @@
+import grpc
+from typing import Tuple
+from common.orm.Database import Database
+from common.orm.HighLevel import get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
+from context.client.ContextClient import ContextClient
+from device.proto.context_pb2 import Device, DeviceId
+from .ConfigModel import delete_all_config_rules, grpc_config_rules_to_raw, update_config
+from .ContextModel import ContextModel
+from .DeviceModel import DeviceModel, grpc_to_enum__device_operational_status, set_drivers
+from .EndPointModel import EndPointModel
+from .TopologyModel import TopologyModel
+
+def update_device_in_local_database(database : Database, device : Device) -> Tuple[DeviceModel, bool]:
+    device_uuid = device.device_id.device_uuid.uuid
+
+    for i,endpoint in enumerate(device.device_endpoints):
+        endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+        if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+        if device_uuid != endpoint_device_uuid:
+            raise InvalidArgumentException(
+                'request.device_endpoints[{:d}].device_id.device_uuid.uuid'.format(i), endpoint_device_uuid,
+                ['should be == {:s}({:s})'.format('request.device_id.device_uuid.uuid', device_uuid)])
+
+    config_rules = grpc_config_rules_to_raw(device.device_config.config_rules)
+    delete_all_config_rules(database, device_uuid, 'running')
+    running_config_result = update_config(database, device_uuid, 'running', config_rules)
+
+    result : Tuple[DeviceModel, bool] = update_or_create_object(database, DeviceModel, device_uuid, {
+        'device_uuid'              : device_uuid,
+        'device_type'              : device.device_type,
+        'device_operational_status': grpc_to_enum__device_operational_status(device.device_operational_status),
+        'device_config_fk'         : running_config_result[0][0],
+    })
+    db_device, updated = result
+    set_drivers(database, db_device, device.device_drivers)
+
+    for i,endpoint in enumerate(device.device_endpoints):
+        endpoint_uuid = endpoint.endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+        if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+
+        str_endpoint_key = key_to_str([device_uuid, endpoint_uuid])
+        endpoint_attributes = {
+            'device_fk'    : db_device,
+            'endpoint_uuid': endpoint_uuid,
+            'endpoint_type': endpoint.endpoint_type,
+        }
+
+        endpoint_topology_context_uuid = endpoint.endpoint_id.topology_id.context_id.context_uuid.uuid
+        endpoint_topology_uuid = endpoint.endpoint_id.topology_id.topology_uuid.uuid
+        if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+            result : Tuple[ContextModel, bool] = get_or_create_object(
+                database, ContextModel, endpoint_topology_context_uuid, defaults={
+                    'context_uuid': endpoint_topology_context_uuid,
+                })
+            db_context, _ = result
+
+            str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+            result : Tuple[TopologyModel, bool] = get_or_create_object(
+                database, TopologyModel, str_topology_key, defaults={
+                    'context_fk': db_context,
+                    'topology_uuid': endpoint_topology_uuid,
+                })
+            db_topology, _ = result
+
+            str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+            endpoint_attributes['topology_fk'] = db_topology
+
+        result : Tuple[EndPointModel, bool] = update_or_create_object(
+            database, EndPointModel, str_endpoint_key, endpoint_attributes)
+        _, db_endpoint_updated = result
+        updated = updated or db_endpoint_updated
+
+    return db_device, updated
+
+def sync_device_from_context(
+    device_uuid : str, context_client : ContextClient, database : Database
+    ) -> Tuple[DeviceModel, bool]:
+
+    try:
+        device : Device = context_client.GetDevice(DeviceId(device_uuid={'uuid': device_uuid}))
+    except grpc.RpcError as e:
+        if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member
+        return None
+    return update_device_in_local_database(database, device)
diff --git a/src/service/service/database/DatabaseServiceTools.py b/src/service/service/database/DatabaseServiceTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b538f82a62b73aaa194628b986810204dcbc46b
--- /dev/null
+++ b/src/service/service/database/DatabaseServiceTools.py
@@ -0,0 +1,129 @@
+import grpc
+from typing import Tuple
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
+from context.client.ContextClient import ContextClient
+from service.proto.context_pb2 import Service, ServiceId
+from .ConfigModel import delete_all_config_rules, grpc_config_rules_to_raw, update_config
+from .ConstraintModel import delete_all_constraints, grpc_constraints_to_raw, update_constraints
+from .ContextModel import ContextModel
+from .EndPointModel import EndPointModel
+from .RelationModels import ServiceEndPointModel
+from .ServiceModel import ServiceModel, grpc_to_enum__service_status, grpc_to_enum__service_type
+from .TopologyModel import TopologyModel
+
+def update_service_in_local_database(database : Database, service : Service) -> Tuple[ServiceModel, bool]:
+    service_uuid = service.service_id.service_uuid.uuid
+    service_context_uuid = service.service_id.context_id.context_uuid.uuid
+    if len(service_context_uuid) == 0: service_context_uuid = DEFAULT_CONTEXT_UUID
+
+    topology_uuids = {}
+    for i,endpoint_id in enumerate(service.service_endpoint_ids):
+        endpoint_uuid                  = endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid           = endpoint_id.device_id.device_uuid.uuid
+        endpoint_topology_uuid         = endpoint_id.topology_id.topology_uuid.uuid
+        endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+        if len(endpoint_device_uuid) == 0:
+            raise InvalidArgumentException(
+                'request.service_endpoint_ids[{:d}].device_id.device_uuid.uuid'.format(i), endpoint_device_uuid,
+                ['not set'])
+
+        if len(endpoint_topology_context_uuid) == 0: endpoint_topology_context_uuid = service_context_uuid
+        if service_context_uuid != endpoint_topology_context_uuid:
+            raise InvalidArgumentException(
+                'request.service_endpoint_ids[{:d}].topology_id.context_id.context_uuid.uuid'.format(i),
+                endpoint_topology_context_uuid,
+                ['should be == {:s}({:s})'.format('service_id.context_id.context_uuid.uuid', service_context_uuid)])
+
+        topology_uuids.setdefault(endpoint_topology_uuid, set()).add(
+            'request.service_endpoint_ids[{:d}].topology_id.topology_uuid.uuid'.format(i))
+
+    if len(topology_uuids) > 1:
+        raise InvalidArgumentException(
+            'request.service_endpoint_ids', '...',
+            ['Multiple different topology_uuid values specified: {:s}'.format(str(topology_uuids))])
+    if len(topology_uuids) == 1:
+        topology_uuid = topology_uuids.popitem()[0]
+    else:
+        topology_uuid = DEFAULT_TOPOLOGY_UUID
+
+    result : Tuple[ContextModel, bool] = get_or_create_object(
+        database, ContextModel, service_context_uuid, defaults={'context_uuid': service_context_uuid})
+    db_context, _ = result
+
+    str_topology_key = None
+    if len(topology_uuid) > 0:
+        str_topology_key = key_to_str([service_context_uuid, topology_uuid])
+        result : Tuple[TopologyModel, bool] = get_or_create_object(
+            database, TopologyModel, str_topology_key, defaults={'context_fk': db_context, 'topology_uuid': topology_uuid})
+        #db_topology, _ = result
+
+    str_service_key = key_to_str([service_context_uuid, service_uuid])
+
+    config_rules = grpc_config_rules_to_raw(service.service_config.config_rules)
+    delete_all_config_rules(database, str_service_key, 'running')
+    running_config_result = update_config(database, str_service_key, 'running', config_rules)
+
+    constraints = grpc_constraints_to_raw(service.service_constraints)
+    delete_all_constraints(database, str_service_key, 'running')
+    running_constraints_result = update_constraints(database, str_service_key, 'running', constraints)
+
+    result : Tuple[ContextModel, bool] = get_or_create_object(
+        database, ContextModel, service_context_uuid, defaults={
+            'context_uuid': service_context_uuid,
+        })
+    db_context, _ = result
+
+    result : Tuple[ServiceModel, bool] = update_or_create_object(database, ServiceModel, str_service_key, {
+        'context_fk'            : db_context,
+        'service_uuid'          : service_uuid,
+        'service_type'          : grpc_to_enum__service_type(service.service_type),
+        'service_status'        : grpc_to_enum__service_status(service.service_status.service_status),
+        'service_constraints_fk': running_constraints_result[0][0],
+        'service_config_fk'     : running_config_result[0][0],
+    })
+    db_service, updated = result
+
+    for i,endpoint_id in enumerate(service.service_endpoint_ids):
+        endpoint_uuid        = endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid = endpoint_id.device_id.device_uuid.uuid
+
+        str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+        if str_topology_key is not None:
+            str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+        db_endpoint : EndPointModel = get_object(database, EndPointModel, str_endpoint_key)
+
+        str_service_endpoint_key = key_to_str([str_service_key, str_endpoint_key], separator='--')
+        result : Tuple[ServiceEndPointModel, bool] = get_or_create_object(
+            database, ServiceEndPointModel, str_service_endpoint_key, {
+                'service_fk': db_service, 'endpoint_fk': db_endpoint})
+        _, service_endpoint_created = result
+        updated = updated or service_endpoint_created
+
+    return db_service, updated
+
+def sync_service_from_context(
+    context_uuid : str, service_uuid : str, context_client : ContextClient, database : Database
+    ) -> Tuple[ServiceModel, bool]:
+
+    try:
+        service : Service = context_client.GetService(ServiceId(
+            context_id={'context_uuid': {'uuid': context_uuid}},
+            service_uuid={'uuid': service_uuid}))
+    except grpc.RpcError as e:
+        if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member
+        return None
+    return update_service_in_local_database(database, service)
+
+def sync_service_to_context(db_service : ServiceModel, context_client : ContextClient) -> None:
+    if db_service is None: return
+    context_client.SetService(Service(**db_service.dump(
+        include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))
+
+def delete_service_from_context(db_service : ServiceModel, context_client : ContextClient) -> None:
+    if db_service is None: return
+    context_client.RemoveService(ServiceId(**db_service.dump_id()))
diff --git a/src/service/service/database/DeviceModel.py b/src/service/service/database/DeviceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..d005292acfd4cf20619548dbfa5b48a08780ec2f
--- /dev/null
+++ b/src/service/service/database/DeviceModel.py
@@ -0,0 +1,87 @@
+import functools, logging
+from enum import Enum
+from typing import Dict, List
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from .ConfigModel import ConfigModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_DeviceDriverEnum(Enum):
+    UNDEFINED             = DeviceDriverEnum.DEVICEDRIVER_UNDEFINED
+    OPENCONFIG            = DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG
+    TRANSPORT_API         = DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API
+    P4                    = DeviceDriverEnum.DEVICEDRIVER_P4
+    IETF_NETWORK_TOPOLOGY = DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY
+    ONF_TR_352            = DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352
+
+grpc_to_enum__device_driver = functools.partial(
+    grpc_to_enum, DeviceDriverEnum, ORM_DeviceDriverEnum)
+
+class ORM_DeviceOperationalStatusEnum(Enum):
+    UNDEFINED = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_UNDEFINED
+    DISABLED  = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED
+    ENABLED   = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
+grpc_to_enum__device_operational_status = functools.partial(
+    grpc_to_enum, DeviceOperationalStatusEnum, ORM_DeviceOperationalStatusEnum)
+
+class DeviceModel(Model):
+    pk = PrimaryKeyField()
+    device_uuid = StringField(required=True, allow_empty=False)
+    device_type = StringField()
+    device_config_fk = ForeignKeyField(ConfigModel)
+    device_operational_status = EnumeratedField(ORM_DeviceOperationalStatusEnum, required=True)
+
+    def dump_id(self) -> Dict:
+        return {'device_uuid': {'uuid': self.device_uuid}}
+
+    def dump_config(self) -> Dict:
+        return ConfigModel(self.database, self.device_config_fk).dump()
+
+    def dump_drivers(self) -> List[int]:
+        db_driver_pks = self.references(DriverModel)
+        return [DriverModel(self.database, pk).dump() for pk,_ in db_driver_pks]
+
+    def dump_endpoints(self) -> List[Dict]:
+        from .EndPointModel import EndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints_pks = self.references(EndPointModel)
+        return [EndPointModel(self.database, pk).dump() for pk,_ in db_endpoints_pks]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_config_rules=True, include_drivers=True, include_endpoints=True
+        ) -> Dict:
+        result = {
+            'device_id': self.dump_id(),
+            'device_type': self.device_type,
+            'device_operational_status': self.device_operational_status.value,
+        }
+        if include_config_rules: result.setdefault('device_config', {})['config_rules'] = self.dump_config()
+        if include_drivers: result['device_drivers'] = self.dump_drivers()
+        if include_endpoints: result['device_endpoints'] = self.dump_endpoints()
+        return result
+
+class DriverModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    device_fk = ForeignKeyField(DeviceModel)
+    driver = EnumeratedField(ORM_DeviceDriverEnum, required=True)
+
+    def dump(self) -> Dict:
+        return self.driver.value
+
+def set_drivers(database : Database, db_device : DeviceModel, grpc_device_drivers):
+    db_device_pk = db_device.pk
+    for driver in grpc_device_drivers:
+        orm_driver = grpc_to_enum__device_driver(driver)
+        str_device_driver_key = key_to_str([db_device_pk, orm_driver.name])
+        db_device_driver = DriverModel(database, str_device_driver_key)
+        db_device_driver.device_fk = db_device
+        db_device_driver.driver = orm_driver
+        db_device_driver.save()
diff --git a/src/service/service/database/EndPointModel.py b/src/service/service/database/EndPointModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..78b00db8628a841c8deae89e3db8ca1056fa1db5
--- /dev/null
+++ b/src/service/service/database/EndPointModel.py
@@ -0,0 +1,43 @@
+import logging
+from typing import Dict, List, Optional, Tuple
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.proto.context_pb2 import EndPointId
+from .DeviceModel import DeviceModel
+from .TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+class EndPointModel(Model):
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel, required=False)
+    device_fk = ForeignKeyField(DeviceModel)
+    endpoint_uuid = StringField(required=True, allow_empty=False)
+    endpoint_type = StringField()
+
+    def dump_id(self) -> Dict:
+        device_id = DeviceModel(self.database, self.device_fk).dump_id()
+        result = {
+            'device_id': device_id,
+            'endpoint_uuid': {'uuid': self.endpoint_uuid},
+        }
+        if self.topology_fk is not None:
+            result['topology_id'] = TopologyModel(self.database, self.topology_fk).dump_id()
+        return result
+
+    def dump(self) -> Dict:
+        return {
+            'endpoint_id': self.dump_id(),
+            'endpoint_type': self.endpoint_type,
+        }
+
+def grpc_endpointids_to_raw(grpc_endpointids : List[EndPointId]) -> List[Tuple[str, str, Optional[str]]]:
+    def translate(grpc_endpointid : EndPointId) -> Tuple[str, str, Optional[str]]:
+        device_uuid   = grpc_endpointid.device_id.device_uuid.uuid
+        endpoint_uuid = grpc_endpointid.endpoint_uuid.uuid
+        topology_uuid = grpc_endpointid.topology_id.topology_uuid.uuid
+        if len(topology_uuid) == 0: topology_uuid = None
+        return device_uuid, endpoint_uuid, topology_uuid
+    return [translate(grpc_endpointid) for grpc_endpointid in grpc_endpointids]
diff --git a/src/service/service/database/RelationModels.py b/src/service/service/database/RelationModels.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbf93feff84a99aa20acb43a31b9abb30ae14a20
--- /dev/null
+++ b/src/service/service/database/RelationModels.py
@@ -0,0 +1,31 @@
+import logging
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.model.Model import Model
+from .DeviceModel import DeviceModel
+from .EndPointModel import EndPointModel
+#from .LinkModel import LinkModel
+from .ServiceModel import ServiceModel
+from .TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+#class LinkEndPointModel(Model): # pylint: disable=abstract-method
+#    pk = PrimaryKeyField()
+#    link_fk = ForeignKeyField(LinkModel)
+#    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class ServiceEndPointModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    service_fk = ForeignKeyField(ServiceModel)
+    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class TopologyDeviceModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel)
+    device_fk = ForeignKeyField(DeviceModel)
+
+#class TopologyLinkModel(Model): # pylint: disable=abstract-method
+#    pk = PrimaryKeyField()
+#    topology_fk = ForeignKeyField(TopologyModel)
+#    link_fk = ForeignKeyField(LinkModel)
diff --git a/src/service/service/database/ServiceModel.py b/src/service/service/database/ServiceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6bd2e9c47f77538fac6d65aa0e02ca720126abb
--- /dev/null
+++ b/src/service/service/database/ServiceModel.py
@@ -0,0 +1,74 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_related_objects
+from context.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
+from .ConfigModel import ConfigModel
+from .ConstraintModel import ConstraintsModel
+from .ContextModel import ContextModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ServiceTypeEnum(Enum):
+    UNKNOWN                   = ServiceTypeEnum.SERVICETYPE_UNKNOWN
+    L3NM                      = ServiceTypeEnum.SERVICETYPE_L3NM
+    L2NM                      = ServiceTypeEnum.SERVICETYPE_L2NM
+    TAPI_CONNECTIVITY_SERVICE = ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE
+
+grpc_to_enum__service_type = functools.partial(
+    grpc_to_enum, ServiceTypeEnum, ORM_ServiceTypeEnum)
+
+class ORM_ServiceStatusEnum(Enum):
+    UNDEFINED       = ServiceStatusEnum.SERVICESTATUS_UNDEFINED
+    PLANNED         = ServiceStatusEnum.SERVICESTATUS_PLANNED
+    ACTIVE          = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+    PENDING_REMOVAL = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL
+
+grpc_to_enum__service_status = functools.partial(
+    grpc_to_enum, ServiceStatusEnum, ORM_ServiceStatusEnum)
+
+class ServiceModel(Model):
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    service_uuid = StringField(required=True, allow_empty=False)
+    service_type = EnumeratedField(ORM_ServiceTypeEnum, required=True)
+    service_constraints_fk = ForeignKeyField(ConstraintsModel)
+    service_status = EnumeratedField(ORM_ServiceStatusEnum, required=True)
+    service_config_fk = ForeignKeyField(ConfigModel)
+
+    def dump_id(self) -> Dict:
+        context_id = ContextModel(self.database, self.context_fk).dump_id()
+        return {
+            'context_id': context_id,
+            'service_uuid': {'uuid': self.service_uuid},
+        }
+
+    def dump_endpoint_ids(self) -> List[Dict]:
+        from .RelationModels import ServiceEndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints = get_related_objects(self, ServiceEndPointModel, 'endpoint_fk')
+        return [db_endpoint.dump_id() for db_endpoint in sorted(db_endpoints, key=operator.attrgetter('pk'))]
+
+    def dump_constraints(self) -> List[Dict]:
+        return ConstraintsModel(self.database, self.service_constraints_fk).dump()
+
+    def dump_config(self) -> Dict:
+        return ConfigModel(self.database, self.service_config_fk).dump()
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_endpoint_ids=True, include_constraints=True, include_config_rules=True
+        ) -> Dict:
+        result = {
+            'service_id': self.dump_id(),
+            'service_type': self.service_type.value,
+            'service_status': {'service_status': self.service_status.value},
+        }
+        if include_endpoint_ids: result['service_endpoint_ids'] = self.dump_endpoint_ids()
+        if include_constraints: result['service_constraints'] = self.dump_constraints()
+        if include_config_rules: result.setdefault('service_config', {})['config_rules'] = self.dump_config()
+        return result
diff --git a/src/service/service/database/Tools.py b/src/service/service/database/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..36ffbcd46fcf686371b0799445ce4f9ce5b75838
--- /dev/null
+++ b/src/service/service/database/Tools.py
@@ -0,0 +1,58 @@
+import hashlib, re
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+
+# Convenient helper function to remove dictionary items in dict/list/set comprehensions.
+
+def remove_dict_key(dictionary : Dict, key : str):
+    dictionary.pop(key, None)
+    return dictionary
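+
+# Illustrative example (editorial sketch, not part of the original patch): remove_dict_key() mutates the dictionary
+# in place and returns it, so it can be used inside comprehensions, e.g.:
+#   remove_dict_key({'action': 1, 'position': 0}, 'position')   # -> {'action': 1}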
+
+# Enumeration classes are redundant with gRPC classes, but gRPC does not provide a programmatic way to retrieve the
+# values it expects from strings containing the desired value symbol or its integer value, so a kind of mapping is
+# required. Besides, ORM Models expect Enum classes in EnumeratedFields; we create specific, conveniently defined
+# Enum classes to serve both purposes.
+
+def grpc_to_enum(grpc_enum_class, orm_enum_class : Enum, grpc_enum_value):
+    grpc_enum_name = grpc_enum_class.Name(grpc_enum_value)
+    grpc_enum_prefix = orm_enum_class.__name__.upper()
+    grpc_enum_prefix = re.sub(r'^ORM_(.+)$', r'\1', grpc_enum_prefix)
+    grpc_enum_prefix = re.sub(r'^(.+)ENUM$', r'\1', grpc_enum_prefix)
+    grpc_enum_prefix = grpc_enum_prefix + '_'
+    orm_enum_name = grpc_enum_name.replace(grpc_enum_prefix, '')
+    orm_enum_value = orm_enum_class._member_map_.get(orm_enum_name) # pylint: disable=protected-access
+    return orm_enum_value
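+
+# Illustrative example (editorial sketch, not part of the original patch): the models in this package bind the first
+# two arguments with functools.partial; e.g., ConfigModel.py defines:
+#   grpc_to_enum__config_action = functools.partial(grpc_to_enum, ConfigActionEnum, ORM_ConfigActionEnum)
+#   grpc_to_enum__config_action(ConfigActionEnum.CONFIGACTION_SET)   # -> ORM_ConfigActionEnum.SET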
+
+# For some models, it is convenient to produce a string hash for fast comparisons of existence or modification. The
+# fast_hasher method computes configurable-length hashes (between 1 and 64 bytes) and returns them in hex
+# representation.
+
+FASTHASHER_ITEM_ACCEPTED_FORMAT = 'Union[bytes, str]'
+FASTHASHER_DATA_ACCEPTED_FORMAT = 'Union[{fmt:s}, List[{fmt:s}], Tuple[{fmt:s}]]'.format(
+    fmt=FASTHASHER_ITEM_ACCEPTED_FORMAT)
+
+def fast_hasher(data : Union[bytes, str, List[Union[bytes, str]], Tuple[Union[bytes, str]]], digest_size : int = 8):
+    hasher = hashlib.blake2b(digest_size=digest_size)
+    # Do not accept sets, dicts, or other unordered data structures since their order is arbitrary, thus producing
+    # different hashes depending on the order. Consider adding support for sets or dicts with prior sorting of items
+    # by their key.
+
+    if isinstance(data, bytes):
+        data = [data]
+    elif isinstance(data, str):
+        data = [data.encode('UTF-8')]
+    elif isinstance(data, (list, tuple)):
+        pass
+    else:
+        msg = 'data({:s}) must be {:s}, found {:s}'
+        raise TypeError(msg.format(str(data), FASTHASHER_DATA_ACCEPTED_FORMAT, str(type(data))))
+
+    for i,item in enumerate(data):
+        if isinstance(item, str):
+            item = item.encode('UTF-8')
+        elif isinstance(item, bytes):
+            pass
+        else:
+            msg = 'data[{:d}]({:s}) must be {:s}, found {:s}'
+            raise TypeError(msg.format(i, str(item), FASTHASHER_ITEM_ACCEPTED_FORMAT, str(type(item))))
+        hasher.update(item)
+    return hasher.hexdigest()
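+
+# Illustrative usage sketch (editorial addition, not part of the original patch); it uses only names defined in this
+# module. Composite keys, such as the resource_key hashes used by ConfigModel.update_config, are reduced to a
+# fixed-length hex string; with the default digest_size=8, the result is 16 hex characters.
+def _example_fast_hasher_usage() -> None:
+    str_key_hash = fast_hasher(['device-uuid', 'endpoint-uuid'])
+    assert isinstance(str_key_hash, str) and len(str_key_hash) == 16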
diff --git a/src/service/service/database/TopologyModel.py b/src/service/service/database/TopologyModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..54a7e75e5059e0f1f605fe6235740c0094ab73c5
--- /dev/null
+++ b/src/service/service/database/TopologyModel.py
@@ -0,0 +1,40 @@
+import logging #, operator
+#from typing import Dict, List
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+#from common.orm.HighLevel import get_related_objects
+from .ContextModel import ContextModel
+
+LOGGER = logging.getLogger(__name__)
+
+class TopologyModel(Model):
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    topology_uuid = StringField(required=True, allow_empty=False)
+
+#    def dump_id(self) -> Dict:
+#        context_id = ContextModel(self.database, self.context_fk).dump_id()
+#        return {
+#            'context_id': context_id,
+#            'topology_uuid': {'uuid': self.topology_uuid},
+#        }
+#
+#    def dump_device_ids(self) -> List[Dict]:
+#        from .RelationModels import TopologyDeviceModel # pylint: disable=import-outside-toplevel
+#        db_devices = get_related_objects(self, TopologyDeviceModel, 'device_fk')
+#        return [db_device.dump_id() for db_device in sorted(db_devices, key=operator.attrgetter('pk'))]
+#
+#    def dump_link_ids(self) -> List[Dict]:
+#        from .RelationModels import TopologyLinkModel # pylint: disable=import-outside-toplevel
+#        db_links = get_related_objects(self, TopologyLinkModel, 'link_fk')
+#        return [db_link.dump_id() for db_link in sorted(db_links, key=operator.attrgetter('pk'))]
+#
+#    def dump(   # pylint: disable=arguments-differ
+#            self, include_devices=True, include_links=True
+#        ) -> Dict:
+#        result = {'topology_id': self.dump_id()}
+#        if include_devices: result['device_ids'] = self.dump_device_ids()
+#        if include_links: result['link_ids'] = self.dump_link_ids()
+#        return result
diff --git a/src/service/service/database/__init__.py b/src/service/service/database/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d2c65947e2775c4c9dc5dda8e7737bb04edb8cd
--- /dev/null
+++ b/src/service/service/database/__init__.py
@@ -0,0 +1,2 @@
+# In-memory database with a simplified representation of the Context database, focused on the Service model.
+# Used as an internal data cache, and for message validation and formatting purposes.
diff --git a/src/service/service/service_handler_api/AnyTreeTools.py b/src/service/service/service_handler_api/AnyTreeTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..435fccf56b5166000fde2b6f4e555af29c550531
--- /dev/null
+++ b/src/service/service/service_handler_api/AnyTreeTools.py
@@ -0,0 +1,78 @@
+import anytree
+from typing import Any, List, Optional, Union
+
+class TreeNode(anytree.node.Node):
+    def __init__(self, name, parent=None, children=None, **kwargs) -> None:
+        super().__init__(name, parent=parent, children=children, **kwargs)
+        self.value : Optional[Any] = None
+
+    def get_full_path(self):
+        return self.separator.join([''] + [str(node.name) for node in self.path])
+
+class RawStyle(anytree.render.AbstractStyle):
+    def __init__(self):
+        """
+        Raw style.
+
+        >>> from anytree import Node, RenderTree
+        >>> root = Node("root")
+        >>> s0 = Node("sub0", parent=root)
+        >>> s0b = Node("sub0B", parent=s0)
+        >>> s0a = Node("sub0A", parent=s0)
+        >>> s1 = Node("sub1", parent=root)
+        >>> print(RenderTree(root, style=RawStyle()))
+        Node('/root')
+        Node('/root/sub0')
+        Node('/root/sub0/sub0B')
+        Node('/root/sub0/sub0A')
+        Node('/root/sub1')
+        """
+        super(RawStyle, self).__init__('', '', '')
+
+def get_subnode(
+    resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], default : Optional[Any] = None):
+
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
+    node = root
+    for path_item in key_or_path:
+        try:
+            node = resolver.get(node, path_item)
+        except anytree.ChildResolverError:
+            return default
+    return node
+
+def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], value : Any):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
+    node = root
+    for path_item in key_or_path:
+        try:
+            node = resolver.get(node, path_item)
+        except anytree.ChildResolverError:
+            node = TreeNode(path_item, parent=node)
+    if isinstance(node.value, dict) and isinstance(value, dict):
+        node.value.update(value)
+    else:
+        node.value = value
+
+def delete_subnode(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]]):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    node = get_subnode(resolver, root, key_or_path, default=None)
+    if node is None: return
+    parent : TreeNode = node.parent
+    children = list(parent.children)
+    children.remove(node)
+    parent.children = tuple(children)
+
+def dump_subtree(root : TreeNode):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    results = []
+    for row in anytree.RenderTree(root, style=RawStyle()):
+        node : TreeNode = row.node
+        path = node.get_full_path()[2:] # full path except the leading root placeholder "/."
+        if len(path) == 0: continue
+        value = node.value
+        if value is None: continue
+        results.append((path, value))
+    return results
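+
+# Illustrative usage sketch (editorial addition, not part of the original patch); it uses only names defined in this
+# module. The resource names ('settings', 'mtu', 'address_ip') are hypothetical. The single-character root name '.'
+# is assumed so that dump_subtree() can strip the leading '/.' placeholder.
+def _example_tree_usage() -> None:
+    resolver = anytree.Resolver('name')
+    config = TreeNode('.')
+    set_subnode_value(resolver, config, 'settings/mtu', 1450)
+    set_subnode_value(resolver, config, ['settings', 'address_ip'], '10.0.0.1')
+    assert get_subnode(resolver, config, 'settings/mtu').value == 1450
+    assert dump_subtree(config) == [('settings/mtu', 1450), ('settings/address_ip', '10.0.0.1')]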
diff --git a/src/service/service/service_handler_api/Exceptions.py b/src/service/service/service_handler_api/Exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..d52df91786ddd8a08068be792a6e6f36bb1b3ebb
--- /dev/null
+++ b/src/service/service/service_handler_api/Exceptions.py
@@ -0,0 +1,52 @@
+class UnsatisfiedFilterException(Exception):
+    def __init__(self, filter_fields):
+        msg = 'No ServiceHandler satisfies FilterFields({:s})'
+        super().__init__(msg.format(str(filter_fields)))
+
+class UnsupportedServiceHandlerClassException(Exception):
+    def __init__(self, service_handler_class_name):
+        msg = 'Class({:s}) is not a subclass of _ServiceHandler'
+        super().__init__(msg.format(str(service_handler_class_name)))
+
+class UnsupportedFilterFieldException(Exception):
+    def __init__(self, unsupported_filter_fields, service_handler_class_name=None):
+        if service_handler_class_name:
+            msg = 'FilterFields({:s}) specified by ServiceHandler({:s}) are not supported'
+            msg = msg.format(str(unsupported_filter_fields), str(service_handler_class_name))
+        else:
+            msg = 'FilterFields({:s}) specified in Filter are not supported'
+            msg = msg.format(str(unsupported_filter_fields))
+        super().__init__(msg)
+
+class UnsupportedFilterFieldValueException(Exception):
+    def __init__(
+        self, filter_field_name, filter_field_value, allowed_filter_field_values, service_handler_class_name=None):
+
+        if service_handler_class_name:
+            msg = 'FilterField({:s}={:s}) specified by ServiceHandler({:s}) is not supported. Allowed values are {:s}'
+            msg = msg.format(
+                str(filter_field_name), str(filter_field_value), str(service_handler_class_name),
+                str(allowed_filter_field_values))
+        else:
+            msg = 'FilterField({:s}={:s}) specified in Filter is not supported. Allowed values are {:s}'
+            msg = msg.format(str(filter_field_name), str(filter_field_value), str(allowed_filter_field_values))
+        super().__init__(msg)
+
+#class UnsupportedResourceKeyException(Exception):
+#    def __init__(self, resource_key):
+#        msg = 'ResourceKey({:s}) not supported'
+#        msg = msg.format(str(resource_key))
+#        super().__init__(msg)
+#
+#class ConfigFieldNotFoundException(Exception):
+#    def __init__(self, config_field_name):
+#        msg = 'ConfigField({:s}) not specified in resource'
+#        msg = msg.format(str(config_field_name))
+#        super().__init__(msg)
+#
+#class ConfigFieldsNotSupportedException(Exception):
+#    def __init__(self, config_fields):
+#        msg = 'ConfigFields({:s}) not supported in resource'
+#        msg = msg.format(str(config_fields))
+#        super().__init__(msg)
+#
\ No newline at end of file
diff --git a/src/service/service/service_handler_api/FilterFields.py b/src/service/service/service_handler_api/FilterFields.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc1a3d5afee4468bca344e18a356728807367bbf
--- /dev/null
+++ b/src/service/service/service_handler_api/FilterFields.py
@@ -0,0 +1,13 @@
+from enum import Enum
+from service.service.database.ServiceModel import ORM_ServiceTypeEnum
+from service.service.database.DeviceModel import ORM_DeviceDriverEnum
+
+class FilterFieldEnum(Enum):
+    SERVICE_TYPE  = 'service_type'
+    DEVICE_DRIVER = 'device_driver'
+
+# Map each allowed filter field to the set of values it accepts; None means no restriction (free text).
+FILTER_FIELD_ALLOWED_VALUES = {
+    FilterFieldEnum.SERVICE_TYPE.value  : {i.value for i in ORM_ServiceTypeEnum},
+    FilterFieldEnum.DEVICE_DRIVER.value : {i.value for i in ORM_DeviceDriverEnum},
+}
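+
+# Illustrative example (editorial sketch, not part of the original patch): filter fields, as built by the Service
+# component when selecting a handler, map each field name to a supported value (or list of values), e.g.:
+#   {
+#       FilterFieldEnum.SERVICE_TYPE.value  : ORM_ServiceTypeEnum.L3NM.value,
+#       FilterFieldEnum.DEVICE_DRIVER.value : [ORM_DeviceDriverEnum.OPENCONFIG.value],
+#   }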
diff --git a/src/service/service/service_handler_api/ServiceHandlerFactory.py b/src/service/service/service_handler_api/ServiceHandlerFactory.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f04b449de84b4fd566b4e10cec0b46839183dfb
--- /dev/null
+++ b/src/service/service/service_handler_api/ServiceHandlerFactory.py
@@ -0,0 +1,79 @@
+import logging, operator
+from enum import Enum
+from typing import Any, Dict, Iterable, List, Set, Tuple
+from service.service.service_handler_api._ServiceHandler import _ServiceHandler
+from .Exceptions import (
+    UnsatisfiedFilterException, UnsupportedServiceHandlerClassException, UnsupportedFilterFieldException,
+    UnsupportedFilterFieldValueException)
+from .FilterFields import FILTER_FIELD_ALLOWED_VALUES, FilterFieldEnum
+
+LOGGER = logging.getLogger(__name__)
+
+class ServiceHandlerFactory:
+    def __init__(self, service_handlers : List[Tuple[type, List[Dict[FilterFieldEnum, Any]]]]) -> None:
+        # Dict{field_name => Dict{field_value => Set{ServiceHandler}}}
+        self.__indices : Dict[str, Dict[str, Set[_ServiceHandler]]] = {}
+
+        for service_handler_class,filter_field_sets in service_handlers:
+            for filter_fields in filter_field_sets:
+                filter_fields = {k.value:v for k,v in filter_fields.items()}
+                self.register_service_handler_class(service_handler_class, **filter_fields)
+
+    def register_service_handler_class(self, service_handler_class, **filter_fields):
+        if not issubclass(service_handler_class, _ServiceHandler):
+            raise UnsupportedServiceHandlerClassException(str(service_handler_class))
+
+        service_handler_name = service_handler_class.__name__
+        supported_filter_fields = set(FILTER_FIELD_ALLOWED_VALUES.keys())
+        unsupported_filter_fields = set(filter_fields.keys()).difference(supported_filter_fields)
+        if len(unsupported_filter_fields) > 0:
+            raise UnsupportedFilterFieldException(
+                unsupported_filter_fields, service_handler_class_name=service_handler_name)
+
+        for field_name, field_values in filter_fields.items():
+            field_indice = self.__indices.setdefault(field_name, dict())
+            field_enum_values = FILTER_FIELD_ALLOWED_VALUES.get(field_name)
+            if not isinstance(field_values, Iterable) or isinstance(field_values, str):
+                field_values = [field_values]
+            for field_value in field_values:
+                if isinstance(field_value, Enum): field_value = field_value.value
+                if field_enum_values is not None and field_value not in field_enum_values:
+                    raise UnsupportedFilterFieldValueException(
+                        field_name, field_value, field_enum_values, service_handler_class_name=service_handler_name)
+                field_indice_service_handlers = field_indice.setdefault(field_value, set())
+                field_indice_service_handlers.add(service_handler_class)
+
+    def get_service_handler_class(self, **filter_fields) -> _ServiceHandler:
+        supported_filter_fields = set(FILTER_FIELD_ALLOWED_VALUES.keys())
+        unsupported_filter_fields = set(filter_fields.keys()).difference(supported_filter_fields)
+        if len(unsupported_filter_fields) > 0: raise UnsupportedFilterFieldException(unsupported_filter_fields)
+
+        candidate_service_handler_classes : Dict[_ServiceHandler, int] = None # num. filter hits per service_handler
+        for field_name, field_values in filter_fields.items():
+            field_indice = self.__indices.get(field_name)
+            if field_indice is None: continue
+            if not isinstance(field_values, Iterable) or isinstance(field_values, str):
+                field_values = [field_values]
+
+            field_enum_values = FILTER_FIELD_ALLOWED_VALUES.get(field_name)
+
+            field_candidate_service_handler_classes = set()
+            for field_value in field_values:
+                if field_enum_values is not None and field_value not in field_enum_values:
+                    raise UnsupportedFilterFieldValueException(field_name, field_value, field_enum_values)
+                field_indice_service_handlers = field_indice.get(field_value)
+                if field_indice_service_handlers is None: continue
+                field_candidate_service_handler_classes = field_candidate_service_handler_classes.union(
+                    field_indice_service_handlers)
+
+            if candidate_service_handler_classes is None:
+                candidate_service_handler_classes = {k:1 for k in field_candidate_service_handler_classes}
+            else:
+                for candidate_service_handler_class in candidate_service_handler_classes:
+                    if candidate_service_handler_class not in field_candidate_service_handler_classes: continue
+                    candidate_service_handler_classes[candidate_service_handler_class] += 1
+
+        if not candidate_service_handler_classes: # covers both None (no filter field matched any index) and empty
+            raise UnsatisfiedFilterException(filter_fields)
+        candidate_service_handler_classes = sorted(
+            candidate_service_handler_classes.items(), key=operator.itemgetter(1), reverse=True)
+        return candidate_service_handler_classes[0][0]
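+
+# Illustrative usage sketch (editorial addition, not part of the original patch). '_ExampleServiceHandler' is a
+# hypothetical stub used only to show the register/select flow; real handlers are provided through the
+# SERVICE_HANDLERS list in service.service.service_handlers.
+def _example_factory_usage() -> type:
+    from service.service.database.DeviceModel import ORM_DeviceDriverEnum
+    from service.service.database.ServiceModel import ORM_ServiceTypeEnum
+
+    class _ExampleServiceHandler(_ServiceHandler):  # hypothetical handler, for illustration only
+        pass
+
+    factory = ServiceHandlerFactory([])
+    factory.register_service_handler_class(
+        _ExampleServiceHandler, service_type=ORM_ServiceTypeEnum.L3NM,
+        device_driver=[ORM_DeviceDriverEnum.OPENCONFIG])
+    # Selection uses raw enum values, as prepared by the Service component when processing a request.
+    return factory.get_service_handler_class(
+        service_type=ORM_ServiceTypeEnum.L3NM.value,
+        device_driver=[ORM_DeviceDriverEnum.OPENCONFIG.value])   # -> _ExampleServiceHandler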
diff --git a/src/service/service/service_handler_api/Tools.py b/src/service/service/service_handler_api/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..af900690ed5bffec77434bb73c320d1df5a02ed8
--- /dev/null
+++ b/src/service/service/service_handler_api/Tools.py
@@ -0,0 +1,28 @@
+import functools
+from typing import Any, List, Union
+
+ACTION_MSG_SET_ENDPOINT      = 'Set EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
+ACTION_MSG_DELETE_ENDPOINT   = 'Delete EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
+
+ACTION_MSG_SET_CONSTRAINT    = 'Set Constraint(constraint_type={:s}, constraint_value={:s})'
+ACTION_MSG_DELETE_CONSTRAINT = 'Delete Constraint(constraint_type={:s}, constraint_value={:s})'
+
+ACTION_MSG_SET_CONFIG        = 'Set Resource(key={:s}, value={:s})'
+ACTION_MSG_DELETE_CONFIG     = 'Delete Resource(key={:s}, value={:s})'
+
+def _check_errors(
+        message : str, parameters_list : List[Any], results_list : List[Union[bool, Exception]]
+    ) -> List[str]:
+    errors = []
+    for parameters, results in zip(parameters_list, results_list):
+        if not isinstance(results, Exception): continue
+        str_message = message.format(*tuple(map(str, parameters)))  # do not overwrite the 'message' template
+        errors.append('Unable to {:s}; error({:s})'.format(str_message, str(results)))
+    return errors
+
+check_errors_setendpoint      = functools.partial(_check_errors, ACTION_MSG_SET_ENDPOINT     )
+check_errors_deleteendpoint   = functools.partial(_check_errors, ACTION_MSG_DELETE_ENDPOINT  )
+check_errors_setconstraint    = functools.partial(_check_errors, ACTION_MSG_SET_CONSTRAINT   )
+check_errors_deleteconstraint = functools.partial(_check_errors, ACTION_MSG_DELETE_CONSTRAINT)
+check_errors_setconfig        = functools.partial(_check_errors, ACTION_MSG_SET_CONFIG       )
+check_errors_deleteconfig     = functools.partial(_check_errors, ACTION_MSG_DELETE_CONFIG    )
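+
+# Illustrative example (editorial sketch, not part of the original patch): the partials above pair each requested
+# parameter tuple with its result and keep only the failures, e.g.:
+#   endpoints = [('dev-1', 'ep-1', None), ('dev-2', 'ep-2', None)]
+#   results   = [True, Exception('timeout')]
+#   check_errors_setendpoint(endpoints, results)
+#   # -> ['Unable to Set EndPoint(device_uuid=dev-2, endpoint_uuid=ep-2, topology_uuid=None); error(timeout)']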
diff --git a/src/service/service/service_handler_api/_ServiceHandler.py b/src/service/service/service_handler_api/_ServiceHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..55de4ce7a470c0b5bf87e3883f42d60f5c2d3ece
--- /dev/null
+++ b/src/service/service/service_handler_api/_ServiceHandler.py
@@ -0,0 +1,109 @@
+from typing import Any, List, Optional, Tuple, Union
+from common.orm.Database import Database
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.service.database.ServiceModel import ServiceModel
+
+class _ServiceHandler:
+    def __init__(
+        self, db_service : ServiceModel, database : Database, context_client : ContextClient,
+        device_client : DeviceClient, **settings
+    ) -> None:
+        """ Initialize Driver.
+            Parameters:
+                db_service
+                    The service instance from the local in-memory database.
+                database
+                    The instance of the local in-memory database.
+                context_client
+                    An instance of context client to be used to retrieve information from the service and the devices.
+                device_client
+                    An instance of device client to be used to configure the devices.
+                **settings
+                    Extra settings required by the service handler.
+        """
+        raise NotImplementedError()
+
+    def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        """ Set endpoints from a list.
+            Parameters:
+                endpoints : List[Tuple[str, str, Optional[str]]]
+                    List of tuples, each containing a device_uuid, endpoint_uuid and, optionally, the topology_uuid
+                    of the endpoint to be added.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested endpoint changes. Return values must be in the same order as
+                    the requested endpoints. If an endpoint is properly added, True must be returned; otherwise, the
+                    Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        """ Delete endpoints form a list.
+            Parameters:
+                endpoints : List[Tuple[str, str, Optional[str]]]
+                    List of tuples, each containing a device_uuid, endpoint_uuid and, optionally, the topology_uuid of
+                    the endpoint to be removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested endpoint deletions. Return values must be in the same order as
+                    the requested endpoints. If an endpoint is properly deleted, True must be returned; otherwise, the
+                    Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Create/Update constraints.
+            Parameters:
+                constraints : List[Tuple[str, Any]]
+                    List of tuples, each containing a constraint_type and the new constraint_value to be set.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested constraint changes. Return values must be in the same order as
+                    the requested constraints. If a constraint is properly set, True must be returned; otherwise, the
+                    Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Delete constraints.
+            Parameters:
+                constraints : List[Tuple[str, Any]]
+                    List of tuples, each containing a constraint_type pointing to the constraint to be deleted, and a
+                    constraint_value containing any additional values required to locate the constraint to be
+                    removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested constraint deletions. Return values must be in the same order
+                    as the requested constraints. If a constraint is properly deleted, True must be returned;
+                    otherwise, the Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Create/Update configuration for a list of resources.
+            Parameters:
+                resources : List[Tuple[str, Any]]
+                    List of tuples, each containing a resource_key pointing to the resource to be modified, and a
+                    resource_value containing the new value to be set.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested resource key changes. Return values must be in the same order
+                    as the requested resource keys. If a resource is properly set, True must be returned; otherwise,
+                    the Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Delete configuration for a list of resources.
+            Parameters:
+                resources : List[Tuple[str, Any]]
+                    List of tuples, each containing a resource_key pointing to the resource to be modified, and a
+                    resource_value containing any additional values required to locate the value to be removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the requested resource key deletions. Return values must be in the same order
+                    as the requested resource keys. If a resource is properly deleted, True must be returned;
+                    otherwise, the Exception raised during processing must be returned.
+        """
+        raise NotImplementedError()
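+
+# Minimal sketch (illustrative assumption, not part of the original file) of the per-item result contract
+# expected from subclasses:
+#   class NoOpServiceHandler(_ServiceHandler):
+#       def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+#           results = []
+#           for resource_key, resource_value in resources:
+#               try:
+#                   # ... apply the configuration identified by resource_key here ...
+#                   results.append(True)
+#               except Exception as e:  # pylint: disable=broad-except
+#                   results.append(e)
+#           return results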
diff --git a/src/service/service/service_handler_api/__init__.py b/src/service/service/service_handler_api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/service/service_handlers/Tools.py b/src/service/service/service_handlers/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bd4b8f1f6643e621feff84aceeb401dac120348
--- /dev/null
+++ b/src/service/service/service_handlers/Tools.py
@@ -0,0 +1,38 @@
+import json
+from typing import Any, Dict, Union
+from service.proto.context_pb2 import ConfigActionEnum
+
+def config_rule(action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    if not isinstance(resource_value, str): resource_value = json.dumps(resource_value, sort_keys=True)
+    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
+
+def config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_SET, resource_key, resource_value)
+
+def config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value)
+
+def constraint(constraint_type, constraint_value):
+    return {'constraint_type': str(constraint_type), 'constraint_value': str(constraint_value)}
+
+def context_id(context_uuid):
+    return {'context_uuid': {'uuid': context_uuid}}
+
+def topology_id(topology_uuid, context_uuid=None):
+    result = {'topology_uuid': {'uuid': topology_uuid}}
+    if context_uuid is not None: result['context_id'] = context_id(context_uuid)
+    return result
+
+def device_id(device_uuid):
+    return {'device_uuid': {'uuid': device_uuid}}
+
+def endpoint_id(device_uuid, endpoint_uuid, context_uuid=None, topology_uuid=None):
+    result = {'device_id': device_id(device_uuid), 'endpoint_uuid': {'uuid': endpoint_uuid}}
+    if topology_uuid is not None: result['topology_id'] = topology_id(topology_uuid, context_uuid=context_uuid)
+    return result
+
+def endpoint(device_uuid, endpoint_uuid, endpoint_type, context_uuid=None, topology_uuid=None):
+    return {
+        'endpoint_id': endpoint_id(device_uuid, endpoint_uuid, context_uuid=context_uuid, topology_uuid=topology_uuid),
+        'endpoint_type': endpoint_type,
+    }
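+
+# Illustrative examples (assumed values) of the dictionaries built by these helpers:
+#   config_rule_set('/interface[EP1]', {'mtu': 1450})
+#     -> {'action': ConfigActionEnum.CONFIGACTION_SET, 'resource_key': '/interface[EP1]',
+#         'resource_value': '{"mtu": 1450}'}
+#   endpoint_id('DEV1', 'EP1', topology_uuid='admin')
+#     -> {'device_id': {'device_uuid': {'uuid': 'DEV1'}}, 'endpoint_uuid': {'uuid': 'EP1'},
+#         'topology_id': {'topology_uuid': {'uuid': 'admin'}}}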
diff --git a/src/service/service/service_handlers/__init__.py b/src/service/service/service_handlers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..73777c52ec1459adc398b9e2a6e1ddb53339a911
--- /dev/null
+++ b/src/service/service/service_handlers/__init__.py
@@ -0,0 +1,18 @@
+from ..service_handler_api.FilterFields import FilterFieldEnum, ORM_DeviceDriverEnum, ORM_ServiceTypeEnum
+from .l3nm_emulated.L3NMEmulatedServiceHandler import L3NMEmulatedServiceHandler
+from .l3nm_openconfig.L3NMOpenConfigServiceHandler import L3NMOpenConfigServiceHandler
+
+SERVICE_HANDLERS = [
+    (L3NMEmulatedServiceHandler, [
+        {
+            FilterFieldEnum.SERVICE_TYPE  : ORM_ServiceTypeEnum.L3NM,
+            FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.UNDEFINED,
+        }
+    ]),
+    (L3NMOpenConfigServiceHandler, [
+        {
+            FilterFieldEnum.SERVICE_TYPE  : ORM_ServiceTypeEnum.L3NM,
+            FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.OPENCONFIG,
+        }
+    ]),
+]
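+
+# Note: each entry maps a service handler class to the filter-field combinations it supports; presumably the
+# ServiceHandlerFactory indexes these combinations to select the appropriate handler for a given service.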
diff --git a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b5636c6b1284189c65f1c8589e0966ac6d57860
--- /dev/null
+++ b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
@@ -0,0 +1,209 @@
+import anytree, json, logging
+from typing import Any, Dict, List, Optional, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
+from common.type_checkers.Checkers import chk_length, chk_type
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from device.proto.context_pb2 import Device
+from service.service.database.ConfigModel import ORM_ConfigActionEnum, get_config_rules
+from service.service.database.ContextModel import ContextModel
+from service.service.database.DeviceModel import DeviceModel
+from service.service.database.ServiceModel import ServiceModel
+from service.service.service_handler_api._ServiceHandler import _ServiceHandler
+from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handlers.Tools import config_rule_set, config_rule_delete
+
+LOGGER = logging.getLogger(__name__)
+
+class L3NMEmulatedServiceHandler(_ServiceHandler):
+    def __init__(   # pylint: disable=super-init-not-called
+        self, db_service : ServiceModel, database : Database, context_client : ContextClient,
+        device_client : DeviceClient, **settings
+    ) -> None:
+        self.__db_service = db_service
+        self.__database = database
+        self.__context_client = context_client # pylint: disable=unused-private-member
+        self.__device_client = device_client
+
+        self.__db_context : ContextModel = get_object(self.__database, ContextModel, self.__db_service.context_fk)
+        str_service_key = key_to_str([self.__db_context.context_uuid, self.__db_service.service_uuid])
+        db_config = get_config_rules(self.__database, str_service_key, 'running')
+        self.__resolver = anytree.Resolver(pathattr='name')
+        self.__config = TreeNode('.')
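+        # Replay the stored 'running' config rules to rebuild the in-memory configuration tree.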
+        for action, resource_key, resource_value in db_config:
+            if action == ORM_ConfigActionEnum.SET:
+                try:
+                    resource_value = json.loads(resource_value)
+                except: # pylint: disable=bare-except
+                    pass
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+            elif action == ORM_ConfigActionEnum.DELETE:
+                delete_subnode(self.__resolver, self.__config, resource_key)
+
+    def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+        network_interface_name    = '{:s}-NetIf'.format(service_uuid)
+        network_subinterface_name = '{:s}-NetSubIf'.format(service_uuid)
+
+        settings : TreeNode = get_subnode(self.__resolver, self.__config, 'settings', None)
+        if settings is None: raise Exception('Unable to retrieve service settings')
+        json_settings : Dict = settings.value
+        route_distinguisher = json_settings.get('route_distinguisher', '0:0')    # '60001:801'
+        mtu                 = json_settings.get('mtu',                 1450 )    # 1512
+        address_families    = json_settings.get('address_families',    []   )    # ['IPV4']
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid for now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve service settings for endpoint({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                router_id           = json_endpoint_settings.get('router_id',           '0.0.0.0')  # '10.95.0.10'
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_set(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name, 'type': 'L3VRF', 'router_id': router_id,
+                            'route_distinguisher': route_distinguisher, 'address_families': address_families,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]'.format(endpoint_uuid), {
+                            'name': endpoint_uuid, 'description': network_interface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                            'description': network_subinterface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid,
+                    }),
+                ])
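+                # Push the accumulated config rules to the device through the Device component client.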
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid for now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve service settings for endpoint({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_delete(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid
+                    }),
+                    config_rule_delete(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                    }),
+                    config_rule_delete(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name
+                    }),
+                ])
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[SetConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[DeleteConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, resource_value = resource
+                resource_value = json.loads(resource_value)
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, _ = resource
+                delete_subnode(self.__resolver, self.__config, resource_key)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
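+
+# Illustrative sketch (assumption, mirroring the values used in the unit tests) of the service config rules
+# this handler consumes:
+#   config_rule_set('settings', {'route_distinguisher': '60001:801', 'mtu': 1512, 'address_families': ['IPV4']})
+#   config_rule_set('device[EMULATED-ROUTER-1]/endpoint[EP100]/settings',
+#                   {'router_id': '10.0.0.1', 'sub_interface_index': 1})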
diff --git a/src/service/service/service_handlers/l3nm_emulated/__init__.py b/src/service/service/service_handlers/l3nm_emulated/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb1914fc301cfd759cf55910634f8023eec442ce
--- /dev/null
+++ b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
@@ -0,0 +1,209 @@
+import anytree, json, logging
+from typing import Any, Dict, List, Optional, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
+from common.type_checkers.Checkers import chk_length, chk_type
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from device.proto.context_pb2 import Device
+from service.service.database.ConfigModel import ORM_ConfigActionEnum, get_config_rules
+from service.service.database.ContextModel import ContextModel
+from service.service.database.DeviceModel import DeviceModel
+from service.service.database.ServiceModel import ServiceModel
+from service.service.service_handler_api._ServiceHandler import _ServiceHandler
+from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handlers.Tools import config_rule_set, config_rule_delete
+
+LOGGER = logging.getLogger(__name__)
+
+class L3NMOpenConfigServiceHandler(_ServiceHandler):
+    def __init__(   # pylint: disable=super-init-not-called
+        self, db_service : ServiceModel, database : Database, context_client : ContextClient,
+        device_client : DeviceClient, **settings
+    ) -> None:
+        self.__db_service = db_service
+        self.__database = database
+        self.__context_client = context_client # pylint: disable=unused-private-member
+        self.__device_client = device_client
+
+        self.__db_context : ContextModel = get_object(self.__database, ContextModel, self.__db_service.context_fk)
+        str_service_key = key_to_str([self.__db_context.context_uuid, self.__db_service.service_uuid])
+        db_config = get_config_rules(self.__database, str_service_key, 'running')
+        self.__resolver = anytree.Resolver(pathattr='name')
+        self.__config = TreeNode('.')
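+        # Replay the stored 'running' config rules to rebuild the in-memory configuration tree.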
+        for action, resource_key, resource_value in db_config:
+            if action == ORM_ConfigActionEnum.SET:
+                try:
+                    resource_value = json.loads(resource_value)
+                except: # pylint: disable=bare-except
+                    pass
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+            elif action == ORM_ConfigActionEnum.DELETE:
+                delete_subnode(self.__resolver, self.__config, resource_key)
+
+    def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+        network_interface_name    = '{:s}-NetIf'.format(service_uuid)
+        network_subinterface_name = '{:s}-NetSubIf'.format(service_uuid)
+
+        settings : TreeNode = get_subnode(self.__resolver, self.__config, 'settings', None)
+        if settings is None: raise Exception('Unable to retrieve service settings')
+        json_settings : Dict = settings.value
+        route_distinguisher = json_settings.get('route_distinguisher', '0:0')    # '60001:801'
+        mtu                 = json_settings.get('mtu',                 1450 )    # 1512
+        address_families    = json_settings.get('address_families',    []   )    # ['IPV4']
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid for now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve service settings for endpoint({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                router_id           = json_endpoint_settings.get('router_id',           '0.0.0.0')  # '10.95.0.10'
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_set(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name, 'type': 'L3VRF', 'router_id': router_id,
+                            'route_distinguisher': route_distinguisher, 'address_families': address_families,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]'.format(endpoint_uuid), {
+                            'name': endpoint_uuid, 'description': network_interface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                            'description': network_subinterface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid,
+                    }),
+                ])
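+                # Push the accumulated config rules to the device through the Device component client.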
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid for now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve service settings for endpoint({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_delete(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid
+                    }),
+                    config_rule_delete(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                    }),
+                    config_rule_delete(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name
+                    }),
+                ])
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[SetConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[DeleteConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, resource_value = resource
+                resource_value = json.loads(resource_value)
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, _ = resource
+                delete_subnode(self.__resolver, self.__config, resource_key)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
diff --git a/src/service/service/service_handlers/l3nm_openconfig/__init__.py b/src/service/service/service_handlers/l3nm_openconfig/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/tests/.gitignore b/src/service/tests/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..f2f83148dc3aa48945967df7297805c06296dde9
--- /dev/null
+++ b/src/service/tests/.gitignore
@@ -0,0 +1,2 @@
+# Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc.
+ServiceHandler_L3NM_OC.py
diff --git a/src/service/tests/CommonObjects.py b/src/service/tests/CommonObjects.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b51dcd2322e70dff9a5229e65e6a220708a834f
--- /dev/null
+++ b/src/service/tests/CommonObjects.py
@@ -0,0 +1,21 @@
+from copy import deepcopy
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+
+# use "deepcopy" to prevent propagating forced changes during tests
+
+CONTEXT_ID = {'context_uuid': {'uuid': DEFAULT_CONTEXT_UUID}}
+CONTEXT = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_ids': [],
+    'service_ids': [],
+}
+
+TOPOLOGY_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_uuid': {'uuid': DEFAULT_TOPOLOGY_UUID},
+}
+TOPOLOGY = {
+    'topology_id': deepcopy(TOPOLOGY_ID),
+    'device_ids': [],
+    'link_ids': [],
+}
diff --git a/src/service/tests/ServiceHandler_L3NM_EMU.py b/src/service/tests/ServiceHandler_L3NM_EMU.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfaf49d084448b0aee88c18823a8af91040831d0
--- /dev/null
+++ b/src/service/tests/ServiceHandler_L3NM_EMU.py
@@ -0,0 +1,102 @@
+from copy import deepcopy
+from service.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum, ServiceStatusEnum, ServiceTypeEnum
+from service.service.service_handlers.Tools import config_rule_set, constraint, device_id, endpoint_id
+from .CommonObjects import CONTEXT_ID
+
+# use "deepcopy" to prevent propagating forced changes during tests
+
+SERVICE_HANDLER_NAME = 'l3nm_emulated'
+
+SERVICE_UUID = 'SVC_L3NM_EMU'
+
+SERVICE_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_UUID}
+}
+
+SERVICE_DESCRIPTOR = {
+    'service_id': deepcopy(SERVICE_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [],
+    'service_constraints': [],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED},
+    'service_config': {'config_rules': []},
+}
+
+DEVICE1_UUID = 'EMULATED-ROUTER-1'
+DEVICE2_UUID = 'EMULATED-ROUTER-2'
+
+DEVICE_ATTRIBUTES = { # device_uuid => {device_settings}
+    DEVICE1_UUID: {
+        'type'    : 'emulated',
+        'drivers' : [DeviceDriverEnum.DEVICEDRIVER_UNDEFINED],
+        'endpoint': 'EP100',
+    },
+    DEVICE2_UUID: {
+        'type'    : 'emulated',
+        'drivers' : [DeviceDriverEnum.DEVICEDRIVER_UNDEFINED],
+        'endpoint': 'EP100',
+    },
+}
+
+SERVICE_DEVICE_UUIDS = [DEVICE1_UUID, DEVICE2_UUID]
+
+SERVICE_ENDPOINT_IDS = [
+    endpoint_id(device_uuid, DEVICE_ATTRIBUTES[device_uuid]['endpoint'])
+    for device_uuid in SERVICE_DEVICE_UUIDS
+]
+
+SERVICE_CONFIG_RULES = [
+    config_rule_set(
+        'settings', {
+            'route_distinguisher': '60001:801', 'mtu': 1512, 'address_families': ['IPV4']
+        }),
+    config_rule_set(
+        'device[{:s}]/endpoint[{:s}]/settings'.format(DEVICE1_UUID, DEVICE_ATTRIBUTES[DEVICE1_UUID]['endpoint']), {
+            'router_id': '10.0.0.1', 'sub_interface_index': 1,
+        }),
+    config_rule_set(
+        'device[{:s}]/endpoint[{:s}]/settings'.format(DEVICE2_UUID, DEVICE_ATTRIBUTES[DEVICE2_UUID]['endpoint']), {
+            'router_id': '10.0.0.2', 'sub_interface_index': 1,
+        }),
+]
+
+SERVICE_CONSTRAINTS = [
+    constraint('latency_ms', 15.2),
+    constraint('jitter_us', 1.2),
+]
+
+def get_device_descriptor(device_uuid, enabled=True):
+    device_operational_status = (
+        DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED \
+        if enabled else \
+        DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED)
+    return {
+        'device_id': device_id(device_uuid),
+        'device_type': DEVICE_ATTRIBUTES[device_uuid]['type'],
+        'device_config': {'config_rules': []},
+        'device_operational_status': device_operational_status,
+        'device_drivers': DEVICE_ATTRIBUTES[device_uuid]['drivers'],
+        'device_endpoints': [],
+    }
+
+def get_connect_rules(device_uuid):
+    return [
+        config_rule_set('_connect/address', '127.0.0.1'),
+        config_rule_set('_connect/port',    '0'),
+        config_rule_set('_connect/settings', {'endpoints': [
+            {'uuid': endpoint_uuid, 'type': '10Gbps', 'sample_types': []}
+            for endpoint_uuid in ['EP1', 'EP2', 'EP3', 'EP100']
+        ]}),
+    ]
+
+TEST_SERVICE_HANDLER = (SERVICE_HANDLER_NAME, {
+    'service_id'            : SERVICE_ID,
+    'service_descriptor'    : SERVICE_DESCRIPTOR,
+    'service_endpoint_ids'  : SERVICE_ENDPOINT_IDS,
+    'service_config_rules'  : SERVICE_CONFIG_RULES,
+    'service_constraints'   : SERVICE_CONSTRAINTS,
+    'service_device_uuids'  : SERVICE_DEVICE_UUIDS,
+    'get_device_descriptor' : get_device_descriptor,
+    'get_connect_rules'     : get_connect_rules,
+})
diff --git a/src/service/tests/ServiceHandlersToTest.py b/src/service/tests/ServiceHandlersToTest.py
new file mode 100644
index 0000000000000000000000000000000000000000..484ae3e45491f31a065ad27b49f86e6338af7a51
--- /dev/null
+++ b/src/service/tests/ServiceHandlersToTest.py
@@ -0,0 +1,15 @@
+# Add or comment out here the service handlers to be tested by the unit tests.
+
+SERVICE_HANDLERS_TO_TEST = []
+
+try:
+    from service.tests.ServiceHandler_L3NM_EMU import TEST_SERVICE_HANDLER
+    SERVICE_HANDLERS_TO_TEST.append(TEST_SERVICE_HANDLER)
+except ImportError:
+    pass
+
+try:
+    from service.tests.ServiceHandler_L3NM_OC import TEST_SERVICE_HANDLER
+    SERVICE_HANDLERS_TO_TEST.append(TEST_SERVICE_HANDLER)
+except ImportError:
+    pass
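+
+# Each TEST_SERVICE_HANDLER tuple is presumably consumed by test_unitary.py (through pytest_generate_tests)
+# to parametrize the unit tests once per available service handler.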
diff --git a/src/service/tests/test_unitary.py b/src/service/tests/test_unitary.py
index e807cb845ad2abf0d13fca559a0be5e03f4ed795..e03f2017ad2acaf91f6e4186fc7c502f08b7ec0c 100644
--- a/src/service/tests/test_unitary.py
+++ b/src/service/tests/test_unitary.py
@@ -1,357 +1,270 @@
-import copy, grpc, logging, pytest
+import copy, grpc, logging, os, pytest
+from typing import Tuple
 from google.protobuf.json_format import MessageToDict
-#from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-#from common.tests.Assertions import validate_empty, validate_service, validate_service_id, \
-#    validate_service_list_is_empty, validate_service_list_is_not_empty
-from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
+from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
+from common.message_broker.MessageBroker import MessageBroker
+from common.tests.PytestGenerateTests import pytest_generate_tests # (required) pylint: disable=unused-import
+from context.Config import (
+    GRPC_SERVICE_PORT as CONTEXT_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as CONTEXT_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as CONTEXT_GRPC_GRACE_PERIOD)
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Context, DeviceId, Topology, Device
+from context.service.grpc_server.ContextService import ContextService
+from device.Config import (
+    GRPC_SERVICE_PORT as DEVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as DEVICE_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as DEVICE_GRPC_GRACE_PERIOD)
+from device.client.DeviceClient import DeviceClient
+from device.service.DeviceService import DeviceService
+from device.service.driver_api.DriverFactory import DriverFactory
+from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
+from device.service.drivers import DRIVERS
+from device.tests.MockMonitoringService import MockMonitoringService
+from monitoring.Config import (
+    GRPC_SERVICE_PORT as MONITORING_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as MONITORING_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as MONITORING_GRPC_GRACE_PERIOD)
+from monitoring.client.monitoring_client import MonitoringClient
+from service.Config import (
+    GRPC_SERVICE_PORT as SERVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as SERVICE_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as SERVICE_GRPC_GRACE_PERIOD)
 from service.client.ServiceClient import ServiceClient
-from service.proto.context_pb2 import Service
+from service.proto.context_pb2 import Service, ServiceId
 from service.service.ServiceService import ServiceService
-
-port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from service.service.service_handlers import SERVICE_HANDLERS
+from .CommonObjects import CONTEXT, TOPOLOGY
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-## use "copy.deepcopy" to prevent propagating forced changes during tests
-#CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
-#TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
-#SERVICE_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'cs_id': {'uuid': 'DEV1'}}
-#SERVICE = {
-#    'cs_id': copy.deepcopy(SERVICE_ID),
-#    'serviceType': ServiceType.L3NM,
-#    'serviceConfig': {'serviceConfig': '<config/>'},
-#    'serviceState': {'serviceState': ServiceStateEnum.PLANNED},
-#    'constraint': [
-#        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
-#        {'constraint_type': 'hops', 'constraint_value': '5'},
-#    ],
-#    'endpointList' : [
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP5'}},
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP5'}},
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV3'}}, 'port_id': {'uuid' : 'EP5'}},
-#    ]
-#}
+CONTEXT_GRPC_SERVICE_PORT = 10000 + CONTEXT_GRPC_SERVICE_PORT # avoid privileged ports
+DEVICE_GRPC_SERVICE_PORT = 10000 + DEVICE_GRPC_SERVICE_PORT # avoid privileged ports
+SERVICE_GRPC_SERVICE_PORT = 10000 + SERVICE_GRPC_SERVICE_PORT # avoid privileged ports
+MONITORING_GRPC_SERVICE_PORT = 10000 + MONITORING_GRPC_SERVICE_PORT # avoid privileged ports
+
+DEFAULT_REDIS_SERVICE_HOST = '127.0.0.1'
+DEFAULT_REDIS_SERVICE_PORT = 6379
+DEFAULT_REDIS_DATABASE_ID  = 0
+
+REDIS_CONFIG = {
+    'REDIS_SERVICE_HOST': os.environ.get('REDIS_SERVICE_HOST', DEFAULT_REDIS_SERVICE_HOST),
+    'REDIS_SERVICE_PORT': os.environ.get('REDIS_SERVICE_PORT', DEFAULT_REDIS_SERVICE_PORT),
+    'REDIS_DATABASE_ID' : os.environ.get('REDIS_DATABASE_ID',  DEFAULT_REDIS_DATABASE_ID ),
+}
+
+SCENARIOS = [
+    ('all_inmemory', DatabaseBackendEnum.INMEMORY, {},           MessageBrokerBackendEnum.INMEMORY, {}          ),
+    #('all_redis',    DatabaseBackendEnum.REDIS,    REDIS_CONFIG, MessageBrokerBackendEnum.REDIS,    REDIS_CONFIG),
+]
+
+@pytest.fixture(scope='session', ids=[str(scenario[0]) for scenario in SCENARIOS], params=SCENARIOS)
+def context_db_mb(request) -> Tuple[Database, MessageBroker]:
+    name,db_backend,db_settings,mb_backend,mb_settings = request.param
+    msg = 'Running scenario {:s} db_backend={:s}, db_settings={:s}, mb_backend={:s}, mb_settings={:s}...'
+    LOGGER.info(msg.format(str(name), str(db_backend.value), str(db_settings), str(mb_backend.value), str(mb_settings)))
+    _database = Database(get_database_backend(backend=db_backend, **db_settings))
+    _message_broker = MessageBroker(get_messagebroker_backend(backend=mb_backend, **mb_settings))
+    yield _database, _message_broker
+    _message_broker.terminate()
+
+@pytest.fixture(scope='session')
+def context_service(context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name
+    _service = ContextService(
+        context_db_mb[0], context_db_mb[1], port=CONTEXT_GRPC_SERVICE_PORT, max_workers=CONTEXT_GRPC_MAX_WORKERS,
+        grace_period=CONTEXT_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def context_client(context_service : ContextService): # pylint: disable=redefined-outer-name
+    _client = ContextClient(address='127.0.0.1', port=CONTEXT_GRPC_SERVICE_PORT)
+    yield _client
+    _client.close()
+
+@pytest.fixture(scope='session')
+def monitoring_service():
+    _service = MockMonitoringService(port=MONITORING_GRPC_SERVICE_PORT, max_workers=MONITORING_GRPC_MAX_WORKERS,
+        grace_period=MONITORING_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
 
 @pytest.fixture(scope='session')
-def service_service():
-    _service = ServiceService(port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+def monitoring_client(monitoring_service : MockMonitoringService): # pylint: disable=redefined-outer-name
+    _client = MonitoringClient(server='127.0.0.1', port=MONITORING_GRPC_SERVICE_PORT)
+    #yield _client
+    #_client.close()
+    return _client
+
+@pytest.fixture(scope='session')
+def device_service(
+    context_client : ContextClient,         # pylint: disable=redefined-outer-name
+    monitoring_client : MonitoringClient):  # pylint: disable=redefined-outer-name
+
+    _driver_factory = DriverFactory(DRIVERS)
+    _driver_instance_cache = DriverInstanceCache(_driver_factory)
+    _service = DeviceService(
+        context_client, monitoring_client, _driver_instance_cache, port=DEVICE_GRPC_SERVICE_PORT,
+        max_workers=DEVICE_GRPC_MAX_WORKERS, grace_period=DEVICE_GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
-def service_client(service_service):
-    _client = ServiceClient(address='127.0.0.1', port=port)
+def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name
+    _client = DeviceClient(address='127.0.0.1', port=DEVICE_GRPC_SERVICE_PORT)
     yield _client
     _client.close()
 
-def test_dummy(service_client : ServiceClient):
-    reply = service_client.CreateService(Service())
-
-#def test_get_services_empty(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-
-#def test_create_service_wrong_service_attributes(service_client : ServiceClient):
-#    # should fail with wrong service context
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service.cs_id.contextId.contextUuid.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service id
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['cs_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service.cs_id.cs_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['serviceType'] = ServiceType.UNKNOWN
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Method(CreateService) does not accept ServiceType(UNKNOWN). '\
-#          'Permitted values for Method(CreateService) are '\
-#          'ServiceType([\'L2NM\', \'L3NM\', \'TAPI_CONNECTIVITY_SERVICE\']).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service state
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['serviceState']['serviceState'] = ServiceStateEnum.PENDING_REMOVAL
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Method(CreateService) does not accept ServiceState(PENDING_REMOVAL). '\
-#          'Permitted values for Method(CreateService) are '\
-#          'ServiceState([\'PLANNED\']).'
-#    assert e.value.details() == msg
-#
-#def test_create_service_wrong_constraint(service_client : ServiceClient):
-#    # should fail with wrong constraint type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][0]['constraint_type'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'constraint[#0].constraint_type() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong constraint value
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][0]['constraint_value'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'constraint[#0].constraint_value() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with dupplicated constraint type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][1] = copy_service['constraint'][0]
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Duplicated ConstraintType(latency_ms) in Constraint(#1) of Context(admin)/Service(DEV1).'
-#    assert e.value.details() == msg
-#
-#def test_create_service_wrong_endpoint(service_client : ServiceClient, database : Database):
-#    # should fail with wrong endpoint context
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        print(copy_service)
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Context(wrong-context) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) mismatches acceptable Contexts({\'admin\'}). '\
-#          'Optionally, leave field empty to use predefined Context(admin).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong endpoint topology
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Context(admin)/Topology(wrong-topo) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) mismatches acceptable Topologies({\'admin\'}). '\
-#          'Optionally, leave field empty to use predefined Topology(admin).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'endpoint_id[#0].dev_id.device_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device not found
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device duplicated
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][1] = copy_service['endpointList'][0]
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Duplicated Context(admin)/Topology(admin)/Device(DEV1) in Endpoint(#1) of '\
-#          'Context(admin)/Service(DEV1).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint port is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['port_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'endpoint_id[#0].port_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint port not found
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_get_service_does_not_exist(service_client : ServiceClient):
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.GetServiceById(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with service does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.GetServiceById(ServiceId(**SERVICE_ID))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_update_service_does_not_exist(service_client : ServiceClient):
-#    # should fail with service does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.UpdateService(Service(**SERVICE))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_create_service(service_client : ServiceClient):
-#    # should work
-#    validate_service_id(MessageToDict(
-#        service_client.CreateService(Service(**SERVICE)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_create_service_already_exists(service_client : ServiceClient):
-#    # should fail with service already exists
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.CreateService(Service(**SERVICE))
-#    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-#    msg = 'Context(admin)/Service(DEV1) already exists in the database.'
-#    assert e.value.details() == msg
-#
-#def test_get_service(service_client : ServiceClient):
-#    # should work
-#    validate_service(MessageToDict(
-#        service_client.GetServiceById(ServiceId(**SERVICE_ID)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_update_service(service_client : ServiceClient):
-#    # should work
-#    copy_service = copy.deepcopy(SERVICE)
-#    copy_service['serviceConfig']['serviceConfig'] = '<newconfig/>'
-#    copy_service['serviceState']['serviceState'] = ServiceStateEnum.ACTIVE
-#    copy_service['constraint'] = [
-#        {'constraint_type': 'latency_ms', 'constraint_value': '200'},
-#        {'constraint_type': 'bandwidth_gbps', 'constraint_value': '100'},
-#    ]
-#    copy_service['endpointList'] = [
-#        {
-#            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-#            'dev_id': {'device_id': {'uuid': 'DEV1'}},
-#            'port_id': {'uuid' : 'EP5'}
-#        },
-#        {
-#            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-#            'dev_id': {'device_id': {'uuid': 'DEV2'}},
-#            'port_id': {'uuid' : 'EP6'}
-#        },
-#    ]
-#    validate_service_id(MessageToDict(
-#        service_client.UpdateService(Service(**copy_service)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_delete_service_wrong_service_id(service_client : ServiceClient):
-#    # should fail with service context is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = ''
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service_id.contextId.contextUuid.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with service id is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['cs_id']['uuid'] = ''
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service_id.cs_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service id is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['cs_id']['uuid'] = 'wrong-service'
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(wrong-service) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_delete_service(service_client : ServiceClient):
-#    # should work
-#    validate_empty(MessageToDict(
-#        service_client.DeleteService(ServiceId(**SERVICE_ID)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_get_services_empty_2(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_create_service_empty_endpoints(service_client : ServiceClient):
-#    # should work
-#    copy_service = copy.deepcopy(SERVICE)
-#    copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
-#    copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = ''
-#    validate_service_id(MessageToDict(
-#        service_client.CreateService(Service(**copy_service)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_get_services_full(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_not_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
\ No newline at end of file
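+# Session-scoped fixture: builds the ServiceHandlerFactory and runs the Service gRPC server for the whole test session.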
+@pytest.fixture(scope='session')
+def service_service(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+
+    _service_handler_factory = ServiceHandlerFactory(SERVICE_HANDLERS)
+    _service = ServiceService(
+        context_client, device_client, _service_handler_factory,
+        port=SERVICE_GRPC_SERVICE_PORT, max_workers=SERVICE_GRPC_MAX_WORKERS, grace_period=SERVICE_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
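+# Session-scoped fixture: gRPC client connected to the locally started Service server.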
+@pytest.fixture(scope='session')
+def service_client(service_service : ServiceService): # pylint: disable=redefined-outer-name
+    _client = ServiceClient(address='127.0.0.1', port=SERVICE_GRPC_SERVICE_PORT)
+    yield _client
+    _client.close()
+
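+# Render a gRPC message as a JSON-style string, mainly for logging.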
+def grpc_message_to_json_string(message):
+    return str(MessageToDict(
+        message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False))
+
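+# Load the scenarios to be tested; when the module is missing, there are no scenarios and nothing is tested.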
+try:
+    from .ServiceHandlersToTest import SERVICE_HANDLERS_TO_TEST
+except ImportError:
+    LOGGER.exception('Unable to load service handlers, nothing will be tested.')
+    SERVICE_HANDLERS_TO_TEST = []
+
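+# Each entry in SERVICE_HANDLERS_TO_TEST parametrizes the service lifecycle tests below (prepare, create, get, update, delete).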
+class TestServiceHandlers:
+    scenarios = SERVICE_HANDLERS_TO_TEST
+
+    def test_prepare_environment(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        context_client : ContextClient, # pylint: disable=redefined-outer-name
+        device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+
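+        # Create the context and topology, then register the scenario devices together with their connect rules.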
+        context_client.SetContext(Context(**CONTEXT))
+        context_client.SetTopology(Topology(**TOPOLOGY))
+
+        for device_uuid in service_device_uuids:
+            device_with_connect_rules = copy.deepcopy(get_device_descriptor(device_uuid))
+            device_with_connect_rules['device_config']['config_rules'].extend(get_connect_rules(device_uuid))
+            device_client.AddDevice(Device(**device_with_connect_rules))
+
+    def test_service_create_error_cases(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        service_client : ServiceClient):    # pylint: disable=redefined-outer-name
+
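+        # CreateService must reject endpoints, config rules and constraints; they are configured later through UpdateService.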
+        with pytest.raises(grpc.RpcError) as e:
+            service_with_endpoints = copy.deepcopy(service_descriptor)
+            service_with_endpoints['service_endpoint_ids'].extend(service_endpoint_ids)
+            service_client.CreateService(Service(**service_with_endpoints))
+        assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+        msg_head = 'service.service_endpoint_ids(['
+        msg_tail = ']) is invalid; RPC method CreateService does not accept Endpoints. '\
+                'Endpoints should be configured after creating the service.'
+        except_msg = str(e.value.details())
+        assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+        with pytest.raises(grpc.RpcError) as e:
+            service_with_config_rules = copy.deepcopy(service_descriptor)
+            service_with_config_rules['service_config']['config_rules'].extend(service_config_rules)
+            service_client.CreateService(Service(**service_with_config_rules))
+        assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+        msg_head = 'service.service_config.config_rules(['
+        msg_tail = ']) is invalid; RPC method CreateService does not accept Config Rules. '\
+                'Config Rules should be configured after creating the service.'
+        except_msg = str(e.value.details())
+        assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+        with pytest.raises(grpc.RpcError) as e:
+            service_with_constraints = copy.deepcopy(service_descriptor)
+            service_with_constraints['service_constraints'].extend(service_constraints)
+            service_client.CreateService(Service(**service_with_constraints))
+        assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+        msg_head = 'service.service_constraints(['
+        msg_tail = ']) is invalid; RPC method CreateService does not accept Constraints. '\
+                'Constraints should be configured after creating the service.'
+        except_msg = str(e.value.details())
+        assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+
+    def test_service_create_correct(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        service_client : ServiceClient):    # pylint: disable=redefined-outer-name
+
+        service_client.CreateService(Service(**service_descriptor))
+
+
+    def test_service_get_created(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        context_client : ContextClient):    # pylint: disable=redefined-outer-name
+
+        service_data = context_client.GetService(ServiceId(**service_id))
+        LOGGER.info('service_data = {:s}'.format(grpc_message_to_json_string(service_data)))
+
+
+    def test_service_update(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        context_client : ContextClient,     # pylint: disable=redefined-outer-name
+        service_client : ServiceClient):    # pylint: disable=redefined-outer-name
+
+        # Configure
+        service_with_settings = copy.deepcopy(service_descriptor)
+        service_with_settings['service_endpoint_ids'].extend(service_endpoint_ids)
+        service_with_settings['service_config']['config_rules'].extend(service_config_rules)
+        service_with_settings['service_constraints'].extend(service_constraints)
+        service_client.UpdateService(Service(**service_with_settings))
+
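+        # Log the config rules applied to each device as a result of the configuration step.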
+        for endpoint_id in service_endpoint_ids:
+            device_id = endpoint_id['device_id']
+            device_data = context_client.GetDevice(DeviceId(**device_id))
+            for i, config_rule in enumerate(device_data.device_config.config_rules):
+                LOGGER.info('device_data[{:s}][#{:d}] => {:s}'.format(
+                    str(device_id), i, grpc_message_to_json_string(config_rule)))
+
+        # Deconfigure
+        service_with_settings = copy.deepcopy(service_descriptor)
+        service_with_settings['service_endpoint_ids'].extend([]) # update without endpoints to remove them from the service
+        service_client.UpdateService(Service(**service_with_settings))
+
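+        # Log the per-device config rules remaining after the service endpoints have been removed.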
+        for endpoint_id in service_endpoint_ids:
+            device_id = endpoint_id['device_id']
+            device_data = context_client.GetDevice(DeviceId(**device_id))
+            for i, config_rule in enumerate(device_data.device_config.config_rules):
+                LOGGER.info('device_data[{:s}][#{:d}] => {:s}'.format(
+                    str(device_id), i, grpc_message_to_json_string(config_rule)))
+
+
+    def test_service_get_updated(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        context_client : ContextClient):    # pylint: disable=redefined-outer-name
+
+        service_data = context_client.GetService(ServiceId(**service_id))
+        LOGGER.info('service_data = {:s}'.format(grpc_message_to_json_string(service_data)))
+
+
+    def test_service_delete(
+        self, service_id, service_descriptor, service_endpoint_ids, service_config_rules, service_constraints,
+        service_device_uuids, get_device_descriptor, get_connect_rules,
+        service_client : ServiceClient):    # pylint: disable=redefined-outer-name
+
+        service_client.DeleteService(ServiceId(**service_id))
diff --git a/src/start_webui_dev_mode.sh b/src/start_webui_dev_mode.sh
index 46de8314f314eaf4037fdd8ebde72091ac1c29d1..32cf9c7f131ced88c4b496c26ab6890f571aa2f2 100755
--- a/src/start_webui_dev_mode.sh
+++ b/src/start_webui_dev_mode.sh
@@ -1,3 +1,5 @@
+# for development purposes only
+
 export CONTEXT_SERVICE_ADDRESS=`kubectl get service/contextservice -n tf-dev -o jsonpath='{.spec.clusterIP}'`
 
 echo $CONTEXT_SERVICE_ADDRESS
@@ -8,4 +10,6 @@ echo $DEVICE_SERVICE_ADDRESS
 
 export HOST="127.0.0.1"
 
+export FLASK_ENV="development"
+
 python -m webui.service
diff --git a/src/webui/.gitlab-ci.yml b/src/webui/.gitlab-ci.yml
index 4d0aa02ebb91ef62a6b0bbfd4384d5eeefd67241..bc1ce61bd71b23e9b9fa578a70da4648519025d1 100644
--- a/src/webui/.gitlab-ci.yml
+++ b/src/webui/.gitlab-ci.yml
@@ -1,7 +1,7 @@
 # build, tag and push the Docker image to the gitlab registry
 build webui:
   variables:
-    IMAGE_NAME: 'webuiwervice' # name of the microservice
+    IMAGE_NAME: 'webuiservice' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -24,7 +24,7 @@ build webui:
 # apply unit test to the webui component
 unit test webui:
   variables:
-    IMAGE_NAME: 'webuiwervice' # name of the microservice
+    IMAGE_NAME: 'webuiservice' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: unit_test
   needs:
@@ -35,9 +35,10 @@ unit test webui:
     - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run --name $IMAGE_NAME -d -p 8004:8004 --network=teraflowbridge  --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - docker run --name $IMAGE_NAME -d -p 8004:8004 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge  --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - docker ps -a
-    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose ${IMAGE_NAME}/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml ; coverage report --include='webui/*' --show-missing "
+  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
     - docker rm -f $IMAGE_NAME
     - docker network rm teraflowbridge
@@ -54,7 +55,8 @@ unit test webui:
   artifacts:
       when: always
       reports:
-        junit: src/$IMAGE_NAME/tests/report.xml
+        junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
+        cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
 
 # Deployment of the webui service in Kubernetes Cluster
 deploy webui:
diff --git a/src/webui/Dockerfile b/src/webui/Dockerfile
index b65889025c8d52c42ded40fcff0f574a0923d87d..17d63932a6302c4a0f099f70f846ce7d2b164228 100644
--- a/src/webui/Dockerfile
+++ b/src/webui/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get --yes --quiet --quiet update && \
 
 # Set Python to show logs as they occur
 ENV PYTHONUNBUFFERED=0
+ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION='python'
 
 # Download the gRPC health probe -- not needed here... health will be asserted using HTTP
 # RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
diff --git a/src/webui/requirements.in b/src/webui/requirements.in
index b913056357776460e11afe6ed04d0479e10793e6..8d30ba299df8cdc3c52885082c0dcfdfe0586f70 100644
--- a/src/webui/requirements.in
+++ b/src/webui/requirements.in
@@ -8,3 +8,4 @@ prometheus-client
 pytest
 pytest-benchmark
 lorem-text
+coverage
diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py
index 55d6fb9f52f21eaa272647dca87e6d710db98fda..ee49d8cb6ac011d6234cd9cc4dd72dcd4fbf0d58 100644
--- a/src/webui/service/main/routes.py
+++ b/src/webui/service/main/routes.py
@@ -27,3 +27,8 @@ def home():
         context_form.context.data = session['context_uuid']
 
     return render_template('main/home.html', context_form=context_form)
+
+
+@main.get('/about')
+def about():
+    return render_template('main/about.html')
diff --git a/src/webui/service/static/partners.png b/src/webui/service/static/partners.png
new file mode 100644
index 0000000000000000000000000000000000000000..f88680212f68cdb4c17ad0de55b9d22ef9276a23
Binary files /dev/null and b/src/webui/service/static/partners.png differ
diff --git a/src/webui/service/templates/base.html b/src/webui/service/templates/base.html
index 862ae6b44f745baf3f787634f95421fb2f0459bc..6ada3c8502d26f9ff8e2ac289bbcdd822d7a4a32 100644
--- a/src/webui/service/templates/base.html
+++ b/src/webui/service/templates/base.html
@@ -5,7 +5,7 @@
     <meta charset="utf-8">
     <meta name="viewport" content="width=device-width, initial-scale=1">
 
-    <link rel="shortcut icon" href="https://teraflow-h2020.eu/sites/teraflow-h2020.eu/files/public/favicon.png" type="image/png" />
+    <link rel="shortcut icon" href="https://teraflow-h2020.eu/sites/teraflow/files/public/favicon.png" type="image/png" />
 
     <!-- Bootstrap CSS -->
     <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.2/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-uWxY/CJNBR+1zjPWmfnSnVxwRheevXITnMqoEIeG1LJrdI0GlVs/9cVSyPYXdcSF" crossorigin="anonymous">
@@ -23,7 +23,7 @@
     <nav class="navbar navbar-expand-lg navbar-dark bg-primary" style="margin-bottom: 10px;">
         <div class="container-fluid">
           <a class="navbar-brand" href="#">
-            <img src="https://teraflow-h2020.eu/sites/teraflow-h2020.eu/files/public/favicon.png" alt="" width="30" height="24" class="d-inline-block align-text-top"/>
+            <img src="https://teraflow-h2020.eu/sites/teraflow/files/public/favicon.png" alt="" width="30" height="24" class="d-inline-block align-text-top"/>
             TeraFlow
           </a>
           <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarColor02" aria-controls="navbarColor02" aria-expanded="false" aria-label="Toggle navigation">
@@ -54,7 +54,7 @@
                 {% endif %}
                 <!-- <a class="nav-link" href="{{ url_for('service.home') }}">Service</a> -->
               </li>
-              <li class="nav-item">
+              <!-- <li class="nav-item">
                 <a class="nav-link" href="#">Compute</a>
               </li>
               <li class="nav-item">
@@ -63,9 +63,9 @@
               
               <li class="nav-item">
                 <a class="nav-link" href="#">Monitoring</a>
-              </li>
+              </li> -->
               <li class="nav-item">
-                <a class="nav-link" href="#">About</a>
+                <a class="nav-link" href="{{ url_for('main.about') }}">About</a>
               </li>
             </ul>
             <span class="navbar-text" style="color: #fff;">
diff --git a/src/webui/service/templates/device/home.html b/src/webui/service/templates/device/home.html
index ba70f6b8ccbf2508ef881cf64e471fc549799490..6010ed6a91c583dc352d14fc4e4b2543ee7e7eef 100644
--- a/src/webui/service/templates/device/home.html
+++ b/src/webui/service/templates/device/home.html
@@ -4,12 +4,12 @@
     <h1>Devices</h1>
 
     <div class="row">
-        <div class="col">
+        <!-- <div class="col">
             <a href="{{ url_for('device.add') }}" class="btn btn-primary" style="margin-bottom: 10px;">
                 <i class="bi bi-plus"></i>
                 Add New Device
             </a>
-        </div>
+        </div> -->
         <div class="col">
             {{ devices | length }} devices found in context <i>{{ session['context_uuid'] }}</i>
         </div>
@@ -32,7 +32,7 @@
             <th scope="col">Drivers</th>
             <th scope="col">Status</th>
             <th scope="col">Configuration</th>
-            <th scope="col"></th>
+            <!-- <th scope="col"></th> -->
           </tr>
         </thead>
         <tbody>
@@ -40,9 +40,9 @@
                 {% for device in devices %}
                 <tr>
                     <td>
-                        <a href="{{ url_for('device.detail', device_uuid=device.device_id.device_uuid.uuid) }}">
+                        <!-- <a href="{{ url_for('device.detail', device_uuid=device.device_id.device_uuid.uuid) }}"> -->
                             {{ device.device_id.device_uuid.uuid }}
-                        </a>
+                        <!-- </a> -->
                     </td>
                     <td>
                         {{ device.device_type }}
@@ -72,14 +72,14 @@
                             {% endfor %}
                         </ul>
                     </td>
-                    <td>
+                    <!-- <td>
                         <a href="{{ url_for('device.detail', device_uuid=device.device_id.device_uuid.uuid) }}">
                             <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16">
                                 <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/>
                                 <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/>
                             </svg>
                         </a>
-                    </td>
+                    </td> -->
                 </tr>
                 {% endfor %}
             {% else %}
diff --git a/src/webui/service/templates/main/about.html b/src/webui/service/templates/main/about.html
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..9ac1e29e8b326a3cefb8712b2bd9db92eebab226 100644
--- a/src/webui/service/templates/main/about.html
+++ b/src/webui/service/templates/main/about.html
@@ -0,0 +1,9 @@
+{% extends 'base.html' %}
+{% block content %}
+    <h1>TeraFlow OS</h1>
+
+    <p>For more information, visit the <a href="https://teraflow-h2020.eu/" target="_newtf">TeraFlow H2020 webpage</a>.</p>
+
+    <img alt="Consortium" src="{{ url_for('static', filename='partners.png') }}"/>
+
+{% endblock %}
\ No newline at end of file
diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html
index 3673200709db52a19924789e4d3a68156bef7793..648ce99bd9276dd12986f644977520a980f6be79 100644
--- a/src/webui/service/templates/service/home.html
+++ b/src/webui/service/templates/service/home.html
@@ -4,12 +4,12 @@
     <h1>Services</h1>
 
     <div class="row">
-        <div class="col">
+        <!-- <div class="col">
             <a href="{{ url_for('service.add') }}" class="btn btn-primary" style="margin-bottom: 10px;">
                 <i class="bi bi-plus"></i>
                 Add New Service
             </a>
-        </div>
+        </div> -->
         <div class="col">
             {{ services | length }} services found in context <i>{{ session['context_uuid'] }}</i>
         </div>
@@ -33,7 +33,7 @@
             <th scope="col">Constraints</th>
             <th scope="col">Status</th>
             <th scope="col">Configuration</th>
-            <th scope="col"></th>
+            <!-- <th scope="col"></th> -->
           </tr>
         </thead>
         <tbody>
@@ -41,9 +41,9 @@
                 {% for service in services %}
                 <tr>
                     <td>
-                        <a href="{{ url_for('service.detail', service_uuid=service.service_id.service_uuid.uuid) }}">
+                        <!-- <a href="{{ url_for('service.detail', service_uuid=service.service_id.service_uuid.uuid) }}"> -->
                             {{ service.service_id.service_uuid.uuid }}
-                        </a>
+                        <!-- </a> -->
                     </td>
                     <td>
                         {{ ste.Name(service.service_type) }}
@@ -74,14 +74,14 @@
                             {% endfor %}
                         </ul>
                     </td>
-                    <td>
+                    <!-- <td>
                         <a href="{{ url_for('service.detail', service_uuid=service.service_id.service_uuid.uuid) }}">
                             <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16">
                                 <path d="M16 8s-3-5.5-8-5.5S0 8 0 8s3 5.5 8 5.5S16 8 16 8zM1.173 8a13.133 13.133 0 0 1 1.66-2.043C4.12 4.668 5.88 3.5 8 3.5c2.12 0 3.879 1.168 5.168 2.457A13.133 13.133 0 0 1 14.828 8c-.058.087-.122.183-.195.288-.335.48-.83 1.12-1.465 1.755C11.879 11.332 10.119 12.5 8 12.5c-2.12 0-3.879-1.168-5.168-2.457A13.134 13.134 0 0 1 1.172 8z"/>
                                 <path d="M8 5.5a2.5 2.5 0 1 0 0 5 2.5 2.5 0 0 0 0-5zM4.5 8a3.5 3.5 0 1 1 7 0 3.5 3.5 0 0 1-7 0z"/>
                             </svg>
                         </a>
-                    </td>
+                    </td> -->
                 </tr>
                 {% endfor %}
             {% else %}
diff --git a/src/webui/tests/test_unitary.py b/src/webui/tests/test_unitary.py
index 6983049b5f49ed9a08ca0e2b4fb2c10b7bb4db84..0612c9d05670e9b1a062694f41bf4762b13d1d7e 100644
--- a/src/webui/tests/test_unitary.py
+++ b/src/webui/tests/test_unitary.py
@@ -47,6 +47,7 @@ class TestWebUI(ClientTestCase):
             url_for('main.home')
             url_for('service.home')
             url_for('device.home')
+            url_for('main.about')
     
     def test_device_add_action_success(self, client):
         with client.session_transaction() as sess: