diff --git a/manifests/.gitlab-ci.yml b/manifests/.gitlab-ci.yml
index 0c0b1d4dc0783e1bf3158d0629558b4fedd355d6..9fc872ca61752d1be012aa4b866b3b592171e2cd 100644
--- a/manifests/.gitlab-ci.yml
+++ b/manifests/.gitlab-ci.yml
@@ -6,5 +6,4 @@ dependencies all:
     - kubectl version
     - kubectl get all
     - kubectl apply -f "manifests/prometheus.yaml"
-    - kubectl apply -f "manifests/redis.yaml"
     - kubectl get all
diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml
index cf7da7e43fbfc02f5872745fc6f10f3cfcee6cb2..149ae4e72d4c55679f81876cf681253788d8d5ab 100644
--- a/manifests/contextservice.yaml
+++ b/manifests/contextservice.yaml
@@ -6,6 +6,7 @@ spec:
   selector:
     matchLabels:
       app: contextservice
+  replicas: 1
   template:
     metadata:
       labels:
@@ -13,6 +14,17 @@ spec:
     spec:
       terminationGracePeriodSeconds: 5
       containers:
+      - name: redis
+        image: redis:6.2
+        ports:
+        - containerPort: 6379
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
       - name: server
         image: registry.gitlab.com/teraflow-h2020/controller/context:latest
         imagePullPolicy: Always
@@ -20,12 +32,14 @@ spec:
         - containerPort: 1010
         - containerPort: 8080
         env:
-        - name: DB_ENGINE
+        - name: DB_BACKEND
           value: "redis"
         - name: REDIS_DATABASE_ID
           value: "0"
         - name: LOG_LEVEL
-          value: "DEBUG"
+          value: "INFO"
+        - name: POPULATE_FAKE_DATA
+          value: "true"
         readinessProbe:
           exec:
             command: ["/bin/grpc_health_probe", "-addr=:1010"]
diff --git a/manifests/redis.yaml b/manifests/redis.yaml
deleted file mode 100644
index 9aaebb1673637e6afc4fcf2d5887009f5d365a4d..0000000000000000000000000000000000000000
--- a/manifests/redis.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: redis
-spec:
-  selector:
-    matchLabels:
-      app: redis
-  replicas: 1
-  template:
-    metadata:
-      labels:
-        app: redis
-        version: v1
-    spec:
-      containers:
-      - name: redis
-        image: redis:6.2
-        ports:
-        - containerPort: 6379
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: redis
-  labels:
-    app: redis
-spec:
-  type: ClusterIP
-  selector:
-    app: redis
-  ports:
-  - name: redis
-    protocol: TCP
-    port: 6379
-    targetPort: 6379
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: redis-public
-  labels:
-    app: redis
-spec:
-  type: NodePort
-  selector:
-    app: redis
-  ports:
-  - name: redis
-    protocol: TCP
-    port: 6379
-    targetPort: 6379
----
diff --git a/proto/context.proto b/proto/context.proto
index e578f60fbaa78620abb09e81ad7fad2afaf39c07..251326c8094831b60fb82b24fb1ff15c6d6625d5 100644
--- a/proto/context.proto
+++ b/proto/context.proto
@@ -1,98 +1,298 @@
 syntax = "proto3";
 package context;
 
-
 service ContextService {
-  rpc GetTopology (Empty) returns (Topology) {}
+  rpc ListContextIds   (Empty     ) returns (       ContextIdList ) {}
+  rpc ListContexts     (Empty     ) returns (       ContextList   ) {}
+  rpc GetContext       (ContextId ) returns (       Context       ) {}
+  rpc SetContext       (Context   ) returns (       ContextId     ) {}
+  rpc RemoveContext    (ContextId ) returns (       Empty         ) {}
+  rpc GetContextEvents (Empty     ) returns (stream ContextEvent  ) {}
+
+  rpc ListTopologyIds  (ContextId ) returns (       TopologyIdList) {}
+  rpc ListTopologies   (ContextId ) returns (       TopologyList  ) {}
+  rpc GetTopology      (TopologyId) returns (       Topology      ) {}
+  rpc SetTopology      (Topology  ) returns (       TopologyId    ) {}
+  rpc RemoveTopology   (TopologyId) returns (       Empty         ) {}
+  rpc GetTopologyEvents(Empty     ) returns (stream TopologyEvent ) {}
+
+  rpc ListDeviceIds    (Empty     ) returns (       DeviceIdList  ) {}
+  rpc ListDevices      (Empty     ) returns (       DeviceList    ) {}
+  rpc GetDevice        (DeviceId  ) returns (       Device        ) {}
+  rpc SetDevice        (Device    ) returns (       DeviceId      ) {}
+  rpc RemoveDevice     (DeviceId  ) returns (       Empty         ) {}
+  rpc GetDeviceEvents  (Empty     ) returns (stream DeviceEvent   ) {}
+
+  rpc ListLinkIds      (Empty     ) returns (       LinkIdList    ) {}
+  rpc ListLinks        (Empty     ) returns (       LinkList      ) {}
+  rpc GetLink          (LinkId    ) returns (       Link          ) {}
+  rpc SetLink          (Link      ) returns (       LinkId        ) {}
+  rpc RemoveLink       (LinkId    ) returns (       Empty         ) {}
+  rpc GetLinkEvents    (Empty     ) returns (stream LinkEvent     ) {}
 
-  rpc AddLink(Link) returns (LinkId) {}
-  rpc DeleteLink(LinkId) returns (Empty) {}
+  rpc ListServiceIds   (ContextId ) returns (       ServiceIdList ) {}
+  rpc ListServices     (ContextId ) returns (       ServiceList   ) {}
+  rpc GetService       (ServiceId ) returns (       Service       ) {}
+  rpc SetService       (Service   ) returns (       ServiceId     ) {}
+  rpc RemoveService    (ServiceId ) returns (       Empty         ) {}
+  rpc GetServiceEvents (Empty     ) returns (stream ServiceEvent  ) {}
 }
 
-message Empty {
+// ----- Generic -------------------------------------------------------------------------------------------------------
+message Empty {}
 
+message Uuid {
+  string uuid = 1;
 }
 
-message Context {
-  ContextId contextId= 1;
-  Topology topo = 2;
-  TeraFlowController ctl = 3;
+enum EventTypeEnum {
+  EVENTTYPE_UNDEFINED = 0;
+  EVENTTYPE_CREATE = 1;
+  EVENTTYPE_UPDATE = 2;
+  EVENTTYPE_REMOVE = 3;
 }
 
+message Event {
+  double timestamp = 1;
+  EventTypeEnum event_type = 2;
+}
+
+// ----- Context -------------------------------------------------------------------------------------------------------
 message ContextId {
-  Uuid contextUuid = 1;
+  Uuid context_uuid = 1;
 }
 
-message Topology {
-  TopologyId topoId = 2;
-  repeated Device device = 3;
-  repeated Link link = 4; 
+message Context {
+  ContextId context_id = 1;
+  repeated TopologyId topology_ids = 2;
+  repeated ServiceId service_ids = 3;
+  TeraFlowController controller = 4;
 }
 
-message Link {
-  LinkId link_id = 1;
-  repeated EndPointId endpointList = 2;
+message ContextIdList {
+  repeated ContextId context_ids = 1;
 }
 
+message ContextList {
+  repeated Context contexts = 1;
+}
+
+message ContextEvent {
+  Event event = 1;
+  ContextId context_id = 2;
+}
+
+
+// ----- Topology ------------------------------------------------------------------------------------------------------
 message TopologyId {
-  ContextId contextId = 1;
-  Uuid topoId = 2;
+  ContextId context_id = 1;
+  Uuid topology_uuid = 2;
 }
 
-message Constraint {
-  string constraint_type = 1;
-  string constraint_value = 2;
+message Topology {
+  TopologyId topology_id = 1;
+  repeated DeviceId device_ids = 2;
+  repeated LinkId link_ids = 3;
+}
+
+message TopologyIdList {
+  repeated TopologyId topology_ids = 1;
+}
+
+message TopologyList {
+  repeated Topology topologies = 1;
+}
+
+message TopologyEvent {
+  Event event = 1;
+  TopologyId topology_id = 2;
+}
+
+
+// ----- Device --------------------------------------------------------------------------------------------------------
+message DeviceId {
+  Uuid device_uuid = 1;
 }
 
 message Device {
   DeviceId device_id = 1;
   string device_type = 2;
   DeviceConfig device_config = 3;
-  DeviceOperationalStatus devOperationalStatus = 4;
-  repeated EndPoint endpointList = 5;  
+  DeviceOperationalStatusEnum device_operational_status = 4;
+  repeated DeviceDriverEnum device_drivers = 5;
+  repeated EndPoint device_endpoints = 6;
 }
 
 message DeviceConfig {
-  string device_config = 1;
+  repeated ConfigRule config_rules = 1;
 }
 
-message EndPoint {
-  EndPointId port_id = 1;
-  string port_type = 2;
+enum DeviceDriverEnum {
+  DEVICEDRIVER_UNDEFINED = 0; // also used for emulated
+  DEVICEDRIVER_OPENCONFIG = 1;
+  DEVICEDRIVER_TRANSPORT_API = 2;
+  DEVICEDRIVER_P4 = 3;
+  DEVICEDRIVER_IETF_NETWORK_TOPOLOGY = 4;
+  DEVICEDRIVER_ONF_TR_352 = 5;
 }
 
-message EndPointId {
-  TopologyId topoId = 1;
-  DeviceId dev_id = 2;
-  Uuid port_id = 3;
+enum DeviceOperationalStatusEnum {
+  DEVICEOPERATIONALSTATUS_UNDEFINED = 0;
+  DEVICEOPERATIONALSTATUS_DISABLED = 1;
+  DEVICEOPERATIONALSTATUS_ENABLED = 2;
 }
 
-message DeviceId {
-  Uuid device_id = 1;
+message DeviceIdList {
+  repeated DeviceId device_ids = 1;
+}
+
+message DeviceList {
+  repeated Device devices = 1;
 }
 
+message DeviceEvent {
+  Event event = 1;
+  DeviceId device_id = 2;
+}
+
+
+// ----- Link ----------------------------------------------------------------------------------------------------------
 message LinkId {
-  Uuid link_id = 1;
+  Uuid link_uuid = 1;
 }
 
-message Uuid {
-  string uuid = 1;
+message Link {
+  LinkId link_id = 1;
+  repeated EndPointId link_endpoint_ids = 2;
+}
+
+message LinkIdList {
+  repeated LinkId link_ids = 1;
 }
 
-enum DeviceOperationalStatus {
-  KEEP_STATUS = 0; // Do not change operational status of device (used in configure)
-  DISABLED    = -1;
-  ENABLED     = 1;
+message LinkList {
+  repeated Link links = 1;
 }
 
+message LinkEvent {
+  Event event = 1;
+  LinkId link_id = 2;
+}
+
+
+// ----- Service -------------------------------------------------------------------------------------------------------
+message ServiceId {
+  ContextId context_id = 1;
+  Uuid service_uuid = 2;
+}
+
+message Service {
+  ServiceId service_id = 1;
+  ServiceTypeEnum service_type = 2;
+  repeated EndPointId service_endpoint_ids = 3;
+  repeated Constraint service_constraints = 4;
+  ServiceStatus service_status = 5;
+  ServiceConfig service_config = 6;
+}
+
+enum ServiceTypeEnum {
+  SERVICETYPE_UNKNOWN = 0;
+  SERVICETYPE_L3NM = 1;
+  SERVICETYPE_L2NM = 2;
+  SERVICETYPE_TAPI_CONNECTIVITY_SERVICE = 3;
+}
+
+enum ServiceStatusEnum {
+  SERVICESTATUS_UNDEFINED = 0;
+  SERVICESTATUS_PLANNED = 1;
+  SERVICESTATUS_ACTIVE =  2;
+  SERVICESTATUS_PENDING_REMOVAL = 3;
+}
+
+message ServiceStatus {
+  ServiceStatusEnum service_status = 1;
+}
+
+message ServiceConfig {
+  repeated ConfigRule config_rules = 1;
+}
+
+message ServiceIdList {
+  repeated ServiceId service_ids = 1;
+}
+
+message ServiceList {
+  repeated Service services = 1;
+}
+
+message ServiceEvent {
+  Event event = 1;
+  ServiceId service_id = 2;
+}
+
+
+// ----- Endpoint ------------------------------------------------------------------------------------------------------
+message EndPointId {
+  TopologyId topology_id = 1;
+  DeviceId device_id = 2;
+  Uuid endpoint_uuid = 3;
+}
+
+message EndPoint {
+  EndPointId endpoint_id = 1;
+  string endpoint_type = 2;
+}
+
+
+// ----- Configuration -------------------------------------------------------------------------------------------------
+enum ConfigActionEnum {
+  CONFIGACTION_UNDEFINED = 0;
+  CONFIGACTION_SET = 1;
+  CONFIGACTION_DELETE = 2;
+}
+
+message ConfigRule {
+  ConfigActionEnum action = 1;
+  string resource_key = 2;
+  string resource_value = 3;
+}
+
+
+// ----- Constraint ----------------------------------------------------------------------------------------------------
+message Constraint {
+  string constraint_type = 1;
+  string constraint_value = 2;
+}
+
+
+// ----- Connection ----------------------------------------------------------------------------------------------------
+message ConnectionId {
+  Uuid connection_uuid = 1;
+}
+
+message Connection {
+  ConnectionId connection_id = 1;
+  ServiceId related_service_id = 2;
+  repeated EndPointId path = 3;
+}
+
+message ConnectionIdList {
+  repeated ConnectionId connection_ids = 1;
+}
+
+message ConnectionList {
+  repeated Connection connections = 1;
+}
+
+
+// ----- Miscellaneous -------------------------------------------------------------------------------------------------
 message TeraFlowController {
-  ContextId ctl_id = 1;
-  string ipaddress = 2;
+  ContextId context_id = 1;
+  string ip_address = 2;
+  uint32 port = 3;
 }
 
 message AuthenticationResult {
-  ContextId ctl_id = 1;
+  ContextId context_id = 1;
   bool authenticated = 2;
 }
-
-
diff --git a/proto/device.proto b/proto/device.proto
index 4fe74b78afd3790e392c1df4df66d409316dda05..2a06731b4d01be39a78781032ee635f69957eaa8 100644
--- a/proto/device.proto
+++ b/proto/device.proto
@@ -4,7 +4,8 @@ package device;
 import "context.proto";
 
 service DeviceService {
-  rpc AddDevice(context.Device) returns (context.DeviceId) {}
-  rpc ConfigureDevice(context.Device) returns (context.DeviceId) {}
-  rpc DeleteDevice(context.DeviceId) returns (context.Empty) {}
+  rpc AddDevice       (context.Device  ) returns (context.DeviceId    ) {}
+  rpc ConfigureDevice (context.Device  ) returns (context.DeviceId    ) {}
+  rpc DeleteDevice    (context.DeviceId) returns (context.Empty       ) {}
+  rpc GetInitialConfig(context.DeviceId) returns (context.DeviceConfig) {}
 }
diff --git a/proto/service.proto b/proto/service.proto
index fb10d11b77b4c35d89bdb6047691045c71322644..6a6c1f0e1c6683d4d01946f9a8bb365c8dbdb2ef 100644
--- a/proto/service.proto
+++ b/proto/service.proto
@@ -1,73 +1,13 @@
-//Example of topology
 syntax = "proto3";
 package service;
 
 import "context.proto";
 
 service ServiceService {
-  rpc GetServiceList (context.Empty) returns (ServiceList) {}
-  rpc CreateService (Service) returns (ServiceId) {}
-  rpc UpdateService (Service) returns (ServiceId) {}
-  rpc DeleteService (ServiceId) returns (context.Empty) {}
-  rpc GetServiceById (ServiceId) returns (Service) {}
-  rpc GetConnectionList (context.Empty) returns (ConnectionList) {}
-  
+  rpc GetServiceList   (context.Empty    ) returns (context.ServiceList   ) {}
+  rpc CreateService    (context.Service  ) returns (context.ServiceId     ) {}
+  rpc UpdateService    (context.Service  ) returns (context.ServiceId     ) {}
+  rpc DeleteService    (context.ServiceId) returns (context.Empty         ) {}
+  rpc GetServiceById   (context.ServiceId) returns (context.Service       ) {}
+  rpc GetConnectionList(context.Empty    ) returns (context.ConnectionList) {}
 }
-
-message ServiceList {
-  repeated Service cs = 1;
-}
-
-message Service {
-  ServiceId cs_id = 1;
-  ServiceType serviceType = 2;
-  repeated context.EndPointId endpointList = 3;
-  repeated context.Constraint constraint = 4;
-  ServiceState serviceState = 5;
-  ServiceConfig serviceConfig = 6;
-}
-
-enum ServiceType {
-  UNKNOWN = 0;
-  L3NM = 1;
-  L2NM = 2;
-  TAPI_CONNECTIVITY_SERVICE = 3;
-}
-
-message ServiceConfig {
-  string serviceConfig = 1;
-}
-
-message ServiceId {
-  context.ContextId contextId = 1;
-  context.Uuid cs_id = 2;
-}
-
-message ServiceIdList {
-  repeated ServiceId serviceIdList = 1;
-}
-
-message ServiceState {
-  ServiceStateEnum serviceState = 1;
-}
-
-enum ServiceStateEnum {
-  PLANNED = 0;
-  ACTIVE =  1;
-  PENDING_REMOVAL = 2;
-}
-
-message ConnectionList {
-  repeated Connection connectionList = 1;
-}
-
-message Connection {
-  ConnectionId con_id = 1;
-  ServiceId relatedServiceId = 2;
-  repeated context.EndPointId path = 3;
-}
-
-message ConnectionId {
-  context.Uuid con_id = 1;
-}
-
diff --git a/report_coverage_common.sh b/report_coverage_common.sh
new file mode 100755
index 0000000000000000000000000000000000000000..500b3dfb130e6e0614a0beb649f0d93bf7d0ffee
--- /dev/null
+++ b/report_coverage_common.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+./report_coverage_all.sh | grep -v -E "^(cent|comp|cont|devi|moni|serv|test)" | grep --color -E -i "^common/.*$|$"
diff --git a/report_coverage_context.sh b/report_coverage_context.sh
index 3a404a62698cdd95f94c9ed7d4c8b4b073778d08..95966ead0bdf84b39be3e3f3063e1b93dfad32f1 100755
--- a/report_coverage_context.sh
+++ b/report_coverage_context.sh
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-./report_coverage_all.sh | grep --color -E -i "^context/.*$|$"
+./report_coverage_all.sh | grep -v -E "^(cent|com|devi|moni|serv|test)" | grep --color -E -i "^context/.*$|$"
diff --git a/run_local_tests.sh b/run_local_tests.sh
index 3a02c1f6a6880849ce29bc62771c158b397ffd95..14ec7b32eff36eb5cebfed363439fb3ee3fbbba7 100755
--- a/run_local_tests.sh
+++ b/run_local_tests.sh
@@ -15,8 +15,9 @@ cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PRO
 rm -f $COVERAGEFILE
 
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
-    common/database/tests/test_unitary.py \
-    common/database/tests/test_engine_inmemory.py
+    common/orm/tests/test_unitary.py \
+    common/message_broker/tests/test_unitary.py \
+    common/rpc_method_wrapper/tests/test_unitary.py
 
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     centralizedattackdetector/tests/test_unitary.py
@@ -33,12 +34,3 @@ coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
 
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     compute/tests/test_unitary.py
-
-# Run integration tests and analyze coverage of code at same time
-export DB_ENGINE='redis'
-export REDIS_SERVICE_HOST='10.1.7.194'
-export REDIS_SERVICE_PORT='31789'
-export REDIS_DATABASE_ID='0'
-coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
-    common/database/tests/test_engine_redis.py \
-    tester_integration/test_context_device_service.py
diff --git a/src/common/Checkers.py b/src/common/Checkers.py
deleted file mode 100644
index b9e0f3067357912ef8e7404768b234f3c58d40e5..0000000000000000000000000000000000000000
--- a/src/common/Checkers.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from typing import Any, List, Set, Sized, Tuple, Union
-
-def chk_none(name : str, value : Any) -> Any:
-    if value is not None:
-        msg = '{}({}) is not None.'
-        raise AttributeError(msg.format(str(name), str(value)))
-    return value
-
-def chk_not_none(name : str, value : Any) -> Any:
-    if value is None:
-        msg = '{}({}) is None.'
-        raise AttributeError(msg.format(str(name), str(value)))
-    return value
-
-def chk_type(name : str, value : Any, type_or_types : Union[type, Set[type]] = set()) -> Any:
-    if not isinstance(value, type_or_types):
-        msg = '{}({}) is of a wrong type({}). Accepted type_or_types({}).'
-        raise AttributeError(msg.format(str(name), str(value), type(value).__name__, str(type_or_types)))
-    return value
-
-def chk_length(
-    name : str, value : Sized, allow_empty : bool = False,
-    min_length : Union[int, None] = None, max_length : Union[int, None] = None,
-    allowed_lengths : Union[None, int, Set[int], List[int], Tuple[int]] = None) -> Any:
-
-    length = len(chk_type(name, value, Sized))
-
-    allow_empty = chk_type('allow_empty for {}'.format(name), allow_empty, bool)
-    if not allow_empty and length == 0:
-        msg = '{}({}) is out of range: allow_empty({}) min_length({}) max_length({}) allowed_lengths({}).'
-        raise AttributeError(msg.format(
-            str(name), str(value), str(allow_empty), str(min_length), str(max_length), str(allowed_lengths)))
-
-
-    if min_length is not None:
-        min_length = chk_type('min_length for {}'.format(name), min_length, int)
-        if length < min_length:
-            msg = '{}({}) is out of range: allow_empty({}) min_length({}) max_length({}) allowed_lengths({}).'
-            raise AttributeError(msg.format(
-                str(name), str(value), str(allow_empty), str(min_length), str(max_length), str(allowed_lengths)))
-
-    if max_length is not None:
-        max_length = chk_type('max_length for {}'.format(name), max_length, int)
-        if length > max_length:
-            msg = '{}({}) is out of range: allow_empty({}) min_value({}) max_value({}) allowed_lengths({}).'
-            raise AttributeError(msg.format(
-                str(name), str(value), str(allow_empty), str(max_length), str(max_length), str(allowed_lengths)))
-
-    if allowed_lengths is not None:
-        chk_type('allowed_lengths for {}'.format(name), allowed_lengths, (int, set, list, tuple))
-        if isinstance(allowed_lengths, int):
-            fixed_length = allowed_lengths
-            if length != fixed_length:
-                msg = '{}({}) is out of range: allow_empty({}) min_value({}) max_value({}) allowed_lengths({}).'
-                raise AttributeError(msg.format(
-                    str(name), str(value), str(allow_empty), str(max_length), str(max_length), str(allowed_lengths)))
-        else:
-            for i in allowed_lengths: chk_type('allowed_lengths[#{}] for {}'.format(i, name), i, int)
-            if length not in allowed_lengths:
-                msg = '{}({}) is out of range: allow_empty({}) min_value({}) max_value({}) allowed_lengths({}).'
-                raise AttributeError(msg.format(
-                    str(name), str(value), str(allow_empty), str(max_length), str(max_length), str(allowed_lengths)))
-
-    return value
-
-def chk_string(
-    name : str, value : Any, allow_empty : bool = False,
-    min_length : Union[int, None] = None, max_length : Union[int, None] = None,
-    allowed_lengths : Union[None, int, Set[int], List[int], Tuple[int]] = None) -> str:
-
-    chk_type(name, value, str)
-    chk_length(
-        name, value, allow_empty=allow_empty, min_length=min_length, max_length=max_length,
-        allowed_lengths=allowed_lengths)
-    return value
-
-def chk_float(name, value, type_or_types=(int, float), min_value=None, max_value=None) -> float:
-    chk_not_none(name, value)
-    chk_type(name, value, type_or_types)
-    if min_value is not None:
-        chk_type(name, value, type_or_types)
-        if value < min_value:
-            msg = '{}({}) lower than min_value({}).'
-            raise AttributeError(msg.format(str(name), str(value), str(min_value)))
-    if max_value is not None:
-        chk_type(name, value, type_or_types)
-        if value > max_value:
-            msg = '{}({}) greater than max_value({}).'
-            raise AttributeError(msg.format(str(name), str(value), str(max_value)))
-    return value
-
-def chk_integer(name, value, min_value=None, max_value=None) -> int:
-    return int(chk_float(name, value, type_or_types=int, min_value=min_value, max_value=max_value))
-
-def chk_options(name, value, options):
-    chk_not_none(name, value)
-    if value not in options:
-        msg = '{}({}) is not one of options({}).'
-        raise AttributeError(msg.format(str(name), str(value), str(options)))
-    return value
diff --git a/src/common/Constants.py b/src/common/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..2595a1606631fcea4152d43fdf32e7db0dbb560a
--- /dev/null
+++ b/src/common/Constants.py
@@ -0,0 +1,2 @@
+DEFAULT_CONTEXT_UUID = 'admin'
+DEFAULT_TOPOLOGY_UUID = 'admin'
diff --git a/src/common/database/Factory.py b/src/common/database/Factory.py
deleted file mode 100644
index e67036372f3b4f0fc76d3874f3c4cb0c1a6d666d..0000000000000000000000000000000000000000
--- a/src/common/database/Factory.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import logging
-from enum import Enum
-from common.Settings import get_setting
-from common.database.api.Database import Database
-from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
-from common.database.engines.redis.RedisDatabaseEngine import RedisDatabaseEngine
-
-LOGGER = logging.getLogger(__name__)
-
-class DatabaseEngineEnum(Enum):
-    INMEMORY = 'inmemory'
-    REDIS = 'redis'
-    #MONGO = 'mongo'
-    #RETHINK = 'rethink'
-    #ETCD = 'etcd'
-
-ENGINES = {
-    DatabaseEngineEnum.INMEMORY.value: InMemoryDatabaseEngine,
-    DatabaseEngineEnum.REDIS.value: RedisDatabaseEngine,
-    #DatabaseEngineEnum.MONGO.value: MongoDatabase,
-    #DatabaseEngineEnum.RETHINK.value: RethinkDatabase,
-    #DatabaseEngineEnum.ETCD.value: EtcdDatabase,
-}
-
-DEFAULT_DB_ENGINE = DatabaseEngineEnum.INMEMORY
-
-def get_database(engine=None, **settings) -> Database:
-    # return an instance of Database initialized with selected engine.
-    # Engine is selected using following criteria (first that is not None is selected):
-    # 1. user selected by parameter (engine=...)
-    # 2. environment variable DB_ENGINE
-    # 3. default engine: INMEMORY
-    if engine is None: engine = get_setting('DB_ENGINE', default=DEFAULT_DB_ENGINE)
-    if engine is None: raise Exception('Database Engine not specified')
-    if isinstance(engine, DatabaseEngineEnum): engine = engine.value
-    engine_class = ENGINES.get(engine)
-    if engine_class is None: raise Exception('Unsupported DatabaseEngine({})'.format(engine))
-    LOGGER.info('Selected Database Engine: {}'.format(engine))
-    return Database(engine_class(**settings))
diff --git a/src/common/database/api/Database.py b/src/common/database/api/Database.py
deleted file mode 100644
index 319f9bbd380241d11cf6a34cdd2f14a003aed560..0000000000000000000000000000000000000000
--- a/src/common/database/api/Database.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import logging
-from typing import List
-from common.database.api.Exceptions import WrongDatabaseEngine, MutexException
-from common.database.api.context.Context import Context
-from common.database.api.context.Keys import KEY_CONTEXTS
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-
-LOGGER = logging.getLogger(__name__)
-
-class Database(_Entity):
-    def __init__(self, database_engine : _DatabaseEngine):
-        if not isinstance(database_engine, _DatabaseEngine):
-            raise WrongDatabaseEngine('database_engine must inherit from _DatabaseEngine')
-        self._database_engine = database_engine
-        super().__init__(self, 'root', 'database', {}, {})
-        self._acquired = False
-        self._owner_key = None
-        self._contexts = EntityCollection(self, KEY_CONTEXTS)
-
-    @property
-    def parent(self) -> 'Database': return self
-
-    @property
-    def database_engine(self) -> _DatabaseEngine: return self._database_engine
-
-    def __enter__(self) -> '_DatabaseEngine':
-        self._acquired, self._owner_key = self._database_engine.lock()
-        if not self._acquired: raise MutexException('Unable to acquire database lock')
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._database_engine.unlock(self._owner_key)
-
-    def clear_all(self, keep_keys=set()):
-        LOGGER.info('Cleaning up...')
-        keys = self._database_engine.keys()
-        LOGGER.info('  keys before = {}'.format(str(keys)))
-        for key in keys:
-            if(key in keep_keys): continue
-            self._database_engine.delete(key)
-        LOGGER.info('  keys after  = {}'.format(str(self._database_engine.keys())))
-
-    def dump(self) -> List[str]:
-        entries = self._database_engine.dump()
-        entries.sort()
-        return ['[{:>4s}] {:100s} :: {}'.format(k_type, k_name, k_value) for k_name,k_type,k_value in entries]
-
-    @property
-    def contexts(self) -> EntityCollection: return self._contexts
-
-    def context(self, context_uuid : str) -> Context: return Context(context_uuid, self)
diff --git a/src/common/database/api/context/Constants.py b/src/common/database/api/context/Constants.py
deleted file mode 100644
index 120b094a30e06a9476c1dce8bf91f42b383d0fa1..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/Constants.py
+++ /dev/null
@@ -1,2 +0,0 @@
-DEFAULT_CONTEXT_ID = 'admin'
-DEFAULT_TOPOLOGY_ID = 'admin'
diff --git a/src/common/database/api/context/Context.py b/src/common/database/api/context/Context.py
deleted file mode 100644
index f4b530dd2e4519568f9f27c97b0f78d8efbaa53d..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/Context.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from typing import TYPE_CHECKING, Dict, List
-from common.database.api.context.service.Service import Service
-from common.database.api.context.topology.Topology import Topology
-from common.database.api.context.Keys import KEY_CONTEXT, KEY_SERVICES, KEY_TOPOLOGIES
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-
-if TYPE_CHECKING:
-    from common.database.api.Database import Database
-
-VALIDATORS = {}  # no attributes accepted
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Context(_Entity):
-    def __init__(self, context_uuid : str, parent : 'Database'):
-        super().__init__(parent, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS)
-        self._topologies = EntityCollection(self, KEY_TOPOLOGIES)
-        self._services = EntityCollection(self, KEY_SERVICES)
-
-    @property
-    def parent(self) -> 'Database': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self
-
-    @property
-    def context_uuid(self) -> str: return self._entity_uuid
-
-    @property
-    def topologies(self) -> EntityCollection: return self._topologies
-
-    @property
-    def services(self) -> EntityCollection: return self._services
-
-    def topology(self, topology_uuid : str) -> Topology: return Topology(topology_uuid, self)
-
-    def service(self, service_uuid : str) -> Service: return Service(service_uuid, self)
-
-    def create(self) -> 'Context':
-        self.parent.contexts.add(self.context_uuid)
-        return self
-
-    def delete(self):
-        for service_uuid in self.services.get(): self.service(service_uuid).delete()
-        for topology_uuid in self.topologies.get(): self.topology(topology_uuid).delete()
-        self.parent.contexts.delete(self.context_uuid)
-        self.attributes.delete()
-
-    def dump_id(self) -> Dict:
-        return {
-            'contextUuid': {'uuid': self.context_uuid},
-        }
-
-    def dump_topologies(self) -> List:
-        return [
-            self.topology(topology_uuid).dump() for topology_uuid in self.topologies.get()
-        ]
-
-    def dump_services(self) -> List:
-        return [
-            self.service(service_uuid).dump() for service_uuid in self.services.get()
-        ]
-
-    def dump(self) -> Dict:
-        return {
-            'contextId': self.dump_id(),
-            'topologies': self.dump_topologies(),
-            'services': self.dump_services(),
-        }
diff --git a/src/common/database/api/context/Keys.py b/src/common/database/api/context/Keys.py
deleted file mode 100644
index 0842bb4e4e639c8fc83a0733e8c951eb4994fafe..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/Keys.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Database keys
-KEY_CONTEXTS            =                'contexts{container_name}'
-
-# Context keys
-KEY_CONTEXT             =                'context[{context_uuid}]'
-KEY_TOPOLOGIES          = KEY_CONTEXT  + '/topologies{container_name}'
-KEY_SERVICES            = KEY_CONTEXT  + '/services{container_name}'
-
-# Context.Topology keys
-KEY_TOPOLOGY            = KEY_CONTEXT  + '/topology[{topology_uuid}]'
-KEY_DEVICES             = KEY_TOPOLOGY + '/devices{container_name}'
-KEY_LINKS               = KEY_TOPOLOGY + '/links{container_name}'
-
-# Context.Topology.Device keys
-KEY_DEVICE              = KEY_TOPOLOGY + '/device[{device_uuid}]'
-KEY_DEVICE_ENDPOINTS    = KEY_DEVICE   + '/endpoints{container_name}'
-
-# Context.Topology.Device.Endpoint keys
-KEY_DEVICE_ENDPOINT     = KEY_DEVICE   + '/endpoint[{endpoint_uuid}]'
-
-# Context.Topology.Link keys
-KEY_LINK                = KEY_TOPOLOGY + '/link[{link_uuid}]'
-KEY_LINK_ENDPOINTS      = KEY_LINK     + '/endpoints{container_name}'
-
-# Context.Topology.Link.Endpoint Keys
-KEY_LINK_ENDPOINT       = KEY_LINK     + '/endpoint[{endpoint_uuid}]'
-
-# Service keys
-KEY_SERVICE             = KEY_CONTEXT  + '/service[{service_uuid}]'
-KEY_SERVICE_ENDPOINTS   = KEY_SERVICE  + '/endpoints{container_name}'
-KEY_SERVICE_CONSTRAINTS = KEY_SERVICE  + '/constraints{container_name}'
-
-# Context.Service.Endpoint Keys
-KEY_SERVICE_ENDPOINT    = KEY_SERVICE  + '/endpoint[{endpoint_uuid}]'
-
-# Context.Service.Constraint Keys
-KEY_SERVICE_CONSTRAINT  = KEY_SERVICE  + '/constraint[{constraint_type}]'
diff --git a/src/common/database/api/context/_structure.txt b/src/common/database/api/context/_structure.txt
deleted file mode 100644
index 0dc6219f8801bcb9d9d26c61707d82eb2925e895..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/_structure.txt
+++ /dev/null
@@ -1,165 +0,0 @@
-######################
-# Internal structure #
-######################
-
-Note (1): for containers like topologies, devices, links, services, etc. two containers are defined:
-list    List is a sorted list containing the uuid's of th elements belonging to the parent element. It is used to
-        define the order of the elements and enable to iterate them deterministically.
-
-set     Set is an unordered set containing the uuid's of the elements belonging to the parent element. It is used to
-        check existence of elements within the parent in O(1).
-
-
-Context structure
------------------
-context[<context_uuid>]/lock
-    String containing the mutex owner key of that user/process/thread that is currently managing the context.
-    Example:
-        context[ctx-test]/lock
-            dc56647a-9539-446e-9a61-cbc67de328e4
-
-context[<context_uuid>]/topologies_<container>
-    Containers (see Note 1) with the topology_uuid's belonging to the context.
-    Examples:
-        context[ctx-test]/topologies_list
-            ['base-topo', 'other-topo']
-        context[ctx-test]/topologies_set
-            {'base-topo', 'other-topo'}
-
-context[<context_uuid>]/services_<container>
-    Containers (see Note 1) with the service_uuid's belonging to the context.
-    Examples:
-        context[ctx-test]/service_list
-            ['service-1', 'service-2']
-        context[ctx-test]/service_set
-            {'service-1', 'service-2'}
-
-
-Topology structure:
--------------------
-context[<context_uuid>]/topology[<topology_uuid>]
-    Hash set containing the attributes for the topology.
-    NOTE: Currently not used.
-    Example: <none>
-
-context[<context_uuid>]/topology[<topology_uuid>]/devices_<container>
-    Containers (see Note 1) with the device_uuid's belonging to the topology.
-    Examples:
-        context[ctx-test]/topology[base-topo]/device_list
-            ['dev1', 'dev2', 'dev3', 'dev4']
-        context[ctx-test]/topology[base-topo]/device_set
-            {'dev2', 'dev3', 'dev4', 'dev1'}
-
-context[<context_uuid>]/topology[<topology_uuid>]/links_<container>
-    Containers (see Note 1) with the link_uuid's belonging to the topology.
-    Examples:
-        context[ctx-test]/topology[base-topo]/link_list
-            ['dev1/to-dev2 ==> dev2/to-dev1', 'dev1/to-dev3 ==> dev3/to-dev1', 'dev2/to-dev1 ==> dev1/to-dev2', ...]
-        context[ctx-test]/topology[base-topo]/link_set
-            {'dev2/to-dev1 ==> dev1/to-dev2', 'dev1/to-dev2 ==> dev2/to-dev1', 'dev1/to-dev3 ==> dev3/to-dev1', ...}
-
-
-Device structure:
------------------
-context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]
-    Hash set containing the attributes for the device.
-    Defined attributes are:
-        device_type              : string
-        device_config            : string
-        device_operational_status: string "0" (KEEP_STATUS) / "-1" (DISABLED) / "1" (ENABLED)
-    Example: {'device_type': 'ROADM', 'device_config': '<config/>', 'device_operational_status': '1'}
-
-context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoints_<container>
-    Containers (see Note 1) with the device_endpoints_uuid's belonging to the device.
-    Examples:
-        context[ctx-test]/topology[base-topo]/device[dev1]/endpoints_list
-            ['to-dev2', 'to-dev3', 'to-dev4']
-        context[ctx-test]/topology[base-topo]/device[dev1]/endpoints_set
-            {'to-dev3', 'to-dev2', 'to-dev4'}
-
-
-Device Endpoint structure:
---------------------------
-context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoint[<device_endpoint_uuid>]
-    Hash set containing the attributes for the device_endpoint.
-    Defined attributes are:
-        port_type: string
-    Example: {'port_type': 'WDM'}
-
-
-Link structure:
----------------
-context[<context_uuid>]/topology[<topology_uuid>]/link[<link_uuid>]
-    Hash set containing the attributes for the link.
-    NOTE: Currently not used.
-    Example: <none>
-
-context[<context_uuid>]/topology[<topology_uuid>]/link[<link_uuid>]/endpoints_<container>
-    Containers (see Note 1) with the link_endpoint_uuid's belonging to the link.
-    Examples:
-        context[ctx-test]/topology[base-topo]/link[dev2/to-dev1 ==> dev1/to-dev2]/endpoints_list
-            ['dev2/to-dev1', 'dev1/to-dev2']
-        context[ctx-test]/topology[base-topo]/link[dev2/to-dev1 ==> dev1/to-dev2]/endpoints_set
-            {'dev2/to-dev1', 'dev1/to-dev2'}
-
-
-Link Endpoint structure:
-------------------------
-context[<context_uuid>]/topology[<topology_uuid>]/link[<link_uuid>]/endpoint[<link_endpoint_uuid>]
-    Hash set containing the attributes for the link_endpoint.
-    Defined attributes are:
-        device_uuid: string
-        endpoint_uuid: string
-    Example:
-        context[ctx-test]/topology[base-topo]/link[dev1/to-dev2 ==> dev2/to-dev1]/endpoint[dev1/to-dev2]
-            {'device_uuid': 'dev1', 'endpoint_uuid': 'to-dev2'}
-
-
-Service structure:
-------------------
-context[<context_uuid>]/service[<service_uuid>]
-    Hash set containing the attributes for the service.
-    Defined attributes are:
-        service_type  : string "0" (UNKNOWN) / "1" (L3NM) / "2" (L2NM) / "3" (TAPI_CONNECTIVITY_SERVICE)
-        service_config: string
-        service_state : string "0" (PLANNED) / "1" (ACTIVE) / "2" (PENDING_REMOVAL)
-    Example: ...
-
-context[<context_uuid>]/service[<service_uuid>]/endpoints_<container>
-    Containers (see Note 1) with the service_endpoint_uuid's belonging to the service.
-    Examples:
-        context[ctx-test]/service[service-1]/endpoints_list
-            ['base-topo:dev2/to-dev1', 'base-topo:dev3/to-dev1']
-        context[ctx-test]/service[service-1]/endpoints_set
-            {'base-topo:dev2/to-dev1', 'base-topo:dev3/to-dev1'}
-
-context[<context_uuid>]/service[<service_uuid>]/constraints_<container>
-    Containers (see Note 1) with the constraint_type's belonging to the service.
-    Examples:
-        context[ctx-test]/service[service-1]/constraints_list
-            ['latency_ms', 'hops']
-        context[ctx-test]/service[service-1]/constraints_set
-            {'latency_ms', 'hops'}
-
-
-Service Endpoint structure:
----------------------------
-context[<context_uuid>]/service[<service_uuid>]/endpoint[<service_endpoint_uuid>]
-    Hash set containing the attributes for the service_endpoint.
-    Defined attributes are:
-        topology_uuid: string
-        device_uuid: string
-        endpoint_uuid: string
-    Example:
-        context[ctx-test]/service[service-1]/endpoint[dev2/to-dev1]
-            {'topology_uuid': 'base-topo', 'device_uuid': 'dev2', 'endpoint_uuid': 'to-dev1'}
-
-Service Constraint structure:
------------------------------
-context[<context_uuid>]/service[<service_uuid>]/constraint[<constraint_type>]
-    Hash set containing the attributes for the constraint.
-    Defined attributes are:
-        constraint_value: string
-    Example:
-        context[ctx-test]/service[service-1]/constraint['latency_ms']
-            {'constraint_value': '100'}
diff --git a/src/common/database/api/context/service/Constraint.py b/src/common/database/api/context/service/Constraint.py
deleted file mode 100644
index 866f98c1761ba399a3d27fc440524515626a019d..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/service/Constraint.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.Keys import KEY_SERVICE_CONSTRAINT
-from common.database.api.entity._Entity import _Entity
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.service.Service import Service
-
-VALIDATORS = {
-    'constraint_value': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-}
-
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Constraint(_Entity):
-    def __init__(self, constraint_type : str, parent : 'Service'):
-        super().__init__(parent, constraint_type, KEY_SERVICE_CONSTRAINT, VALIDATORS, TRANSCODERS)
-
-    @property
-    def parent(self) -> 'Service': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def service(self) -> 'Service': return self.parent
-
-    @property
-    def service_uuid(self) -> str: return self.parent.service_uuid
-
-    @property
-    def constraint_type(self) -> str: return self._entity_uuid
-
-    def create(self, constraint_value : str) -> 'Constraint':
-        self.update(update_attributes={
-            'constraint_value': constraint_value,
-        })
-        self.parent.constraints.add(self.constraint_type)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Constraint':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        self.attributes.delete()
-        self.parent.constraints.delete(self.constraint_type)
-
-    def dump_id(self) -> Dict:
-        return {
-            'constraint_type': self.constraint_type,
-        }
-
-    def dump(self) -> Dict:
-        attributes = self.attributes.get()
-        result = self.dump_id()
-        result.update({'constraint_value': attributes.get('constraint_value', None)})
-        return result
diff --git a/src/common/database/api/context/service/Endpoint.py b/src/common/database/api/context/service/Endpoint.py
deleted file mode 100644
index 5f86002371f7c2288b75332061ba928c120ca621..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/service/Endpoint.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.topology.device.Endpoint import Endpoint as DeviceEndpoint
-from common.database.api.context.Keys import KEY_SERVICE_ENDPOINT
-from common.database.api.entity._Entity import _Entity
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.service.Service import Service
-
-VALIDATORS = {
-    'topology_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'device_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'endpoint_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-}
-
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Endpoint(_Entity):
-    def __init__(self, endpoint_uuid : str, parent : 'Service'):
-        super().__init__(parent, endpoint_uuid, KEY_SERVICE_ENDPOINT, VALIDATORS, TRANSCODERS)
-
-    @property
-    def parent(self) -> 'Service': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def service(self) -> 'Service': return self.parent
-
-    @property
-    def service_uuid(self) -> str: return self.parent.service_uuid
-
-    @property
-    def endpoint_uuid(self) -> str: return self._entity_uuid
-
-    def create(self, endpoint : DeviceEndpoint) -> 'Endpoint':
-        self.update(update_attributes={
-            'topology_uuid': endpoint.topology_uuid,
-            'device_uuid': endpoint.device_uuid,
-            'endpoint_uuid': endpoint.endpoint_uuid,
-        })
-        self.parent.endpoints.add(self.endpoint_uuid)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        self.attributes.delete()
-        self.parent.endpoints.delete(self.endpoint_uuid)
-
-    def dump_id(self) -> Dict:
-        attributes = self.attributes.get()
-        topology_uuid = attributes.get('topology_uuid', None)
-        device_uuid = attributes.get('device_uuid', None)
-        endpoint_uuid = attributes.get('endpoint_uuid', None)
-        endpoint = self.context.topology(topology_uuid).device(device_uuid).endpoint(endpoint_uuid)
-        return endpoint.dump_id()
-
-    def dump(self) -> Dict:
-        return self.dump_id()
diff --git a/src/common/database/api/context/service/Service.py b/src/common/database/api/context/service/Service.py
deleted file mode 100644
index 71fe3488db8691259e02d2c0ba76e7565adeaf15..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/service/Service.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.Keys import KEY_SERVICE, KEY_SERVICE_CONSTRAINTS, KEY_SERVICE_ENDPOINTS
-from common.database.api.context.service.Constraint import Constraint
-from common.database.api.context.service.Endpoint import Endpoint
-from common.database.api.context.service.ServiceState import ServiceState, to_servicestate_enum
-from common.database.api.context.service.ServiceType import ServiceType, to_servicetype_enum
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-
-VALIDATORS = {
-    'service_type': lambda v: v is not None and isinstance(v, ServiceType),
-    'service_config': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'service_state': lambda v: v is not None and isinstance(v, ServiceState),
-}
-
-TRANSCODERS = {
-    'service_type': {
-        ServiceType: lambda v: v.value,
-        int        : lambda v: to_servicetype_enum(v),
-        str        : lambda v: to_servicetype_enum(v),
-    },
-    'service_state': {
-        ServiceState: lambda v: v.value,
-        int         : lambda v: to_servicestate_enum(v),
-        str         : lambda v: to_servicestate_enum(v),
-    },
-}
-
-class Service(_Entity):
-    def __init__(self, service_uuid : str, parent : 'Context'):
-        super().__init__(parent, service_uuid, KEY_SERVICE, VALIDATORS, TRANSCODERS)
-        self._endpoints = EntityCollection(self, KEY_SERVICE_ENDPOINTS)
-        self._constraints = EntityCollection(self, KEY_SERVICE_CONSTRAINTS)
-
-    @property
-    def parent(self) -> 'Context': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self._parent
-
-    @property
-    def context_uuid(self) -> str: return self.context.context_uuid
-
-    @property
-    def service_uuid(self) -> str: return self._entity_uuid
-
-    @property
-    def endpoints(self) -> EntityCollection: return self._endpoints
-
-    @property
-    def constraints(self) -> EntityCollection: return self._constraints
-
-    def endpoint(self, endpoint_uuid : str) -> Endpoint: return Endpoint(endpoint_uuid, self)
-
-    def constraint(self, constraint_type : str) -> Constraint: return Constraint(constraint_type, self)
-
-    def create(self, service_type : ServiceType, service_config : str, service_state : ServiceState) -> 'Service':
-        self.update(update_attributes={
-            'service_type': service_type,
-            'service_config': service_config,
-            'service_state': service_state,
-        })
-        self.parent.services.add(self.service_uuid)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Service':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        for endpoint_uuid in self.endpoints.get(): self.endpoint(endpoint_uuid).delete()
-        for constraint_uuid in self.constraints.get(): self.constraint(constraint_uuid).delete()
-        self.attributes.delete()
-        self.parent.services.delete(self.service_uuid)
-
-    def dump_id(self) -> Dict:
-        return {
-            'contextId': self.context.dump_id(),
-            'cs_id': {'uuid': self.service_uuid},
-        }
-
-    def dump(self) -> Dict:
-        attributes = self.attributes.get()
-        service_type = attributes.get('service_type', None)
-        if isinstance(service_type, ServiceType): service_type = service_type.value
-        service_state = attributes.get('service_state', None)
-        if isinstance(service_state, ServiceState): service_state = service_state.value
-        service_config = attributes.get('service_config', None)
-        endpoints = [self.endpoint(endpoint_uuid).dump() for endpoint_uuid in self.endpoints.get()]
-        constraints = [self.constraint(constraint_type).dump() for constraint_type in self.constraints.get()]
-        return {
-            'cs_id': self.dump_id(),
-            'serviceType': service_type,
-            'endpointList': endpoints,
-            'constraint': constraints,
-            'serviceState': {'serviceState': service_state},
-            'serviceConfig': {'serviceConfig': service_config}
-        }
diff --git a/src/common/database/api/context/service/ServiceState.py b/src/common/database/api/context/service/ServiceState.py
deleted file mode 100644
index 3855138d99c40b743885f256fe3aafbaa44aa18b..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/service/ServiceState.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from enum import Enum
-
-class ServiceState(Enum):
-    PLANNED = 0
-    ACTIVE = 1
-    PENDING_REMOVAL = 2
-
-ANY_TO_ENUM = {
-    0: ServiceState.PLANNED,
-    1: ServiceState.ACTIVE,
-    2: ServiceState.PENDING_REMOVAL,
-
-    '0': ServiceState.PLANNED,
-    '1': ServiceState.ACTIVE,
-    '2': ServiceState.PENDING_REMOVAL,
-
-    'PLANNED': ServiceState.PLANNED,
-    'ACTIVE': ServiceState.ACTIVE,
-    'PENDING_REMOVAL': ServiceState.PENDING_REMOVAL,
-}
-
-def servicestate_enum_values():
-    return {m.value for m in ServiceState.__members__.values()}
-
-def to_servicestate_enum(int_or_str):
-    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
-    return ANY_TO_ENUM.get(int_or_str)
diff --git a/src/common/database/api/context/service/ServiceType.py b/src/common/database/api/context/service/ServiceType.py
deleted file mode 100644
index c779fc31c89c8746a547a38135f99d07716e2bbb..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/service/ServiceType.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from enum import Enum
-
-class ServiceType(Enum):
-    UNKNOWN = 0
-    L3NM = 1
-    L2NM = 2
-    TAPI_CONNECTIVITY_SERVICE = 3
-
-ANY_TO_ENUM = {
-    0: ServiceType.UNKNOWN,
-    1: ServiceType.L3NM,
-    2: ServiceType.L2NM,
-    3: ServiceType.TAPI_CONNECTIVITY_SERVICE,
-
-    '0': ServiceType.UNKNOWN,
-    '1': ServiceType.L3NM,
-    '2': ServiceType.L2NM,
-    '3': ServiceType.TAPI_CONNECTIVITY_SERVICE,
-
-    'UNKNOWN': ServiceType.UNKNOWN,
-    'L3NM': ServiceType.L3NM,
-    'L2NM': ServiceType.L2NM,
-    'TAPI_CONNECTIVITY_SERVICE': ServiceType.TAPI_CONNECTIVITY_SERVICE,
-}
-
-def servicetype_enum_values():
-    return {m.value for m in ServiceType.__members__.values()}
-
-def to_servicetype_enum(int_or_str):
-    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
-    return ANY_TO_ENUM.get(int_or_str)
diff --git a/src/common/database/api/context/topology/Topology.py b/src/common/database/api/context/topology/Topology.py
deleted file mode 100644
index de9cd67a41b822800f78f07208b090adf91b7bd8..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/Topology.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.Keys import KEY_TOPOLOGY, KEY_DEVICES, KEY_LINKS
-from common.database.api.context.topology.device.Device import Device
-from common.database.api.context.topology.link.Link import Link
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-
-VALIDATORS = {}  # no attributes accepted
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Topology(_Entity):
-    def __init__(self, topology_uuid : str, parent : 'Context'):
-        super().__init__(parent, topology_uuid, KEY_TOPOLOGY, VALIDATORS, TRANSCODERS)
-        self._devices = EntityCollection(self, KEY_DEVICES)
-        self._links = EntityCollection(self, KEY_LINKS)
-
-    @property
-    def parent(self) -> 'Context': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self._parent
-
-    @property
-    def context_uuid(self) -> str: return self.context.context_uuid
-
-    @property
-    def topology_uuid(self) -> str: return self._entity_uuid
-
-    @property
-    def devices(self) -> EntityCollection: return self._devices
-
-    @property
-    def links(self) -> EntityCollection: return self._links
-
-    def device(self, device_uuid : str) -> Device: return Device(device_uuid, self)
-
-    def link(self, link_uuid : str) -> Link: return Link(link_uuid, self)
-
-    def create(self) -> 'Topology':
-        self.parent.topologies.add(self.topology_uuid)
-        return self
-
-    def delete(self) -> None:
-        for device_uuid in self.devices.get(): self.device(device_uuid).delete()
-        for link_uuid in self.links.get(): self.link(link_uuid).delete()
-        self.attributes.delete()
-        self.parent.topologies.delete(self.topology_uuid)
-
-    def dump_id(self) -> Dict:
-        return {
-            'contextId': self.context.dump_id(),
-            'topoId': {'uuid': self.topology_uuid},
-        }
-
-    def dump(self) -> Dict:
-        devices = [self.device(device_uuid).dump() for device_uuid in self.devices.get()]
-        links = [self.link(link_uuid).dump() for link_uuid in self.links.get()]
-        return {
-            'topoId': self.dump_id(),
-            'device': devices,
-            'link': links,
-        }
diff --git a/src/common/database/api/context/topology/device/Device.py b/src/common/database/api/context/topology/device/Device.py
deleted file mode 100644
index 06c560051e1b3d7d930efb6ddca16d9096a2509e..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/device/Device.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.Keys import KEY_DEVICE, KEY_DEVICE_ENDPOINTS
-from common.database.api.context.topology.device.Endpoint import Endpoint
-from common.database.api.context.topology.device.OperationalStatus import OperationalStatus, to_operationalstatus_enum
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.topology.Topology import Topology
-
-VALIDATORS = {
-    'device_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'device_config': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'device_operational_status': lambda v: v is not None and isinstance(v, OperationalStatus),
-}
-
-TRANSCODERS = {
-    'device_operational_status': {
-        OperationalStatus: lambda v: v.value,
-        int              : lambda v: to_operationalstatus_enum(v),
-        str              : lambda v: to_operationalstatus_enum(v),
-    }
-}
-
-class Device(_Entity):
-    def __init__(self, device_uuid : str, parent : 'Topology'):
-        super().__init__(parent, device_uuid, KEY_DEVICE, VALIDATORS, TRANSCODERS)
-        self._endpoints = EntityCollection(self, KEY_DEVICE_ENDPOINTS)
-
-    @property
-    def parent(self) -> 'Topology': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def topology(self) -> 'Topology': return self.parent
-
-    @property
-    def topology_uuid(self) -> str: return self.parent.topology_uuid
-
-    @property
-    def device_uuid(self) -> str: return self._entity_uuid
-
-    @property
-    def endpoints(self) -> EntityCollection: return self._endpoints
-
-    def endpoint(self, endpoint_uuid : str) -> Endpoint: return Endpoint(endpoint_uuid, self)
-
-    def create(self, device_type : str, device_config : str, device_operational_status : OperationalStatus) -> 'Device':
-        self.update(update_attributes={
-            'device_type': device_type,
-            'device_config': device_config,
-            'device_operational_status': device_operational_status,
-        })
-        self.parent.devices.add(self.device_uuid)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Device':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        for endpoint_uuid in self.endpoints.get(): self.endpoint(endpoint_uuid).delete()
-        self.attributes.delete()
-        self.parent.devices.delete(self.device_uuid)
-
-    def dump_id(self) -> Dict:
-        return {
-            'device_id': {'uuid': self.device_uuid},
-        }
-
-    def dump(self) -> Dict:
-        attributes = self.attributes.get()
-        dev_op_status = attributes.get('device_operational_status', None)
-        if isinstance(dev_op_status, OperationalStatus): dev_op_status = dev_op_status.value
-        endpoints = [self.endpoint(endpoint_uuid).dump() for endpoint_uuid in self.endpoints.get()]
-        return {
-            'device_id': self.dump_id(),
-            'device_type': attributes.get('device_type', None),
-            'device_config': {'device_config': attributes.get('device_config', None)},
-            'devOperationalStatus': dev_op_status,
-            'endpointList': endpoints
-        }
diff --git a/src/common/database/api/context/topology/device/Endpoint.py b/src/common/database/api/context/topology/device/Endpoint.py
deleted file mode 100644
index 8ea516f3e50ad14fd133048570555abf4d872372..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/device/Endpoint.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.entity._Entity import _Entity
-from common.database.api.context.Keys import KEY_DEVICE_ENDPOINT
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.topology.Topology import Topology
-    from common.database.api.context.topology.device.Device import Device
-
-VALIDATORS = {
-    'port_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-}
-
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Endpoint(_Entity):
-    def __init__(self, endpoint_uuid : str, parent : 'Device'):
-        super().__init__(parent, endpoint_uuid, KEY_DEVICE_ENDPOINT, VALIDATORS, TRANSCODERS)
-
-    @property
-    def parent(self) -> 'Device': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def topology(self) -> 'Topology': return self.parent.topology
-
-    @property
-    def topology_uuid(self) -> str: return self.parent.topology_uuid
-
-    @property
-    def device(self) -> 'Device': return self.parent
-
-    @property
-    def device_uuid(self) -> str: return self.parent.device_uuid
-
-    @property
-    def endpoint_uuid(self) -> str: return self._entity_uuid
-
-    def create(self, port_type : str) -> 'Endpoint':
-        self.update(update_attributes={
-            'port_type': port_type
-        })
-        self.parent.endpoints.add(self.endpoint_uuid)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        self.attributes.delete()
-        self.parent.endpoints.delete(self.endpoint_uuid)
-
-    def dump_id(self) -> Dict:
-        return {
-            'topoId': self.topology.dump_id(),
-            'dev_id': self.device.dump_id(),
-            'port_id': {'uuid': self.endpoint_uuid},
-        }
-
-    def dump(self) -> Dict:
-        attributes = self.attributes.get()
-        return {
-            'port_id': self.dump_id(),
-            'port_type': attributes.get('port_type', None),
-        }
diff --git a/src/common/database/api/context/topology/device/OperationalStatus.py b/src/common/database/api/context/topology/device/OperationalStatus.py
deleted file mode 100644
index c726b32ef5b03feb8c0a04a586cf7ef4cd250263..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/device/OperationalStatus.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from enum import Enum
-
-class OperationalStatus(Enum):
-    KEEP_STATE = 0  # Do not change operational status of device (used in configure)
-    DISABLED = -1
-    ENABLED = 1
-
-ANY_TO_ENUM = {
-     1: OperationalStatus.ENABLED,
-     0: OperationalStatus.KEEP_STATE,
-    -1: OperationalStatus.DISABLED,
-
-     '1': OperationalStatus.ENABLED,
-     '0': OperationalStatus.KEEP_STATE,
-    '-1': OperationalStatus.DISABLED,
-
-    'enabled': OperationalStatus.ENABLED,
-    'disabled': OperationalStatus.DISABLED,
-    'keep_state': OperationalStatus.KEEP_STATE,
-}
-
-def operationalstatus_enum_values():
-    return {m.value for m in OperationalStatus.__members__.values()}
-
-def to_operationalstatus_enum(int_or_str):
-    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
-    return ANY_TO_ENUM.get(int_or_str)
diff --git a/src/common/database/api/context/topology/link/Endpoint.py b/src/common/database/api/context/topology/link/Endpoint.py
deleted file mode 100644
index 0fbdd26cbc222f7591ccfc8fbfdc6aa9062ead58..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/link/Endpoint.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.context.topology.device.Endpoint import Endpoint as DeviceEndpoint
-from common.database.api.context.Keys import KEY_LINK_ENDPOINT
-from common.database.api.entity._Entity import _Entity
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.topology.Topology import Topology
-    from common.database.api.context.topology.link.Link import Link
-
-VALIDATORS = {
-    'device_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-    'endpoint_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
-}
-
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Endpoint(_Entity):
-    def __init__(self, endpoint_uuid : str, parent : 'Link'):
-        super().__init__(parent, endpoint_uuid, KEY_LINK_ENDPOINT, VALIDATORS, TRANSCODERS)
-
-    @property
-    def parent(self) -> 'Link': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def topology(self) -> 'Topology': return self.parent.topology
-
-    @property
-    def topology_uuid(self) -> str: return self.parent.topology_uuid
-
-    @property
-    def link(self) -> 'Link': return self.parent
-
-    @property
-    def link_uuid(self) -> str: return self.parent.link_uuid
-
-    @property
-    def endpoint_uuid(self) -> str: return self._entity_uuid
-
-    def create(self, endpoint : DeviceEndpoint) -> 'Endpoint':
-        self.update(update_attributes={
-            'device_uuid': endpoint.device_uuid,
-            'endpoint_uuid': endpoint.endpoint_uuid,
-        })
-        self.parent.endpoints.add(self.endpoint_uuid)
-        return self
-
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
-        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
-        return self
-
-    def delete(self) -> None:
-        self.attributes.delete()
-        self.parent.endpoints.delete(self.endpoint_uuid)
-
-    def dump_id(self) -> Dict:
-        attributes = self.attributes.get()
-        device_uuid = attributes.get('device_uuid', None)
-        endpoint_uuid = attributes.get('endpoint_uuid', None)
-        endpoint = self.topology.device(device_uuid).endpoint(endpoint_uuid)
-        return endpoint.dump_id()
-
-    def dump(self) -> Dict:
-        return self.dump_id()
diff --git a/src/common/database/api/context/topology/link/Link.py b/src/common/database/api/context/topology/link/Link.py
deleted file mode 100644
index 41d72e0d2284b6569a550216e0539ce950fcdc14..0000000000000000000000000000000000000000
--- a/src/common/database/api/context/topology/link/Link.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING, Dict
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityCollection import EntityCollection
-from common.database.api.context.topology.link.Endpoint import Endpoint
-from common.database.api.context.Keys import KEY_LINK, KEY_LINK_ENDPOINTS
-
-if TYPE_CHECKING:
-    from common.database.api.context.Context import Context
-    from common.database.api.context.topology.Topology import Topology
-
-VALIDATORS = {}  # no attributes accepted
-TRANSCODERS = {} # no transcoding applied to attributes
-
-class Link(_Entity):
-    def __init__(self, link_uuid : str, parent : 'Topology'):
-        super().__init__(parent, link_uuid, KEY_LINK, VALIDATORS, TRANSCODERS)
-        self._endpoints = EntityCollection(self, KEY_LINK_ENDPOINTS)
-
-    @property
-    def parent(self) -> 'Topology': return self._parent
-
-    @property
-    def context(self) -> 'Context': return self.parent.context
-
-    @property
-    def context_uuid(self) -> str: return self.parent.context_uuid
-
-    @property
-    def topology(self) -> 'Topology': return self.parent
-
-    @property
-    def topology_uuid(self) -> str: return self.parent.topology_uuid
-
-    @property
-    def link_uuid(self) -> str: return self._entity_uuid
-
-    @property
-    def endpoints(self) -> EntityCollection: return self._endpoints
-
-    def endpoint(self, link_endpoint_uuid : str) -> Endpoint: return Endpoint(link_endpoint_uuid, self)
-
-    def create(self) -> 'Link':
-        self.parent.links.add(self.link_uuid)
-        return self
-
-    def delete(self) -> None:
-        for endpoint_uuid in self.endpoints.get(): self.endpoint(endpoint_uuid).delete()
-        self.attributes.delete()
-        self.parent.links.delete(self.link_uuid)
-
-    def dump_id(self) -> Dict:
-        return {
-            'link_id': {'uuid': self.link_uuid},
-        }
-
-    def dump(self) -> Dict:
-        endpoints = [self.endpoint(link_endpoint_uuid).dump() for link_endpoint_uuid in self.endpoints.get()]
-        return {
-            'link_id': self.dump_id(),
-            'endpointList': endpoints
-        }
diff --git a/src/common/database/api/entity/EntityAttributes.py b/src/common/database/api/entity/EntityAttributes.py
deleted file mode 100644
index b3e553453f7ebdda9439c81e11e1833ab32f1e41..0000000000000000000000000000000000000000
--- a/src/common/database/api/entity/EntityAttributes.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from __future__ import annotations
-import copy
-from typing import Any, Dict, TYPE_CHECKING
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-from common.database.api.entity.Tools import format_key
-
-if TYPE_CHECKING:
-    from common.database.api.entity._Entity import _Entity
-
-class EntityAttributes:
-    def __init__(self, parent : '_Entity', entity_key : str, validators : Dict, transcoders : Dict = {}):
-        self._parent = parent
-        self._database_engine : _DatabaseEngine = self._parent.database_engine
-        self._entity_key = format_key(entity_key, self._parent)
-        self._validators = validators
-        self._transcoders = transcoders
-
-    def validate(self, update_attributes, remove_attributes, attribute_name):
-        remove_attributes.discard(attribute_name)
-        value = update_attributes.pop(attribute_name, None)
-        if value is None: return
-        validator = self._validators.get(attribute_name)
-        if validator is None: return
-        if not validator(value): raise AttributeError('{} is invalid'.format(attribute_name))
-
-    def transcode(self, attribute_name, attribute_value):
-        transcoder_set = self._transcoders.get(attribute_name, {})
-        transcoder = transcoder_set.get(type(attribute_value))
-        return attribute_value if transcoder is None else transcoder(attribute_value)
-
-    def get(self, attributes=[]) -> Dict[str, Any]:
-        return {
-            k:self.transcode(k, v)
-            for k,v in self._database_engine.dict_get(self._entity_key, fields=attributes).items()
-        }
-
-    def update(self, update_attributes={}, remove_attributes=[]):
-        remove_attributes = set(remove_attributes)
-        copy_update_attributes = copy.deepcopy(update_attributes)
-        copy_remove_attributes = copy.deepcopy(remove_attributes)
-
-        for attribute_name in self._validators.keys():
-            self.validate(copy_update_attributes, copy_remove_attributes, attribute_name)
-            attribute_value = update_attributes.get(attribute_name)
-            if attribute_value is None: continue
-            update_attributes[attribute_name] = self.transcode(attribute_name, attribute_value)
-
-        if len(copy_update_attributes) > 0:
-            raise AttributeError('Unexpected update_attributes: {}'.format(str(copy_update_attributes)))
-
-        if len(copy_remove_attributes) > 0:
-            raise AttributeError('Unexpected remove_attributes: {}'.format(str(copy_remove_attributes)))
-
-        self._database_engine.dict_update(self._entity_key, update_attributes, remove_attributes)
-        return self
-
-    def delete(self, attributes=[]):
-        self._database_engine.dict_delete(self._entity_key, attributes)
diff --git a/src/common/database/api/entity/EntityCollection.py b/src/common/database/api/entity/EntityCollection.py
deleted file mode 100644
index ed155370b43a91c7d64c921433d65327733fba54..0000000000000000000000000000000000000000
--- a/src/common/database/api/entity/EntityCollection.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from __future__ import annotations
-from typing import TYPE_CHECKING
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-from common.database.api.entity.Tools import format_key
-
-if TYPE_CHECKING:
-    from common.database.api.entity._Entity import _Entity
-
-class EntityCollection:
-    def __init__(self, parent : '_Entity', entity_key : str):
-        self._parent = parent
-        self._database_engine : _DatabaseEngine = self._parent.database_engine
-        self._entity_key_list = format_key(entity_key, self._parent, container_name='_list')
-        self._entity_key_set = format_key(entity_key, self._parent, container_name='_set')
-
-    def add(self, entity_uuid : str) -> None:
-        if self._database_engine.set_has(self._entity_key_set, entity_uuid): return
-        self._database_engine.set_add(self._entity_key_set, entity_uuid)
-        self._database_engine.list_push_last(self._entity_key_list, entity_uuid)
-
-    def get(self):
-        return self._database_engine.list_get_all(self._entity_key_list)
-
-    def contains(self, entity_uuid : str):
-        return self._database_engine.set_has(self._entity_key_set, entity_uuid)
-
-    def delete(self, entity_uuid : str) -> None:
-        if not self._database_engine.set_has(self._entity_key_set, entity_uuid): return
-        self._database_engine.set_remove(self._entity_key_set, entity_uuid)
-        self._database_engine.list_remove_first_occurrence(self._entity_key_list, entity_uuid)
diff --git a/src/common/database/api/entity/_Entity.py b/src/common/database/api/entity/_Entity.py
deleted file mode 100644
index 784ffbf61c7958329c28f1c7ac371f5126289971..0000000000000000000000000000000000000000
--- a/src/common/database/api/entity/_Entity.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import Any, Callable, Dict
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-from common.database.api.entity.EntityAttributes import EntityAttributes
-
-class _Entity:
-    def __init__(self, parent, entity_uuid : str, attributes_key : str,
-                 attribute_validators : Dict[str, Callable[[Any], bool]],
-                 attribute_transcoders : Dict[str, Dict[Any, Callable[[Any], Any]]]):
-        if not isinstance(parent, _Entity):
-            raise AttributeError('parent must be an instance of _Entity')
-        if (not isinstance(entity_uuid, str)) or (len(entity_uuid) == 0):
-            raise AttributeError('entity_uuid must be a non-empty instance of str')
-        if (not isinstance(attributes_key, str)) or (len(attributes_key) == 0):
-            raise AttributeError('attributes_key must be a non-empty instance of str')
-        if not isinstance(attribute_validators, dict):
-            raise AttributeError('attribute_validators must be an instance of dict')
-        if not isinstance(attribute_transcoders, dict):
-            raise AttributeError('attribute_transcoders must be an instance of dict')
-
-        self._entity_uuid = entity_uuid
-        self._parent = parent
-        self._attributes = EntityAttributes(self, attributes_key, attribute_validators,
-                                            transcoders=attribute_transcoders)
-
-    @property
-    def parent(self) -> '_Entity': return self._parent
-
-    @property
-    def database_engine(self) -> _DatabaseEngine: return self._parent.database_engine
-
-    @property
-    def attributes(self) -> EntityAttributes: return self._attributes
-
-    def load(self):
-        raise NotImplementedError()
-
-    def create(self):
-        raise NotImplementedError()
-
-    def delete(self):
-        raise NotImplementedError()
-
-    def dump_id(self) -> Dict:
-        raise NotImplementedError()
-
-    def dump(self) -> Dict:
-        raise NotImplementedError()
diff --git a/src/common/database/engines/_DatabaseEngine.py b/src/common/database/engines/_DatabaseEngine.py
deleted file mode 100644
index 962608826a679a9028c6061d8ab1b3afc9b11ded..0000000000000000000000000000000000000000
--- a/src/common/database/engines/_DatabaseEngine.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from typing import Dict, List, Set, Tuple
-
-class _DatabaseEngine:
-    def __init__(self, **settings) -> None:
-        raise NotImplementedError()
-
-    def lock(self) -> Tuple[bool, str]:
-        raise NotImplementedError()
-
-    def unlock(self, owner_key : str) -> bool:
-        raise NotImplementedError()
-
-    def keys(self) -> list:
-        raise NotImplementedError()
-
-    def exists(self, key_name : str) -> bool:
-        raise NotImplementedError()
-
-    def delete(self, key_name : str) -> bool:
-        raise NotImplementedError()
-
-    def dict_get(self, key_name : str, fields : List[str] = []) -> Dict[str, str]:
-        raise NotImplementedError()
-
-    def dict_update(self, key_name : str, update_fields : Dict[str,str] = {}, remove_fields : Set[str] = set()) -> None:
-        raise NotImplementedError()
-
-    def dict_delete(self, key_name : str, fields : List[str] = []) -> None:
-        raise NotImplementedError()
-
-    def list_get_all(self, key_name : str) -> List[str]:
-        raise NotImplementedError()
-
-    def list_push_last(self, key_name : str, item : str) -> None:
-        raise NotImplementedError()
-
-    def list_remove_first_occurrence(self, key_name : str, item: str) -> None:
-        raise NotImplementedError()
-
-    def set_add(self, key_name : str, item : str) -> None:
-        raise NotImplementedError()
-
-    def set_has(self, key_name : str, item : str) -> bool:
-        raise NotImplementedError()
-
-    def set_remove(self, key_name : str, item : str) -> None:
-        raise NotImplementedError()
-
-    def dump(self) -> List[str]:
-        raise NotImplementedError()
diff --git a/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py b/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py
deleted file mode 100644
index 80c1669c73f603ac1eb477d883196b144d1ea143..0000000000000000000000000000000000000000
--- a/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import copy, logging, threading, uuid
-from typing import Dict, List, Set, Tuple, Union
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-
-LOGGER = logging.getLogger(__name__)
-
-def get_or_create_dict(keys : Dict[str, Union[Dict, List, Set]], key_name : str) -> Dict:
-    container = keys.get(key_name, None)
-    if container is None: container = keys.setdefault(key_name, dict())
-    if not isinstance(container, dict): raise Exception('Key({}) is not a dict'.format(key_name))
-    return container
-
-def get_or_create_list(keys : Dict[str, Union[Dict, List, Set]], key_name : str) -> List:
-    container = keys.get(key_name, None)
-    if container is None: container = keys.setdefault(key_name, list())
-    if not isinstance(container, list): raise Exception('Key({}) is not a list'.format(key_name))
-    return container
-
-def get_or_create_set(keys : Dict[str, Union[Dict, List, Set]], key_name : str) -> Set:
-    container = keys.get(key_name, None)
-    if container is None: container = keys.setdefault(key_name, set())
-    if not isinstance(container, set): raise Exception('Key({}) is not a set'.format(key_name))
-    return container
-
-class InMemoryDatabaseEngine(_DatabaseEngine):
-    def __init__(self, **settings):
-        self._internal_lock = threading.Lock()
-        self._external_lock = threading.Lock()
-        self._owner_key = None
-        self._keys = {} # name => set/list/dict/string
-
-    def lock(self) -> Tuple[bool, str]:
-        owner_key = str(uuid.uuid4())
-        with self._internal_lock:
-            acquired = self._external_lock.acquire()
-            if not acquired: return False, None
-            self._owner_key = owner_key
-            return True, owner_key
-
-    def unlock(self, owner_key : str) -> bool:
-        with self._internal_lock:
-            if self._owner_key != owner_key: return False
-            self._external_lock.release()
-            self._owner_key = None
-            return True
-
-    def keys(self) -> list:
-        with self._internal_lock:
-            return copy.deepcopy(list(self._keys.keys()))
-
-    def exists(self, key_name : str) -> bool:
-        with self._internal_lock:
-            return key_name in self._keys
-
-    def delete(self, key_name : str) -> bool:
-        with self._internal_lock:
-            if key_name not in self._keys: return False
-            del self._keys[key_name]
-            return True
-
-    def dict_get(self, key_name : str, fields : List[str] = []) -> Dict[str, str]:
-        with self._internal_lock:
-            container = get_or_create_dict(self._keys, key_name)
-            if len(fields) == 0: fields = container.keys()
-            return copy.deepcopy({
-                field_name : field_value for field_name,field_value in container.items() if field_name in fields
-            })
-
-    def dict_update(self, key_name : str, update_fields : Dict[str,str] = {}, remove_fields : Set[str] = set()) -> None:
-        with self._internal_lock:
-            container = get_or_create_dict(self._keys, key_name)
-            for field in list(remove_fields): container.pop(field, None)
-            container.update(update_fields)
-
-    def dict_delete(self, key_name : str, fields : List[str] = []) -> None:
-        with self._internal_lock:
-            container = get_or_create_dict(self._keys, key_name)
-            if len(fields) == 0: fields = container.keys()
-            for field in list(fields): container.pop(field, None)
-
-    def list_get_all(self, key_name : str) -> List[str]:
-        with self._internal_lock:
-            container = get_or_create_list(self._keys, key_name)
-            return copy.deepcopy(container)
-
-    def list_push_last(self, key_name : str, item : str) -> None:
-        with self._internal_lock:
-            container = get_or_create_list(self._keys, key_name)
-            container.append(item)
-
-    def list_remove_first_occurrence(self, key_name : str, item: str) -> None:
-        with self._internal_lock:
-            container = get_or_create_list(self._keys, key_name)
-            container.remove(item)
-
-    def set_add(self, key_name : str, item : str) -> None:
-        with self._internal_lock:
-            container = get_or_create_set(self._keys, key_name)
-            container.add(item)
-
-    def set_has(self, key_name : str, item : str) -> bool:
-        with self._internal_lock:
-            container = get_or_create_set(self._keys, key_name)
-            return item in container
-
-    def set_remove(self, key_name : str, item : str) -> None:
-        with self._internal_lock:
-            container = get_or_create_set(self._keys, key_name)
-            container.discard(item)
-
-    def dump(self) -> List[Tuple[str, str, str]]:
-        with self._internal_lock:
-            entries = []
-            for key_name,key_value in self._keys.items():
-                entries.append((key_name, type(key_value).__name__, str(key_value)))
-        return entries
diff --git a/src/common/database/engines/redis/RedisDatabaseEngine.py b/src/common/database/engines/redis/RedisDatabaseEngine.py
deleted file mode 100644
index a4b31aa13debd501112eb04b4353b4593dbc5b04..0000000000000000000000000000000000000000
--- a/src/common/database/engines/redis/RedisDatabaseEngine.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import uuid
-from typing import Dict, List, Set, Tuple
-from redis.client import Redis
-from common.Settings import get_setting
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-from common.database.engines.redis.Mutex import Mutex
-
-KEY_ENTIRE_DATABASE_LOCK = 'everything'
-
-class RedisDatabaseEngine(_DatabaseEngine):
-    def __init__(self, **settings) -> None:
-        host = get_setting('REDIS_SERVICE_HOST', settings=settings)
-        port = get_setting('REDIS_SERVICE_PORT', settings=settings)
-        dbid = get_setting('REDIS_DATABASE_ID',  settings=settings)
-        self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
-        self._mutex = Mutex(self._client)
-
-    def lock(self) -> Tuple[bool, str]:
-        owner_key = str(uuid.uuid4())
-        return self._mutex.acquire(KEY_ENTIRE_DATABASE_LOCK, owner_key=owner_key, blocking=True)
-
-    def unlock(self, owner_key : str) -> bool:
-        return self._mutex.release(KEY_ENTIRE_DATABASE_LOCK, owner_key)
-
-    def keys(self) -> list:
-        return [k.decode('UTF-8') for k in self._client.keys()]
-
-    def exists(self, key_name : str) -> bool:
-        return self._client.exists(key_name) == 1
-
-    def delete(self, key_name : str) -> bool:
-        return self._client.delete(key_name) == 1
-
-    def set_has(self, key_name : str, item : str) -> bool:
-        return self._client.sismember(key_name, item) == 1
-
-    def set_add(self, key_name : str, item : str) -> None:
-        self._client.sadd(key_name, item)
-
-    def set_remove(self, key_name : str, item : str) -> None:
-        self._client.srem(key_name, item)
-
-    def list_push_last(self, key_name : str, item : str) -> None:
-        self._client.rpush(key_name, item)
-
-    def list_get_all(self, key_name : str) -> List[str]:
-        return list(map(lambda m: m.decode('UTF-8'), self._client.lrange(key_name, 0, -1)))
-
-    def list_remove_first_occurrence(self, key_name : str, item: str) -> None:
-        self._client.lrem(key_name, 1, item)
-
-    def dict_get(self, key_name : str, fields : List[str] = []) -> Dict[str, str]:
-        if len(fields) == 0:
-            keys_values = self._client.hgetall(key_name).items()
-        else:
-            fields = list(fields)
-            keys_values = zip(fields, self._client.hmget(key_name, fields))
-
-        attributes = {}
-        for key,value in keys_values:
-            str_key = key.decode('UTF-8') if isinstance(key, bytes) else key
-            attributes[str_key] = value.decode('UTF-8') if isinstance(value, bytes) else value
-        return attributes
-
-    def dict_update(
-        self, key_name : str, update_fields : Dict[str, str] = {}, remove_fields : Set[str] = set()) -> None:
-        if len(remove_fields) > 0:
-            self._client.hdel(key_name, *remove_fields)
-
-        if len(update_fields) > 0:
-            self._client.hset(key_name, mapping=update_fields)
-
-    def dict_delete(self, key_name : str, fields : List[str] = []) -> None:
-        if len(fields) == 0:
-            self._client.delete(key_name)
-        else:
-            self._client.hdel(key_name, set(fields))
-
-    def dump(self) -> List[Tuple[str, str, str]]:
-        entries = []
-        for key_name in self._client.keys():
-            key_name = key_name.decode('UTF-8')
-            key_type = self._client.type(key_name)
-            if key_type is not None: key_type = key_type.decode('UTF-8')
-            key_type = {
-                'hash'  : 'dict',
-                'list'  : 'list',
-                'set'   : 'set',
-                'string': 'str',
-            }.get(key_type)
-            key_content = {
-                'dict': lambda key: {k.decode('UTF-8'):v.decode('UTF-8') for k,v in self._client.hgetall(key).items()},
-                'list': lambda key: [m.decode('UTF-8') for m in self._client.lrange(key, 0, -1)],
-                'set' : lambda key: {m.decode('UTF-8') for m in self._client.smembers(key)},
-                'str' : lambda key: self._client.get(key).decode('UTF-8'),
-            }.get(key_type, lambda key: 'UNSUPPORTED_TYPE')
-            entries.append((key_name, key_type, key_content(key_name)))
-        return entries
diff --git a/src/common/database/tests/script.py b/src/common/database/tests/script.py
deleted file mode 100644
index 78efa9d6aaaf7c5288faf112b70d200b917b82f1..0000000000000000000000000000000000000000
--- a/src/common/database/tests/script.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import json, logging, time
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.api.context.service.ServiceState import ServiceState
-from common.database.api.context.service.ServiceType import ServiceType
-from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
-
-LOGGER = logging.getLogger(__name__)
-
-def populate_example(
-    database : Database, context_uuid : str = DEFAULT_CONTEXT_ID, topology_uuid : str = DEFAULT_TOPOLOGY_ID,
-    add_devices : bool = True, add_links : bool = True, add_services : bool = True):
-
-    if add_links:
-        if not add_devices: raise Exception('add_links requires add_devices')
-
-    if add_services:
-        if not add_devices: raise Exception('add_services requires add_devices')
-        if not add_links: raise Exception('add_services requires add_links')
-
-    with database:
-        database.clear_all()
-
-        context = database.context(context_uuid).create()
-        topology = context.topology(topology_uuid).create()
-
-        if add_devices:
-            dev_1 = topology.device('DEV1').create(
-                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-            dev1_ep2 = dev_1.endpoint('EP2').create(port_type='WDM')
-            dev1_ep3 = dev_1.endpoint('EP3').create(port_type='WDM')
-            dev1_ep4 = dev_1.endpoint('EP4').create(port_type='WDM')
-            dev1_ep5 = dev_1.endpoint('EP5').create(port_type='OCH')
-            dev1_ep6 = dev_1.endpoint('EP6').create(port_type='OCH')
-            dev1_ep7 = dev_1.endpoint('EP7').create(port_type='OCH')
-
-            dev_2 = topology.device('DEV2').create(
-                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-            dev2_ep1 = dev_2.endpoint('EP1').create(port_type='WDM')
-            dev2_ep3 = dev_2.endpoint('EP3').create(port_type='WDM')
-            dev2_ep4 = dev_2.endpoint('EP4').create(port_type='WDM')
-            dev2_ep5 = dev_2.endpoint('EP5').create(port_type='OCH')
-            dev2_ep6 = dev_2.endpoint('EP6').create(port_type='OCH')
-            dev2_ep7 = dev_2.endpoint('EP7').create(port_type='OCH')
-
-            dev_3 = topology.device('DEV3').create(
-                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-            dev3_ep1 = dev_3.endpoint('EP1').create(port_type='WDM')
-            dev3_ep2 = dev_3.endpoint('EP2').create(port_type='WDM')
-            dev3_ep4 = dev_3.endpoint('EP4').create(port_type='WDM')
-            dev3_ep5 = dev_3.endpoint('EP5').create(port_type='OCH')
-            dev3_ep6 = dev_3.endpoint('EP6').create(port_type='OCH')
-            dev3_ep7 = dev_3.endpoint('EP7').create(port_type='OCH')
-
-            dev_4 = topology.device('DEV4').create(
-                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-            dev4_ep1 = dev_4.endpoint('EP1').create(port_type='WDM')
-            dev4_ep2 = dev_4.endpoint('EP2').create(port_type='WDM')
-            dev4_ep3 = dev_4.endpoint('EP3').create(port_type='WDM')
-            dev4_ep5 = dev_4.endpoint('EP5').create(port_type='OCH')
-            dev4_ep6 = dev_4.endpoint('EP6').create(port_type='OCH')
-            dev4_ep7 = dev_4.endpoint('EP7').create(port_type='OCH')
-
-        if add_links:
-            link_dev1_to_dev2 = topology.link('DEV1/EP2 ==> DEV2/EP1').create()
-            link_dev1_to_dev2.endpoint('DEV1/EP2').create(dev1_ep2)
-            link_dev1_to_dev2.endpoint('DEV2/EP1').create(dev2_ep1)
-
-            link_dev1_to_dev3 = topology.link('DEV1/EP3 ==> DEV3/EP1').create()
-            link_dev1_to_dev3.endpoint('DEV1/EP3').create(dev1_ep3)
-            link_dev1_to_dev3.endpoint('DEV3/EP1').create(dev3_ep1)
-
-            link_dev1_to_dev4 = topology.link('DEV1/EP4 ==> DEV4/EP1').create()
-            link_dev1_to_dev4.endpoint('DEV1/EP4').create(dev1_ep4)
-            link_dev1_to_dev4.endpoint('DEV4/EP1').create(dev4_ep1)
-
-            link_dev2_to_dev1 = topology.link('DEV2/EP1 ==> DEV1/EP2').create()
-            link_dev2_to_dev1.endpoint('DEV2/EP1').create(dev2_ep1)
-            link_dev2_to_dev1.endpoint('DEV1/EP2').create(dev1_ep2)
-
-            link_dev2_to_dev3 = topology.link('DEV2/EP3 ==> DEV3/EP2').create()
-            link_dev2_to_dev3.endpoint('DEV2/EP3').create(dev2_ep3)
-            link_dev2_to_dev3.endpoint('DEV3/EP2').create(dev3_ep2)
-
-            link_dev2_to_dev4 = topology.link('DEV2/EP4 ==> DEV4/EP2').create()
-            link_dev2_to_dev4.endpoint('DEV2/EP4').create(dev2_ep4)
-            link_dev2_to_dev4.endpoint('DEV4/EP2').create(dev4_ep2)
-
-            link_dev3_to_dev1 = topology.link('DEV3/EP1 ==> DEV1/EP3').create()
-            link_dev3_to_dev1.endpoint('DEV3/EP1').create(dev3_ep1)
-            link_dev3_to_dev1.endpoint('DEV1/EP3').create(dev1_ep3)
-
-            link_dev3_to_dev2 = topology.link('DEV3/EP2 ==> DEV2/EP3').create()
-            link_dev3_to_dev2.endpoint('DEV3/EP2').create(dev3_ep2)
-            link_dev3_to_dev2.endpoint('DEV2/EP3').create(dev2_ep3)
-
-            link_dev3_to_dev4 = topology.link('DEV3/EP4 ==> DEV4/EP3').create()
-            link_dev3_to_dev4.endpoint('DEV3/EP4').create(dev3_ep4)
-            link_dev3_to_dev4.endpoint('DEV4/EP3').create(dev4_ep3)
-
-            link_dev4_to_dev1 = topology.link('DEV4/EP1 ==> DEV1/EP4').create()
-            link_dev4_to_dev1.endpoint('DEV4/EP1').create(dev4_ep1)
-            link_dev4_to_dev1.endpoint('DEV1/EP4').create(dev1_ep4)
-
-            link_dev4_to_dev2 = topology.link('DEV4/EP2 ==> DEV2/EP4').create()
-            link_dev4_to_dev2.endpoint('DEV4/EP2').create(dev4_ep2)
-            link_dev4_to_dev2.endpoint('DEV2/EP4').create(dev2_ep4)
-
-            link_dev4_to_dev3 = topology.link('DEV4/EP3 ==> DEV3/EP4').create()
-            link_dev4_to_dev3.endpoint('DEV4/EP3').create(dev4_ep3)
-            link_dev4_to_dev3.endpoint('DEV3/EP4').create(dev3_ep4)
-
-        if add_services:
-            service = context.service('S01').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
-            service.endpoint('S01/EP01').create(dev1_ep5)
-            service.endpoint('S01/EP02').create(dev2_ep5)
-            service.endpoint('S01/EP03').create(dev3_ep5)
-            service.endpoint('S01/EP04').create(dev4_ep5)
-
-            service = context.service('S02').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
-            service.endpoint('S02/EP01').create(dev1_ep6)
-            service.endpoint('S02/EP02').create(dev2_ep6)
-            service.endpoint('S02/EP03').create(dev3_ep6)
-            service.endpoint('S02/EP04').create(dev4_ep6)
-
-            service = context.service('S03').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
-            service.endpoint('S03/EP01').create(dev1_ep7)
-            service.endpoint('S03/EP02').create(dev2_ep7)
-            service.endpoint('S03/EP03').create(dev3_ep7)
-            service.endpoint('S03/EP04').create(dev4_ep7)
-
-def sequence(database : Database):
-    populate_example(database)
-
-    with database:
-        LOGGER.info('Dump:')
-        for entry in database.dump():
-            LOGGER.info('  {}'.format(entry))
-
-    with database:
-        t0 = time.time()
-        context = database.context(DEFAULT_CONTEXT_ID).create()
-        json_context = context.dump()
-        t1 = time.time()
-        LOGGER.info(json.dumps(json_context))
-        LOGGER.info('Dump elapsed: {}'.format(1000.0 * (t1-t0)))
-
-    with database:
-        database.context(DEFAULT_CONTEXT_ID).delete()
-        LOGGER.info('Dump:')
-        for entry in database.dump():
-            LOGGER.info('  {}'.format(entry))
diff --git a/src/common/database/tests/test_engine_inmemory.py b/src/common/database/tests/test_engine_inmemory.py
deleted file mode 100644
index e3afd995573b04926bbec080e2dd5797eac6ce00..0000000000000000000000000000000000000000
--- a/src/common/database/tests/test_engine_inmemory.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import logging
-from common.database.Factory import get_database, DatabaseEngineEnum
-from common.database.tests.script import sequence
-
-logging.basicConfig(level=logging.INFO)
-
-def test_inmemory():
-    database = get_database(
-        engine=DatabaseEngineEnum.INMEMORY)
-    sequence(database)
diff --git a/src/common/database/tests/test_engine_redis.py b/src/common/database/tests/test_engine_redis.py
deleted file mode 100644
index e68600db6a8e196890526c97d79b132c03dd4b32..0000000000000000000000000000000000000000
--- a/src/common/database/tests/test_engine_redis.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import logging
-from common.database.Factory import get_database, DatabaseEngineEnum
-from common.database.tests.script import sequence
-
-logging.basicConfig(level=logging.INFO)
-
-def test_redis():
-    database = get_database(
-        engine=DatabaseEngineEnum.REDIS, REDISDB_SERVICE_HOST='127.0.0.1', REDISDB_SERVICE_PORT=31926,
-        REDISDB_DATABASE_ID=0)
-    sequence(database)
diff --git a/src/common/database/tests/test_unitary.py b/src/common/database/tests/test_unitary.py
deleted file mode 100644
index 8589c7cfacb04505c75108f510b5f7bcca4005a2..0000000000000000000000000000000000000000
--- a/src/common/database/tests/test_unitary.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import logging, pytest
-from common.database.api.Database import Database
-from common.database.api.entity._Entity import _Entity
-from common.database.api.entity.EntityAttributes import EntityAttributes
-from common.database.api.Exceptions import WrongDatabaseEngine
-from common.database.engines._DatabaseEngine import _DatabaseEngine
-from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
-
-logging.basicConfig(level=logging.INFO)
-
-def test_database_gets_none_database_engine():
-    # should fail with invalid database engine
-    with pytest.raises(WrongDatabaseEngine) as e:
-        Database(None)
-    assert str(e.value) == 'database_engine must inherit from _DatabaseEngine'
-
-def test_database_gets_correct_database_engine():
-    # should work
-    assert Database(InMemoryDatabaseEngine()) is not None
-
-def test_entity_gets_invalid_parameters():
-
-    # should fail with invalid parent
-    with pytest.raises(AttributeError) as e:
-        _Entity(None, 'valid-uuid', 'valid-attributes-key', {}, {})
-    assert str(e.value) == 'parent must be an instance of _Entity'
-
-    # should fail with invalid entity uuid
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {})
-    assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
-
-    # should fail with invalid entity uuid
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {})
-    assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
-
-    # should fail with invalid attribute key
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {})
-    assert str(e.value) == 'attributes_key must be a non-empty instance of str'
-
-    # should fail with invalid attribute key
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {})
-    assert str(e.value) == 'attributes_key must be a non-empty instance of str'
-
-    # should fail with invalid attribute validators
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {})
-    assert str(e.value) == 'attribute_validators must be an instance of dict'
-
-    # should fail with invalid attribute transcoders
-    with pytest.raises(AttributeError) as e:
-        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, [])
-    assert str(e.value) == 'attribute_transcoders must be an instance of dict'
-
-    # should work
-    assert _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None
-
-def test_entity_attributes_gets_invalid_parameters():
-
-    # should work
-    root_entity = Database(InMemoryDatabaseEngine())
-    validators = {'attr': lambda v: True}
-    entity_attrs = EntityAttributes(root_entity, 'valid-attributes-key', validators, {})
-    assert entity_attrs is not None
-
-    with pytest.raises(AttributeError) as e:
-        entity_attrs.update(update_attributes={'non-defined-attr': 'random-value'})
-    assert str(e.value) == "Unexpected update_attributes: {'non-defined-attr': 'random-value'}"
-
-    with pytest.raises(AttributeError) as e:
-        entity_attrs.update(remove_attributes=['non-defined-attr'])
-    assert str(e.value) == "Unexpected remove_attributes: {'non-defined-attr'}"
diff --git a/src/common/exceptions/ServiceException.py b/src/common/exceptions/ServiceException.py
deleted file mode 100644
index 864e44e2345dcc53ae54fb9dcc35ea1efd5cdd52..0000000000000000000000000000000000000000
--- a/src/common/exceptions/ServiceException.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import grpc
-
-class ServiceException(Exception):
-    def __init__(self, code : grpc.StatusCode, details : str) -> None:
-        self.code = code
-        self.details = details
-        super().__init__(self.details)
diff --git a/src/common/message_broker/Constants.py b/src/common/message_broker/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..671f11add119bf5103258af0c07df60e0e3afd0d
--- /dev/null
+++ b/src/common/message_broker/Constants.py
@@ -0,0 +1 @@
+CONSUME_TIMEOUT = 0.1 # seconds
diff --git a/src/common/message_broker/Factory.py b/src/common/message_broker/Factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2ea36435c717835bf4b7c89c2522878e67074c9
--- /dev/null
+++ b/src/common/message_broker/Factory.py
@@ -0,0 +1,32 @@
+import logging, os
+from typing import Optional, Union
+from .backend._Backend import _Backend
+from .backend.BackendEnum import BackendEnum
+from .backend.inmemory.InMemoryBackend import InMemoryBackend
+from .backend.redis.RedisBackend import RedisBackend
+
+LOGGER = logging.getLogger(__name__)
+
+BACKENDS = {
+    BackendEnum.INMEMORY.value: InMemoryBackend,
+    BackendEnum.REDIS.value: RedisBackend,
+    #BackendEnum.KAFKA.value: KafkaBackend,
+    #BackendEnum.RABBITMQ.value: RabbitMQBackend,
+    #BackendEnum.ZEROMQ.value: ZeroMQBackend,
+}
+
+DEFAULT_MB_BACKEND = BackendEnum.INMEMORY
+
+def get_messagebroker_backend(backend : Optional[Union[str, BackendEnum]] = None, **settings) -> _Backend:
+    # return an instance of MessageBroker initialized with selected backend.
+    # The backend is selected using following criteria (first that is not None is selected):
+    # 1. user selected by parameter (backend=...)
+    # 2. environment variable MB_BACKEND
+    # 3. default backend: INMEMORY
+    if backend is None: backend = os.environ.get('MB_BACKEND', DEFAULT_MB_BACKEND)
+    if backend is None: raise Exception('MessageBroker Backend not specified')
+    if isinstance(backend, BackendEnum): backend = backend.value
+    backend_class = BACKENDS.get(backend)
+    if backend_class is None: raise Exception('Unsupported MessageBrokerBackend({:s})'.format(backend))
+    LOGGER.info('Selected MessageBroker Backend: {:s}'.format(backend))
+    return backend_class(**settings)
diff --git a/src/common/message_broker/Message.py b/src/common/message_broker/Message.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee527dc9f46855b3d806f9188a7d5640c34f416b
--- /dev/null
+++ b/src/common/message_broker/Message.py
@@ -0,0 +1,5 @@
+from typing import NamedTuple
+
+class Message(NamedTuple):
+    topic: str
+    content: str
diff --git a/src/common/message_broker/MessageBroker.py b/src/common/message_broker/MessageBroker.py
new file mode 100644
index 0000000000000000000000000000000000000000..53697db2d0062a8290be44991990ee7f217e2c25
--- /dev/null
+++ b/src/common/message_broker/MessageBroker.py
@@ -0,0 +1,27 @@
+import logging
+from typing import Iterator, Set
+from .backend._Backend import _Backend
+from .Constants import CONSUME_TIMEOUT
+from .Message import Message
+
+LOGGER = logging.getLogger(__name__)
+
+class MessageBroker:
+    def __init__(self, backend : _Backend):
+        if not isinstance(backend, _Backend):
+            str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+            raise AttributeError('backend must inherit from {}'.format(str_class_path))
+        self._backend = backend
+
+    @property
+    def backend(self) -> _Backend: return self._backend
+
+    def publish(self, message : Message) -> None:
+        self._backend.publish(message.topic, message.content)
+
+    def consume(self, topic_names : Set[str], consume_timeout : float = CONSUME_TIMEOUT) -> Iterator[Message]:
+        for pair in self._backend.consume(topic_names, consume_timeout=consume_timeout):
+            yield Message(*pair)
+
+    def terminate(self):
+        self._backend.terminate()
diff --git a/src/common/database/__init__.py b/src/common/message_broker/__init__.py
similarity index 100%
rename from src/common/database/__init__.py
rename to src/common/message_broker/__init__.py
diff --git a/src/common/message_broker/backend/BackendEnum.py b/src/common/message_broker/backend/BackendEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ee482477e489fff25ed0538330e33d378cbd251
--- /dev/null
+++ b/src/common/message_broker/backend/BackendEnum.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+class BackendEnum(Enum):
+    INMEMORY = 'inmemory'
+    REDIS = 'redis'
+    #KAFKA = 'kafka'
+    #RABBITMQ = 'rabbitmq'
+    #ZEROMQ = 'zeromq'
diff --git a/src/common/message_broker/backend/_Backend.py b/src/common/message_broker/backend/_Backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6461d2538a1794e6bdec597ac0ad611eec6d484
--- /dev/null
+++ b/src/common/message_broker/backend/_Backend.py
@@ -0,0 +1,14 @@
+from typing import Iterator, Set, Tuple
+
+class _Backend:
+    def __init__(self, **settings) -> None:
+        raise NotImplementedError()
+
+    def terminate(self) -> None:
+        raise NotImplementedError()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        raise NotImplementedError()
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        raise NotImplementedError()
diff --git a/src/common/database/api/__init__.py b/src/common/message_broker/backend/__init__.py
similarity index 100%
rename from src/common/database/api/__init__.py
rename to src/common/message_broker/backend/__init__.py
diff --git a/src/common/message_broker/backend/inmemory/InMemoryBackend.py b/src/common/message_broker/backend/inmemory/InMemoryBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b0e758724deb280f9cc8a1ba3eb20e6f4c57356
--- /dev/null
+++ b/src/common/message_broker/backend/inmemory/InMemoryBackend.py
@@ -0,0 +1,41 @@
+# InMemory MessageBroker Backend
+# -------------------------------
+# - WARNING: DESIGNED AND BUILT FOR UNIT TESTING AND INTEGRATION TESTING PURPOSES ONLY !!!
+#            USE ANOTHER BACKEND IN PRODUCTION ENVIRONMENTS.
+
+import logging, threading
+from queue import Queue, Empty
+from typing import Dict, Iterator, Set, Tuple
+from .._Backend import _Backend
+
+LOGGER = logging.getLogger(__name__)
+
+class InMemoryBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        self._lock = threading.Lock()
+        self._terminate = threading.Event()
+        self._topic__to__queues : Dict[str, Set[Queue]] = {}
+
+    def terminate(self) -> None:
+        self._terminate.set()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        queues = self._topic__to__queues.get(topic_name, None)
+        if queues is None: return
+        for queue in queues: queue.put_nowait((topic_name, message_content))
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        queue = Queue()
+        for topic_name in topic_names:
+            self._topic__to__queues.setdefault(topic_name, set()).add(queue)
+
+        while not self._terminate.is_set():
+            try:
+                message = queue.get(block=True, timeout=consume_timeout)
+            except Empty:
+                continue
+            if message is None: continue
+            yield message
+
+        for topic_name in topic_names:
+            self._topic__to__queues.get(topic_name, set()).discard(queue)
diff --git a/src/common/database/api/context/__init__.py b/src/common/message_broker/backend/inmemory/__init__.py
similarity index 100%
rename from src/common/database/api/context/__init__.py
rename to src/common/message_broker/backend/inmemory/__init__.py
diff --git a/src/common/message_broker/backend/redis/RedisBackend.py b/src/common/message_broker/backend/redis/RedisBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e8be0f30d77dcf6db5dbebcacba06591d1dbdff
--- /dev/null
+++ b/src/common/message_broker/backend/redis/RedisBackend.py
@@ -0,0 +1,44 @@
+import os, threading
+from typing import Any, Dict, Iterator, Set, Tuple
+from redis.client import Redis
+
+from common.message_broker.Message import Message
+from .._Backend import _Backend
+
+DEFAULT_SERVICE_HOST = '127.0.0.1'
+DEFAULT_SERVICE_PORT = 6379
+DEFAULT_DATABASE_ID  = 0
+
+def get_setting(settings : Dict[str, Any], name : str, default : Any) -> Any:
+    value = settings.get(name, os.environ.get(name))
+    return default if value is None else value
+
+class RedisBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        host = get_setting(settings, 'REDIS_SERVICE_HOST', DEFAULT_SERVICE_HOST)
+        port = get_setting(settings, 'REDIS_SERVICE_PORT', DEFAULT_SERVICE_PORT)
+        dbid = get_setting(settings, 'REDIS_DATABASE_ID',  DEFAULT_DATABASE_ID )
+        self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
+        self._terminate = threading.Event()
+
+    def terminate(self) -> None:
+        self._terminate.set()
+
+    def publish(self, topic_name : str, message_content : str) -> None:
+        self._client.publish(topic_name, message_content)
+
+    def consume(self, topic_names : Set[str], consume_timeout : float) -> Iterator[Tuple[str, str]]:
+        pubsub = self._client.pubsub(ignore_subscribe_messages=True)
+        for topic_name in topic_names: pubsub.subscribe(topic_name)
+
+        while not self._terminate.is_set():
+            message = pubsub.get_message(ignore_subscribe_messages=True, timeout=consume_timeout)
+            if message is None: continue
+            if message['type'] not in {'message', 'pmessage'}: continue
+            topic = message['channel'].decode('UTF-8')
+            content = message['data'].decode('UTF-8')
+            yield Message(topic, content)
+
+        pubsub.unsubscribe()
+        while pubsub.get_message() is not None: pass
+        pubsub.close()
diff --git a/src/common/database/api/context/service/__init__.py b/src/common/message_broker/backend/redis/__init__.py
similarity index 100%
rename from src/common/database/api/context/service/__init__.py
rename to src/common/message_broker/backend/redis/__init__.py
diff --git a/src/common/database/api/context/topology/__init__.py b/src/common/message_broker/tests/__init__.py
similarity index 100%
rename from src/common/database/api/context/topology/__init__.py
rename to src/common/message_broker/tests/__init__.py
diff --git a/src/common/message_broker/tests/test_unitary.py b/src/common/message_broker/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ef27ae68eff96b8b5c3932a453b663820fe8abf
--- /dev/null
+++ b/src/common/message_broker/tests/test_unitary.py
@@ -0,0 +1,144 @@
+import logging, pytest, threading, time
+from typing import List, Set
+from common.message_broker.Factory import get_messagebroker_backend
+from common.message_broker.Message import Message
+from common.message_broker.MessageBroker import MessageBroker
+from common.message_broker.backend.BackendEnum import BackendEnum
+from common.message_broker.backend._Backend import _Backend
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+SCENARIOS = [
+    (BackendEnum.INMEMORY, {}),
+    (BackendEnum.REDIS,    {
+        'REDIS_SERVICE_HOST': '10.1.7.194',
+        'REDIS_SERVICE_PORT': 30283,
+        'REDIS_DATABASE_ID': 0,
+    }),
+]
+
+CONSUME_TIMEOUT = 0.1 # seconds
+
+TOPIC_DEVICES  = 'devices'
+TOPIC_LINKS    = 'links'
+TOPIC_SERVICES = 'services'
+
+class Consumer(threading.Thread):
+    def __init__(
+        self, message_broker : MessageBroker, # pylint: disable=redefined-outer-name
+        topic_names : Set[str], output_list : List[Message],
+        consume_timeout=CONSUME_TIMEOUT) -> None:
+
+        super().__init__(daemon=True)
+        self._message_broker = message_broker
+        self._topic_names = topic_names
+        self._output_list = output_list
+        self._consume_timeout = consume_timeout
+
+    def run(self) -> None:
+        LOGGER.info('{:s} subscribes to topics {:s}'.format(self.name, str(self._topic_names)))
+        for message in self._message_broker.consume(self._topic_names, consume_timeout=self._consume_timeout):
+            LOGGER.info('{:s} receives {:s}'.format(self.name, str(message)))
+            self._output_list.append(message)
+        LOGGER.info('{:s} terminates'.format(self.name))
+
+@pytest.fixture(scope='session', ids=[str(scenario[0].value) for scenario in SCENARIOS], params=SCENARIOS)
+def message_broker(request):
+    backend,settings = request.param
+    LOGGER.info('Running fixture with backend={:s}, settings={:s}...'.format(str(backend), str(settings)))
+    return MessageBroker(get_messagebroker_backend(backend=backend, **settings))
+
+def test_messagebroker_instantiation():
+    with pytest.raises(AttributeError) as e:
+        MessageBroker(None)
+    str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+    assert str(e.value) == 'backend must inherit from {}'.format(str_class_path)
+
+    assert MessageBroker(get_messagebroker_backend(BackendEnum.INMEMORY)) is not None
+
+def test_messagebroker(message_broker : MessageBroker): # pylint: disable=redefined-outer-name
+    output_list1 : List[Message] = []
+    consumer1 = Consumer(message_broker, {TOPIC_DEVICES, TOPIC_LINKS}, output_list1)
+    consumer1.start()
+
+    output_list2 : List[Message] = []
+    consumer2 = Consumer(message_broker, {TOPIC_DEVICES, TOPIC_SERVICES}, output_list2)
+    consumer2.start()
+
+    output_list3 : List[Message] = []
+    consumer3 = Consumer(message_broker, {TOPIC_SERVICES}, output_list3)
+    consumer3.start()
+
+    LOGGER.info('delay')
+    time.sleep(0.5)
+
+    message = Message(topic=TOPIC_DEVICES, content='new-device-01')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_DEVICES, content='new-device-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_LINKS,   content='new-link-01-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    LOGGER.info('delay')
+    time.sleep(0.1)
+
+    message = Message(topic=TOPIC_DEVICES,  content='update-device-01')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_DEVICES,  content='update-device-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    message = Message(topic=TOPIC_SERVICES, content='new-service-01-02')
+    LOGGER.info('publish message={:s}'.format(str(message)))
+    message_broker.publish(message)
+
+    LOGGER.info('delay')
+    time.sleep(0.5)
+
+    LOGGER.info('terminate')
+    message_broker.terminate()
+
+    LOGGER.info('join')
+    consumer1.join()
+    consumer2.join()
+    consumer3.join()
+
+    LOGGER.info('output_list1={:s}'.format(str(output_list1)))
+    LOGGER.info('output_list2={:s}'.format(str(output_list2)))
+    LOGGER.info('output_list3={:s}'.format(str(output_list3)))
+
+    assert len(output_list1) == 5
+    assert output_list1[0].topic == TOPIC_DEVICES
+    assert output_list1[0].content == 'new-device-01'
+    assert output_list1[1].topic == TOPIC_DEVICES
+    assert output_list1[1].content == 'new-device-02'
+    assert output_list1[2].topic == TOPIC_LINKS
+    assert output_list1[2].content == 'new-link-01-02'
+    assert output_list1[3].topic == TOPIC_DEVICES
+    assert output_list1[3].content == 'update-device-01'
+    assert output_list1[4].topic == TOPIC_DEVICES
+    assert output_list1[4].content == 'update-device-02'
+
+    assert len(output_list2) == 5
+    assert output_list2[0].topic == TOPIC_DEVICES
+    assert output_list2[0].content == 'new-device-01'
+    assert output_list2[1].topic == TOPIC_DEVICES
+    assert output_list2[1].content == 'new-device-02'
+    assert output_list2[2].topic == TOPIC_DEVICES
+    assert output_list2[2].content == 'update-device-01'
+    assert output_list2[3].topic == TOPIC_DEVICES
+    assert output_list2[3].content == 'update-device-02'
+    assert output_list2[4].topic == TOPIC_SERVICES
+    assert output_list2[4].content == 'new-service-01-02'
+
+    assert len(output_list3) == 1
+    assert output_list3[0].topic == TOPIC_SERVICES
+    assert output_list3[0].content == 'new-service-01-02'
diff --git a/src/common/orm/Database.py b/src/common/orm/Database.py
new file mode 100644
index 0000000000000000000000000000000000000000..81c18aec1699a6c356739b4fc7f8a5ae97edac95
--- /dev/null
+++ b/src/common/orm/Database.py
@@ -0,0 +1,38 @@
+import logging
+from typing import List, Set, Tuple
+from .backend._Backend import _Backend
+
+LOGGER = logging.getLogger(__name__)
+
+class Database:
+    def __init__(self, backend : _Backend):
+        if not isinstance(backend, _Backend):
+            str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+            raise AttributeError('backend must inherit from {}'.format(str_class_path))
+        self._backend = backend
+
+    @property
+    def backend(self) -> _Backend: return self._backend
+
+    def clear_all(self, keep_keys : Set[str] = set()) -> None:
+        for key in self._backend.keys():
+            if key in keep_keys: continue
+            self._backend.delete(key)
+
+    def dump(self) -> List[Tuple[str, str, str]]:
+        entries = self._backend.dump()
+        entries.sort()
+        _entries = []
+        for str_key, str_type, value in entries:
+            if isinstance(value, list):
+                str_value = '[' + ', '.join(map("'{:s}'".format, sorted(value))) + ']'
+            elif isinstance(value, set):
+                str_value = '{' + ', '.join(map("'{:s}'".format, sorted(value))) + '}'
+            elif isinstance(value, dict):
+                sorted_keys = sorted(value.keys())
+                str_items = ["'{}': '{}'".format(key, value[key]) for key in sorted_keys]
+                str_value = '{' + ', '.join(str_items) + '}'
+            else:
+                str_value = str(value)  # plain scalar values (e.g. lock owner keys)
+            _entries.append((str_type, str_key, str_value))
+        return _entries
diff --git a/src/common/database/api/Exceptions.py b/src/common/orm/Exceptions.py
similarity index 57%
rename from src/common/database/api/Exceptions.py
rename to src/common/orm/Exceptions.py
index ef60ac5f5ab9c3ec3de6cbf4c1af54dcc4389470..eea0b564e1918cb6a2da0553641c9492a32b1425 100644
--- a/src/common/database/api/Exceptions.py
+++ b/src/common/orm/Exceptions.py
@@ -1,4 +1,4 @@
-class WrongDatabaseEngine(Exception):
+class ConstraintException(Exception):
     pass
 
 class MutexException(Exception):
diff --git a/src/common/orm/Factory.py b/src/common/orm/Factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ef0e11ccdd7b2f0f9e3fde62903fef522cb9f7a
--- /dev/null
+++ b/src/common/orm/Factory.py
@@ -0,0 +1,32 @@
+import logging, os
+from typing import Optional, Union
+from .backend._Backend import _Backend
+from .backend.BackendEnum import BackendEnum
+from .backend.inmemory.InMemoryBackend import InMemoryBackend
+from .backend.redis.RedisBackend import RedisBackend
+
+LOGGER = logging.getLogger(__name__)
+
+BACKENDS = {
+    BackendEnum.INMEMORY.value: InMemoryBackend,
+    BackendEnum.REDIS.value: RedisBackend,
+    #BackendEnum.MONGODB.value: MongoDBBackend,
+    #BackendEnum.RETHINKDB.value: RethinkDBBackend,
+    #BackendEnum.ETCD.value: EtcdBackend,
+}
+
+DEFAULT_DB_BACKEND = BackendEnum.INMEMORY
+
+def get_database_backend(backend : Optional[Union[str, BackendEnum]] = None, **settings) -> _Backend:
+    # return an instance of Database initialized with selected backend.
+    # The backend is selected using following criteria (first that is not None is selected):
+    # 1. user selected by parameter (backend=...)
+    # 2. environment variable DB_BACKEND
+    # 3. default backend: INMEMORY
+    if backend is None: backend = os.environ.get('DB_BACKEND', DEFAULT_DB_BACKEND)
+    if backend is None: raise Exception('Database Backend not specified')
+    if isinstance(backend, BackendEnum): backend = backend.value
+    backend_class = BACKENDS.get(backend)
+    if backend_class is None: raise Exception('Unsupported DatabaseBackend({:s})'.format(backend))
+    LOGGER.info('Selected Database Backend: {:s}'.format(backend))
+    return backend_class(**settings)
diff --git a/src/common/orm/HighLevel.py b/src/common/orm/HighLevel.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e2e9e49ea22861d3d96bcac70acc86e2f50db35
--- /dev/null
+++ b/src/common/orm/HighLevel.py
@@ -0,0 +1,70 @@
+from typing import Any, Dict, List, Optional, Set, Tuple
+from common.rpc_method_wrapper.ServiceExceptions import NotFoundException
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.model.Model import Model, MetaModel
+
+def get_all_objects(database : Database, model_class : MetaModel) -> List[Model]:
+    db_pks = sorted(list(model_class.get_primary_keys(database)))
+    return [model_class(database, db_pk) for db_pk in db_pks]
+
+def get_object(
+    database : Database, model_class : Model, key_parts : List[str], raise_if_not_found : bool = True
+    ) -> Optional[Model]:
+
+    str_key = key_to_str(key_parts)
+    db_object = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    if found: return db_object
+    if raise_if_not_found: raise NotFoundException(model_class.__name__.replace('Model', ''), str_key)
+    return None
+
+def get_related_objects(
+    source_instance : Model, reference_model_class : MetaModel, navigation_field_name : str = None) -> Set[Model]:
+
+    database = source_instance.database
+    db_target_instances = set()
+
+    if navigation_field_name is not None:
+        navigation_fk_field : Optional[ForeignKeyField] = getattr(reference_model_class, navigation_field_name, None)
+        if navigation_fk_field is None or not isinstance(navigation_fk_field, ForeignKeyField):
+            msg = 'navigation_field_name({:s}) must be a ForeignKeyField in reference_model_class({:s})'
+            raise AttributeError(msg.format(navigation_field_name, reference_model_class.__name__))
+        target_model_class = navigation_fk_field.foreign_model
+
+    for db_reference_pk,_ in source_instance.references(reference_model_class):
+        db_reference = reference_model_class(database, db_reference_pk)
+        if navigation_field_name is not None:
+            target_fk_field = getattr(db_reference, navigation_field_name, None)
+            if target_fk_field is None: continue
+            db_reference = target_model_class(database, target_fk_field)
+        db_target_instances.add(db_reference)
+    return db_target_instances
+
+def update_or_create_object(
+    database : Database, model_class : Model, key_parts : List[str], attributes : Dict[str, Any]
+    ) -> Tuple[Model, bool]:
+
+    str_key = key_to_str(key_parts)
+    db_object : Model = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    for attr_name, attr_value in attributes.items():
+        setattr(db_object, attr_name, attr_value)
+    db_object.save()
+    updated = found # updated if found, else created
+    return db_object, updated
+
+def get_or_create_object(
+    database : Database, model_class : Model, key_parts : List[str], defaults : Dict[str, Any] = {}
+    ) -> Tuple[Model, bool]:
+
+    str_key = key_to_str(key_parts)
+    db_object : Model = model_class(database, str_key, auto_load=False)
+    found = db_object.load()
+    if not found:
+        for attr_name, attr_value in defaults.items():
+            setattr(db_object, attr_name, attr_value)
+        db_object.save()
+    created = not found # created if not found, else loaded
+    return db_object, created
diff --git a/src/common/database/api/context/topology/device/__init__.py b/src/common/orm/__init__.py
similarity index 100%
rename from src/common/database/api/context/topology/device/__init__.py
rename to src/common/orm/__init__.py
diff --git a/src/common/orm/backend/BackendEnum.py b/src/common/orm/backend/BackendEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..7699262f468b63044b160b9f0649432668002023
--- /dev/null
+++ b/src/common/orm/backend/BackendEnum.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+class BackendEnum(Enum):
+    INMEMORY = 'inmemory'
+    REDIS = 'redis'
+    #MONGODB = 'mongodb'
+    #RETHINKDB = 'rethinkdb'
+    #ETCD = 'etcd'
diff --git a/src/common/orm/backend/Tools.py b/src/common/orm/backend/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..8152e1a768e9a824fb6ce8a30ac4a09409fc47da
--- /dev/null
+++ b/src/common/orm/backend/Tools.py
@@ -0,0 +1,5 @@
+from typing import List, Union
+
+def key_to_str(key : Union[str, List[str]], separator : str = '/') -> str:
+    if isinstance(key, str): return key
+    return separator.join(map(str, key))
diff --git a/src/common/orm/backend/_Backend.py b/src/common/orm/backend/_Backend.py
new file mode 100644
index 0000000000000000000000000000000000000000..ead206354b12b0a39b0868c9bc853c17ac976325
--- /dev/null
+++ b/src/common/orm/backend/_Backend.py
@@ -0,0 +1,53 @@
+from typing import Any, Dict, List, Optional, Set, Tuple
+
+class _Backend:
+    def __init__(self, **settings) -> None:
+        raise NotImplementedError()
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        raise NotImplementedError()
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        raise NotImplementedError()
+
+    def keys(self) -> list:
+        raise NotImplementedError()
+
+    def exists(self, key : List[str]) -> bool:
+        raise NotImplementedError()
+
+    def delete(self, key : List[str]) -> bool:
+        raise NotImplementedError()
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        raise NotImplementedError()
+
+    def dict_update(self, key : List[str], fields : Dict[str, str] = {}) -> None:
+        raise NotImplementedError()
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        raise NotImplementedError()
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        raise NotImplementedError()
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        raise NotImplementedError()
+
+    def set_add(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        raise NotImplementedError()
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        raise NotImplementedError()
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        raise NotImplementedError()
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        raise NotImplementedError()
diff --git a/src/common/database/api/context/topology/link/__init__.py b/src/common/orm/backend/__init__.py
similarity index 100%
rename from src/common/database/api/context/topology/link/__init__.py
rename to src/common/orm/backend/__init__.py
diff --git a/src/common/orm/backend/inmemory/InMemoryBackend.py b/src/common/orm/backend/inmemory/InMemoryBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..c960a17094c6b8e07ef6823fcef123c092add94a
--- /dev/null
+++ b/src/common/orm/backend/inmemory/InMemoryBackend.py
@@ -0,0 +1,147 @@
+# InMemory Database Backend
+# --------------------------
+# - Concurrency is limited to 1 operation at a time
+# - All operations are strictly sequential by means of locks
+# - WARNING: DESIGNED AND BUILT FOR UNIT TESTING AND INTEGRATION TESTING PURPOSES ONLY !!!
+#            USE ANOTHER BACKEND IN PRODUCTION ENVIRONMENTS.
+
+import copy, logging, threading, uuid
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from .._Backend import _Backend
+from ..Tools import key_to_str
+from .Tools import get_dict, get_list, get_or_create_dict, get_or_create_list, get_or_create_set, get_set
+
+LOGGER = logging.getLogger(__name__)
+
+class InMemoryBackend(_Backend):
+    def __init__(self, **settings): # pylint: disable=super-init-not-called
+        self._lock = threading.Lock()
+        self._keys : Dict[str, Union[Set[str], List[str], Dict[str, str], str]]= {} # key => set/list/dict/string
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        # InMemoryBackend uses a database where all operations are atomic. Locks are implemented by assigning the
+        # lock owner key into a string variable; if that variable is absent or empty, the lock can be acquired.
+        owner_key = str(uuid.uuid4()) if owner_key is None else owner_key
+        str_keys = {key_to_str(key) for key in keys}
+        with self._lock:
+            acquired_lock_keys : Dict[str, str] = {}
+            for str_key in str_keys:
+                if (str_key in self._keys) and (len(self._keys[str_key]) > 0) and (self._keys[str_key] != owner_key):
+                    # lock already acquired, cannot acquire all locks atomically
+                    for str_key_acquired in acquired_lock_keys:
+                        if str_key_acquired not in self._keys: continue
+                        del self._keys[str_key_acquired]
+                    return False, None
+
+                # lock available, temporarily acquire it; all acquired locks will be released
+                # if any of the requested keys turns out to be unavailable
+                self._keys[str_key] = owner_key
+                acquired_lock_keys[str_key] = owner_key
+            return True, owner_key
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        str_keys = {key_to_str(key) for key in keys}
+        with self._lock:
+            for str_key in str_keys:
+                if str_key not in self._keys: return False
+                if self._keys[str_key] != owner_key: return False
+            # Up to here, we own all the keys we want to release
+            for str_key in str_keys:
+                del self._keys[str_key]
+            return True
+
+    def keys(self) -> list:
+        with self._lock:
+            return copy.deepcopy(list(self._keys.keys()))
+
+    def exists(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            return str_key in self._keys
+
+    def delete(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            if str_key not in self._keys: return False
+            del self._keys[str_key]
+            return True
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_dict(self._keys, str_key)
+            if container is None: return {}
+            if len(fields) == 0: fields = container.keys()
+            return copy.deepcopy({
+                field_name : field_value for field_name,field_value in container.items() if field_name in fields
+            })
+
+    def dict_update(self, key : List[str], fields : Dict[str,str] = {}) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_dict(self._keys, str_key)
+            container.update(fields)
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            if len(fields) == 0:
+                if str_key not in self._keys: return
+                del self._keys[str_key]
+            else:
+                container = get_or_create_dict(self._keys, str_key)
+                for field in list(fields): container.pop(field, None)
+                if len(container) == 0: self._keys.pop(str_key)
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_list(self._keys, str_key)
+            if container is None: return []
+            return copy.deepcopy(container)
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_list(self._keys, str_key)
+            container.append(item)
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_list(self._keys, str_key)
+            container.remove(item)
+            if len(container) == 0: self._keys.pop(str_key)
+
+    def set_add(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_set(self._keys, str_key)
+            container.add(item)
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_set(self._keys, str_key)
+            return item in container
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_set(self._keys, str_key)
+            if container is None: return set()
+            return copy.deepcopy(container)
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        with self._lock:
+            container = get_or_create_set(self._keys, str_key)
+            container.discard(item)
+            if len(container) == 0: self._keys.pop(str_key)
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        with self._lock:
+            entries = []
+            for str_key,key_value in self._keys.items():
+                entries.append((str_key, type(key_value).__name__, key_value))
+        return entries
diff --git a/src/common/orm/backend/inmemory/Tools.py b/src/common/orm/backend/inmemory/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe10506556de36d6f40665c6f59119cbf540d8a4
--- /dev/null
+++ b/src/common/orm/backend/inmemory/Tools.py
@@ -0,0 +1,31 @@
+from typing import Dict, List, Set, Union
+
+def get_dict(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Dict:
+    return keys.get(str_key, None)
+
+def get_or_create_dict(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Dict:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, dict())
+    if not isinstance(container, dict):
+        raise Exception('Key({:s}, {:s}) is not a dict'.format(str(type(container).__name__), str(str_key)))
+    return container
+
+def get_list(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> List:
+    return keys.get(str_key, None)
+
+def get_or_create_list(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> List:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, list())
+    if not isinstance(container, list):
+        raise Exception('Key({:s}, {:s}) is not a list'.format(str(type(container).__name__), str(str_key)))
+    return container
+
+def get_set(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Set:
+    return keys.get(str_key, None)
+
+def get_or_create_set(keys : Dict[str, Union[Dict, List, Set]], str_key : str) -> Set:
+    container = keys.get(str_key, None)
+    if container is None: container = keys.setdefault(str_key, set())
+    if not isinstance(container, set):
+        raise Exception('Key({:s}, {:s}) is not a set'.format(str(type(container).__name__), str(str_key)))
+    return container
diff --git a/src/common/database/api/entity/__init__.py b/src/common/orm/backend/inmemory/__init__.py
similarity index 100%
rename from src/common/database/api/entity/__init__.py
rename to src/common/orm/backend/inmemory/__init__.py
diff --git a/src/common/database/engines/redis/Mutex.py b/src/common/orm/backend/redis/Mutex.py
similarity index 98%
rename from src/common/database/engines/redis/Mutex.py
rename to src/common/orm/backend/redis/Mutex.py
index a83bed740a9abfe9bf4cea2b76c049662853bebf..55d52d8d5ff558b096958aefd85d926b46716db9 100644
--- a/src/common/database/engines/redis/Mutex.py
+++ b/src/common/orm/backend/redis/Mutex.py
@@ -8,7 +8,8 @@ MIN_WAIT_TIME = 0.01
 class Mutex:
     def __init__(self, client: Redis) -> None:
         if not isinstance(client, Redis):
-            raise AttributeError('client must be an instance of redis.client.Redis')
+            str_class_path = '{}.{}'.format(Redis.__module__, Redis.__name__)
+            raise AttributeError('client must be an instance of {}'.format(str_class_path))
         self._client = client
         self._script_release = None
         self._script_refresh_expire = None
diff --git a/src/common/orm/backend/redis/RedisBackend.py b/src/common/orm/backend/redis/RedisBackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..edd73917315322803a102142c69f59dc305955a5
--- /dev/null
+++ b/src/common/orm/backend/redis/RedisBackend.py
@@ -0,0 +1,117 @@
+import os, uuid
+from typing import Any, Dict, List, Optional, Set, Tuple
+from redis.client import Redis
+from .._Backend import _Backend
+from ..Tools import key_to_str
+from .Mutex import Mutex
+
+DEFAULT_SERVICE_HOST = '127.0.0.1'
+DEFAULT_SERVICE_PORT = 6379
+DEFAULT_DATABASE_ID  = 0
+
+def get_setting(settings : Dict[str, Any], name : str, default : Any) -> Any:
+    value = settings.get(name, os.environ.get(name))
+    return default if value is None else value
+
+class RedisBackend(_Backend):
+    def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
+        host = get_setting(settings, 'REDIS_SERVICE_HOST', DEFAULT_SERVICE_HOST)
+        port = get_setting(settings, 'REDIS_SERVICE_PORT', DEFAULT_SERVICE_PORT)
+        dbid = get_setting(settings, 'REDIS_DATABASE_ID',  DEFAULT_DATABASE_ID )
+        self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
+        self._mutex = Mutex(self._client)
+
+    def lock(self, keys : List[List[str]], owner_key : Optional[str] = None) -> Tuple[bool, str]:
+        str_keys = {key_to_str(key) for key in keys}
+        owner_key = str(uuid.uuid4()) if owner_key is None else owner_key
+        return self._mutex.acquire(str_keys, owner_key=owner_key, blocking=True)
+
+    def unlock(self, keys : List[List[str]], owner_key : str) -> bool:
+        str_keys = {key_to_str(key) for key in keys}
+        return self._mutex.release(str_keys, owner_key)
+
+    def keys(self) -> list:
+        return [k.decode('UTF-8') for k in self._client.keys()]
+
+    def exists(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        return self._client.exists(str_key) == 1
+
+    def delete(self, key : List[str]) -> bool:
+        str_key = key_to_str(key)
+        return self._client.delete(str_key) == 1
+
+    def dict_get(self, key : List[str], fields : List[str] = []) -> Dict[str, str]:
+        str_key = key_to_str(key)
+        if len(fields) == 0:
+            keys_values = self._client.hgetall(str_key).items()
+        else:
+            fields = list(fields)
+            keys_values = zip(fields, self._client.hmget(str_key, fields))
+
+        attributes = {}
+        for key,value in keys_values:
+            str_key = key.decode('UTF-8') if isinstance(key, bytes) else key
+            attributes[str_key] = value.decode('UTF-8') if isinstance(value, bytes) else value
+        return attributes
+
+    def dict_update(self, key : List[str], fields : Dict[str, str] = {}) -> None:
+        str_key = key_to_str(key)
+        if len(fields) > 0:
+            self._client.hset(str_key, mapping=fields)
+
+    def dict_delete(self, key : List[str], fields : List[str] = []) -> None:
+        str_key = key_to_str(key)
+        if len(fields) == 0:
+            self._client.delete(str_key)
+        else:
+            self._client.hdel(str_key, *fields)
+
+    def list_get_all(self, key : List[str]) -> List[str]:
+        str_key = key_to_str(key)
+        return list(map(lambda m: m.decode('UTF-8'), self._client.lrange(str_key, 0, -1)))
+
+    def list_push_last(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.rpush(str_key, item)
+
+    def list_remove_first_occurrence(self, key : List[str], item: str) -> None:
+        str_key = key_to_str(key)
+        self._client.lrem(str_key, 1, item)
+
+    def set_add(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.sadd(str_key, item)
+
+    def set_has(self, key : List[str], item : str) -> bool:
+        str_key = key_to_str(key)
+        return self._client.sismember(str_key, item) == 1
+
+    def set_get_all(self, key : List[str]) -> Set[str]:
+        str_key = key_to_str(key)
+        return set(map(lambda m: m.decode('UTF-8'), self._client.smembers(str_key)))
+
+    def set_remove(self, key : List[str], item : str) -> None:
+        str_key = key_to_str(key)
+        self._client.srem(str_key, item)
+
+    def dump(self) -> List[Tuple[str, str, Any]]:
+        entries = []
+        for str_key in self._client.keys():
+            str_key = str_key.decode('UTF-8')
+            key_type = self._client.type(str_key)
+            if key_type is not None: key_type = key_type.decode('UTF-8')
+            key_type = {
+                'hash'  : 'dict',
+                'list'  : 'list',
+                'set'   : 'set',
+                'string': 'str',
+            }.get(key_type)
+            key_content = {
+                'dict': lambda key: {k.decode('UTF-8'):v.decode('UTF-8') for k,v in self._client.hgetall(key).items()},
+                'list': lambda key: [m.decode('UTF-8') for m in self._client.lrange(key, 0, -1)],
+                'set' : lambda key: {m.decode('UTF-8') for m in self._client.smembers(key)},
+                'str' : lambda key: self._client.get(key).decode('UTF-8'),
+            }.get(key_type, lambda key: 'UNSUPPORTED_TYPE')
+            entries.append((str_key, key_type, key_content(str_key)))
+        return entries
diff --git a/src/common/database/engines/__init__.py b/src/common/orm/backend/redis/__init__.py
similarity index 100%
rename from src/common/database/engines/__init__.py
rename to src/common/orm/backend/redis/__init__.py
diff --git a/src/common/orm/fields/BooleanField.py b/src/common/orm/fields/BooleanField.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bfbed17fb8cee7962ae86df2416c64369f9127d
--- /dev/null
+++ b/src/common/orm/fields/BooleanField.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+from typing import Union
+from common.type_checkers.Checkers import chk_boolean
+from .Field import Field
+
+BOOL_TRUE_VALUES = {'TRUE', 'T', '1'}
+
+class BooleanField(Field):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, type_=bool, **kwargs)
+
+    def validate(self, value : Union[bool, str], try_convert_type=False) -> bool:
+        value = self.is_required(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str):
+            return value.upper() in BOOL_TRUE_VALUES
+        return chk_boolean(self.name, value)
diff --git a/src/common/orm/fields/EnumeratedField.py b/src/common/orm/fields/EnumeratedField.py
new file mode 100644
index 0000000000000000000000000000000000000000..37e95bd338f736c7473055d9952eaa4ce4a334ea
--- /dev/null
+++ b/src/common/orm/fields/EnumeratedField.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+from enum import Enum
+from typing import Union
+from common.orm.fields.Field import Field
+from common.type_checkers.Checkers import chk_issubclass, chk_options, chk_type
+from .Field import Field
+
+class EnumeratedField(Field):
+    def __init__(self, enumeration_class : 'Enum', *args, required : bool = True, **kwargs) -> None:
+        self.enumeration_class : Enum = chk_issubclass('EnumeratedField.enumeration_class', enumeration_class, Enum)
+        super().__init__(*args, type_=self.enumeration_class, required=required, **kwargs)
+
+    def validate(self, value : Union['Enum', str], try_convert_type=False) -> 'Enum':
+        value = super().is_required(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str):
+            chk_options(self.name, value, self.enumeration_class.__members__.keys())
+            value = self.enumeration_class.__members__[value]
+        return chk_type(self.name, value, self.enumeration_class)
+
+    def serialize(self, value: 'Enum') -> str:
+        value = self.validate(value, try_convert_type=True)
+        if value is None: return None
+        return str(value.name)
diff --git a/src/common/orm/fields/Field.py b/src/common/orm/fields/Field.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa1a9e4352658ce3219abe24abdfb87183508eb5
--- /dev/null
+++ b/src/common/orm/fields/Field.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+import logging
+from typing import TYPE_CHECKING, Any, List, Set, Tuple, Union
+from common.type_checkers.Checkers import chk_boolean, chk_not_none, chk_string, chk_type
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+LOGGER = logging.getLogger(__name__)
+
+# Ref: https://docs.python.org/3.9/howto/descriptor.html
+
+class Field:
+    def __init__(
+        self, name : str = None, type_ : Union[type, Set[type], Tuple[type], List[type]] = object,
+        required : bool = False) -> None:
+
+        self.name = None if name is None else chk_string('Field.name', name)
+        self.type_ = chk_type('Field.type', type_, (type, set, tuple, list))
+        self.required = chk_boolean('Field.required', required)
+
+    def __get__(self, instance : 'Model', objtype=None):
+        if instance is None: return self
+        return instance.__dict__.get(self.name)
+
+    def __set__(self, instance : 'Model', value : Any) -> None:
+        instance.__dict__[self.name] = self.validate(value)
+
+    def __delete__(self, instance : 'Model'):
+        raise AttributeError('Attribute "{:s}" cannot be deleted'.format(self.name))
+
+    def is_required(self, value):
+        if self.required:
+            chk_not_none(self.name, value, reason='is required. It cannot be None.')
+        return value
+    
+    def validate(self, value, try_convert_type=False):
+        value = self.is_required(value)
+        if value is None: return None
+        if try_convert_type: value = self.type_(value)
+        return value
+
+    def serialize(self, value : Any) -> str:
+        value = self.validate(value)
+        if value is None: return None
+        return str(value)
+
+    def deserialize(self, value : str) -> Any:
+        return self.validate(value, try_convert_type=True)
diff --git a/src/common/orm/fields/FloatField.py b/src/common/orm/fields/FloatField.py
new file mode 100644
index 0000000000000000000000000000000000000000..e88b10a21c3f556e5ca0501d484aa5ab36311a76
--- /dev/null
+++ b/src/common/orm/fields/FloatField.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+from typing import Optional, Union
+from common.type_checkers.Checkers import chk_float
+from .Field import Field
+
+class FloatField(Field):
+    def __init__(
+        self, *args, min_value : Optional[float] = None, max_value : Optional[float] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=float, **kwargs)
+        self._min_value = None if min_value is None else \
+            chk_float('FloatField.min_value', min_value)
+        self._max_value = None if max_value is None else \
+            chk_float('FloatField.max_value', max_value, min_value=self._min_value)
+
+    def validate(self, value : Union[float, str], try_convert_type=False) -> float:
+        value = super().validate(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str): value = float(value)
+        return chk_float(self.name, value, min_value=self._min_value, max_value=self._max_value)
diff --git a/src/common/orm/fields/ForeignKeyField.py b/src/common/orm/fields/ForeignKeyField.py
new file mode 100644
index 0000000000000000000000000000000000000000..21815d97c930128ef2f75e3105ca6309a13d524e
--- /dev/null
+++ b/src/common/orm/fields/ForeignKeyField.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING
+from common.type_checkers.Checkers import chk_issubclass, chk_type
+from .StringField import StringField
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+class ForeignKeyField(StringField):
+    def __init__(self, foreign_model : 'Model', *args, required : bool = True, **kwargs) -> None:
+        from ..model.Model import Model
+        self.foreign_model : Model = chk_issubclass('ForeignKeyField.foreign_model', foreign_model, Model)
+        super().__init__(*args, required=required, allow_empty=not required, **kwargs)
+
+    def __set__(self, instance : 'Model', value : 'Model') -> None:
+        model_instance : 'Model' = chk_type('value', value, self.foreign_model)
+        super().__set__(instance, self.validate(model_instance.instance_key))
+
+    def __delete__(self, instance: 'Model'):
+        super().__set__(instance, self.validate(None))
diff --git a/src/common/orm/fields/IntegerField.py b/src/common/orm/fields/IntegerField.py
new file mode 100644
index 0000000000000000000000000000000000000000..591d90b83ea24054712ee2f03ebf09039ce4816f
--- /dev/null
+++ b/src/common/orm/fields/IntegerField.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+from typing import Optional, Union
+from common.type_checkers.Checkers import chk_integer
+from .Field import Field
+
+class IntegerField(Field):
+    def __init__(
+        self, *args, min_value : Optional[int] = None, max_value : Optional[int] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=int, **kwargs)
+        self._min_value = None if min_value is None else \
+            chk_integer('IntegerField.min_value', min_value)
+        self._max_value = None if max_value is None else \
+            chk_integer('IntegerField.max_value', max_value, min_value=self._min_value)
+
+    def validate(self, value : Union[int, str], try_convert_type=False) -> int:
+        value = super().validate(value)
+        if value is None: return None
+        if try_convert_type and isinstance(value, str): value = int(value)
+        return chk_integer(self.name, value, min_value=self._min_value, max_value=self._max_value)
diff --git a/src/common/orm/fields/PrimaryKeyField.py b/src/common/orm/fields/PrimaryKeyField.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e567f0361236194819198b726f3d32029072adc
--- /dev/null
+++ b/src/common/orm/fields/PrimaryKeyField.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING
+from .StringField import StringField
+
+if TYPE_CHECKING:
+    from ..model.Model import Model
+
+class PrimaryKeyField(StringField):
+    def __init__(self, *args, **kwargs) -> None:
+        super().__init__(*args, required=True, allow_empty=False, min_length=1, **kwargs)
+
+    def __set__(self, instance : 'Model', value : str) -> None:
+        if (self.name in instance.__dict__) and (instance.__dict__[self.name] is not None):
+            raise ValueError('PrimaryKeyField cannot be modified')
+        super().__set__(instance, self.validate(value))
diff --git a/src/common/orm/fields/StringField.py b/src/common/orm/fields/StringField.py
new file mode 100644
index 0000000000000000000000000000000000000000..0034c69baf2b4bbe0de16ade38244d19262cfe80
--- /dev/null
+++ b/src/common/orm/fields/StringField.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+import re
+from typing import Optional, Pattern, Union
+from common.type_checkers.Checkers import chk_boolean, chk_integer, chk_string
+from .Field import Field
+
+class StringField(Field):
+    def __init__(
+        self, *args, allow_empty : bool = False, min_length : Optional[int] = None, max_length : Optional[int] = None,
+        pattern : Optional[Union[Pattern, str]] = None, **kwargs) -> None:
+
+        super().__init__(*args, type_=str, **kwargs)
+        self._allow_empty = chk_boolean('StringField.allow_empty', allow_empty)
+        self._min_length = None if min_length is None else \
+            chk_integer('StringField.min_length', min_length, min_value=0)
+        self._max_length = None if max_length is None else \
+            chk_integer('StringField.max_length', max_length, min_value=self._min_length)
+        self._pattern = None if pattern is None else re.compile(pattern)
+
+    def validate(self, value : str, try_convert_type=False) -> str:
+        value = super().validate(value, try_convert_type=try_convert_type)
+        if value is None: return None
+        return chk_string(
+            self.name, value, allow_empty=self._allow_empty, min_length=self._min_length, max_length=self._max_length,
+            pattern=self._pattern)
diff --git a/src/common/orm/fields/__init__.py b/src/common/orm/fields/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b97f0f08a646c23d51b98b03bf7fe29f9c7d8bb8
--- /dev/null
+++ b/src/common/orm/fields/__init__.py
@@ -0,0 +1,8 @@
+from .BooleanField import BooleanField
+from .Field import Field
+from .FloatField import FloatField
+from .ForeignKeyField import ForeignKeyField
+from .IntegerField import IntegerField
+from .PrimaryKeyField import PrimaryKeyField
+from .StringField import StringField
+__all__ = ['BooleanField', 'Field', 'FloatField', 'ForeignKeyField', 'IntegerField', 'PrimaryKeyField', 'StringField']
diff --git a/src/common/orm/model/Model.py b/src/common/orm/model/Model.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf66a859f16302e9e746b612791fd0dacc07b996
--- /dev/null
+++ b/src/common/orm/model/Model.py
@@ -0,0 +1,292 @@
+from __future__ import annotations
+import logging, re
+from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from ..Exceptions import ConstraintException, MutexException
+from ..fields.Field import Field
+from ..fields.PrimaryKeyField import PrimaryKeyField
+from .Tools import NoDupOrderedDict
+
+LOGGER = logging.getLogger(__name__)
+DEFAULT_PRIMARY_KEY_NAME = 'pk_auto'
+
+class MetaModel(type):
+    @classmethod
+    def __prepare__(cls, name : str, bases : Tuple[type, ...], **attrs : Any) -> Mapping[str, Any]:
+        return NoDupOrderedDict()
+
+    def __new__(cls, name : str, bases : Tuple[type, ...], attrs : NoDupOrderedDict[str, Any]):
+        field_names = list()
+        pk_field_name = None
+        for key, value in attrs.items():
+            if not isinstance(value, Field): continue
+            value.name = key
+            field_names.append(key)
+            if not isinstance(value, PrimaryKeyField): continue
+            if pk_field_name is None:
+                pk_field_name = key
+                continue
+            raise AttributeError('PrimaryKeyField for Model({:s}) already set to attribute({:s})'.format(
+                str(name), str(pk_field_name)))
+        if pk_field_name is None:
+            if DEFAULT_PRIMARY_KEY_NAME in attrs.keys():
+                msg = 'PrimaryKeyField for Model({:s}) not defined and attribute "{:s}" already used. '\
+                      'Leave attribute name "{:s}" for automatic PrimaryKeyField, or set a PrimaryKeyField.'
+                raise AttributeError(msg.format(str(name), DEFAULT_PRIMARY_KEY_NAME, DEFAULT_PRIMARY_KEY_NAME))
+            pk_field_name = DEFAULT_PRIMARY_KEY_NAME
+            attrs[pk_field_name] = PrimaryKeyField(name=pk_field_name)
+            field_names.append(pk_field_name)
+        cls_obj = super().__new__(cls, name, bases, dict(attrs))
+        setattr(cls_obj, '_pk_field_name', pk_field_name)
+        setattr(cls_obj, '_field_names_list', field_names)
+        setattr(cls_obj, '_field_names_set', set(field_names))
+        return cls_obj
+
+KEYWORD_INSTANCES  = 'instances'
+KEYWORD_LOCK       = 'lock'
+KEYWORD_REFERENCES = 'references'
+KEYWORD_STORED     = '_stored'
+
+class Model(metaclass=MetaModel):
+    @classmethod
+    def get_backend_key_instances(cls) -> str:
+        return key_to_str(['{:s}'.format(cls.__name__), KEYWORD_INSTANCES])
+
+    @classmethod
+    def get_backend_key_instance(cls, primary_key : str) -> str:
+        return '{:s}[{:s}]'.format(cls.__name__, primary_key)
+
+    @classmethod
+    def get_backend_key_references(cls, primary_key : str) -> str:
+        match = re.match(r'^[a-zA-Z0-9\_]+\[([^\]]*)\]', primary_key)
+        if not match: primary_key = cls.get_backend_key_instance(primary_key)
+        return key_to_str([primary_key, KEYWORD_REFERENCES])
+
+    @staticmethod
+    def get_backend_key_lock(backend_key : str) -> str:
+        if backend_key.endswith(KEYWORD_LOCK): return backend_key
+        return key_to_str([backend_key, KEYWORD_LOCK])
+
+    @staticmethod
+    def get_backend_key_locks(backend_keys : List[str]) -> List[str]:
+        return [Model.get_backend_key_lock(backend_key) for backend_key in backend_keys]
+
+    @classmethod
+    def backend_key__to__instance_key(cls, backend_key : str) -> str:
+        class_name = cls.__name__
+        if backend_key.startswith(class_name):
+            match = re.match(r'^{:s}\[([^\]]*)\]'.format(class_name), backend_key)
+            if match: return match.group(1)
+        return backend_key
+
+    def __init__(self, database : Database, primary_key : str, auto_load : bool = True) -> None:
+        if not isinstance(database, Database):
+            str_class_path = '{}.{}'.format(Database.__module__, Database.__name__)
+            raise AttributeError('database must inherit from {}'.format(str_class_path))
+        self._model_class = type(self)
+        self._class_name = self._model_class.__name__
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+        pk_field_instance : 'PrimaryKeyField' = getattr(self._model_class, pk_field_name)
+        primary_key = pk_field_instance.validate(primary_key)
+        primary_key = self.backend_key__to__instance_key(primary_key)
+        setattr(self, pk_field_name, primary_key)
+        self._database = database
+        self._backend = database.backend
+        self._instance_key : str = self.get_backend_key_instance(primary_key)
+        self._instances_key : str = self.get_backend_key_instances()
+        self._references_key : str = self.get_backend_key_references(primary_key)
+        self._owner_key : Optional[str] = None
+        if auto_load: self.load()
+
+    @property
+    def database(self) -> Database: return self._database
+
+    @property
+    def instance_key(self) -> str: return self._instance_key
+
+    def lock(self, extra_keys : List[List[str]] = []):
+        lock_keys = Model.get_backend_key_locks(
+            [self._instance_key, self._instances_key, self._references_key] + extra_keys)
+        acquired,self._owner_key = self._backend.lock(lock_keys, owner_key=self._owner_key)
+        if acquired: return
+        raise MutexException('Unable to lock keys {:s} using owner_key {:s}'.format(
+            str(lock_keys), str(self._owner_key)))
+
+    def unlock(self, extra_keys : List[List[str]] = []):
+        lock_keys = Model.get_backend_key_locks(
+            [self._instance_key, self._instances_key, self._references_key] + extra_keys)
+        released = self._backend.unlock(lock_keys, self._owner_key)
+        if released: return
+        raise MutexException('Unable to unlock keys {:s} using owner_key {:s}'.format(
+            str(lock_keys), str(self._owner_key)))
+
+    def load(self) -> bool:
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+
+        try:
+            self.lock()
+
+            attributes = self._backend.dict_get(self._instance_key)
+            if attributes is None or len(attributes) == 0: return False
+            for field_name in self._field_names_list: # pylint: disable=no-member
+                if field_name == pk_field_name: continue
+                if field_name not in attributes: continue
+                raw_field_value = attributes[field_name]
+                field_instance : 'Field' = getattr(self._model_class, field_name)
+                field_value = field_instance.deserialize(raw_field_value)
+                if isinstance(field_instance, ForeignKeyField):
+                    setattr(self, field_name + KEYWORD_STORED, field_value)
+                    field_value = field_instance.foreign_model(self._database, field_value, auto_load=True)
+                setattr(self, field_name, field_value)
+            return True
+        finally:
+            self.unlock()
+
+    def save(self) -> None:
+        attributes : Dict[str, Any] = dict()
+        required_keys : Set[str] = set()
+        foreign_additions : Dict[str, str] = dict()
+        foreign_removals : Dict[str, str] = dict()
+        for field_name in self._field_names_list: # pylint: disable=no-member
+            field_value = getattr(self, field_name)
+            field_instance : 'Field' = getattr(self._model_class, field_name)
+            serialized_field_value = field_instance.serialize(field_value)
+            if (serialized_field_value is None) and (not field_instance.required): continue
+            if isinstance(field_instance, ForeignKeyField):
+                foreign_reference = '{:s}:{:s}'.format(self._instance_key, field_name)
+                field_value_stored = getattr(self, field_name + KEYWORD_STORED, None)
+                if field_value_stored is not None:
+                    foreign_removals[self.get_backend_key_references(field_value_stored)] = foreign_reference
+                foreign_additions[self.get_backend_key_references(serialized_field_value)] = foreign_reference
+                required_keys.add(serialized_field_value)
+            attributes[field_name] = serialized_field_value
+
+        extra_keys = []
+        extra_keys.extend(list(foreign_removals.keys()))
+        extra_keys.extend(list(foreign_additions.keys()))
+
+        try:
+            self.lock(extra_keys=extra_keys)
+
+            not_exists = [
+                str(required_key)
+                for required_key in required_keys
+                if not self._backend.exists(required_key)]
+            if len(not_exists) > 0:
+                raise ConstraintException('Required Keys ({:s}) does not exist'.format(', '.join(sorted(not_exists))))
+
+            self._backend.dict_update(self._instance_key, attributes)
+            self._backend.set_add(self._instances_key, self._instance_key)
+
+            for serialized_field_value,foreign_reference in foreign_removals.items():
+                self._backend.set_remove(serialized_field_value, foreign_reference)
+
+            for serialized_field_value,foreign_reference in foreign_additions.items():
+                self._backend.set_add(serialized_field_value, foreign_reference)
+        finally:
+            self.unlock(extra_keys=extra_keys)
+
+        for serialized_field_value,foreign_reference in foreign_additions.items():
+            setattr(self, (foreign_reference.rsplit(':', 1)[-1]) + KEYWORD_STORED, field_value_stored)
+
+    def delete(self) -> None:
+        foreign_removals : Dict[str, str] = {}
+        for field_name in self._field_names_list: # pylint: disable=no-member
+            field_instance : 'Field' = getattr(self._model_class, field_name)
+            if not isinstance(field_instance, ForeignKeyField): continue
+            foreign_reference = '{:s}:{:s}'.format(self._instance_key, field_name)
+            field_value_stored = getattr(self, field_name + KEYWORD_STORED, None)
+            if field_value_stored is None: continue
+            foreign_removals[self.get_backend_key_references(field_value_stored)] = foreign_reference
+
+        extra_keys = []
+        extra_keys.extend(list(foreign_removals.keys()))
+
+        try:
+            self.lock(extra_keys=extra_keys)
+
+            if self._backend.exists(self._references_key):
+                references = self._backend.set_get_all(self._references_key)
+                raise ConstraintException('Instance is used by Keys ({:s})'.format(', '.join(sorted(references))))
+
+            self._backend.delete(self._instance_key)
+            self._backend.set_remove(self._instances_key, self._instance_key)
+
+            for serialized_field_value,foreign_reference in foreign_removals.items():
+                self._backend.set_remove(serialized_field_value, foreign_reference)
+        finally:
+            self.unlock(extra_keys=extra_keys)
+
+    @staticmethod
+    def get_model_name(model_or_str) -> str:
+        if isinstance(model_or_str, str):
+            return model_or_str
+        if (type(model_or_str).__name__ == 'MetaModel') and issubclass(model_or_str, Model):
+            return model_or_str.__name__
+        raise Exception()
+
+    def references(
+        self, filter_by_models : Optional[Union[type, List[type], Set[type], Tuple[type]]] = None
+        ) -> Set[Tuple[str, str]]:
+
+        try:
+            self.lock()
+            if not self._backend.exists(self._references_key): return {}
+            references = self._backend.set_get_all(self._references_key)
+            try:
+                if filter_by_models is None:
+                    pass
+                elif isinstance(filter_by_models, str):
+                    filter_by_models = {filter_by_models}
+                elif isinstance(filter_by_models, (list, set, tuple)):
+                    filter_by_models = {Model.get_model_name(model_or_str) for model_or_str in filter_by_models}
+                elif (type(filter_by_models).__name__ == 'MetaModel') and issubclass(filter_by_models, Model):
+                    filter_by_models = {Model.get_model_name(filter_by_models)}
+                else:
+                    raise Exception()
+            except Exception as e:
+                msg = 'filter_by_models({:s}) unsupported. Expected a type or a list/set of types. Optionally, keep '\
+                      'it as None to retrieve all the references pointing to this instance.'
+                raise AttributeError(msg.format(str(filter_by_models))) from e
+            if filter_by_models:
+                references = filter(lambda instance_key: instance_key.split('[', 1)[0] in filter_by_models, references)
+            return {tuple(reference.rsplit(':', 1)) for reference in references}
+        finally:
+            self.unlock()
+
+    @classmethod
+    def get_primary_keys(cls, database : Database):
+        backend = database.backend
+        key_model_instances = cls.get_backend_key_instances()
+        key_model_instances_lock = cls.get_backend_key_lock(key_model_instances)
+
+        acquired,owner_key = backend.lock(key_model_instances_lock)
+        if not acquired:
+            raise MutexException('Unable to lock keys {:s}'.format(
+                str(key_model_instances_lock)))
+
+        instance_keys = backend.set_get_all(key_model_instances)
+
+        released = backend.unlock(key_model_instances_lock, owner_key)
+        if not released:
+            raise MutexException('Unable to unlock keys {:s} using owner_key {:s}'.format(
+                str(key_model_instances_lock), str(owner_key)))
+
+        return instance_keys
+
+    def dump_id(self) -> Dict:
+        raise NotImplementedError()
+
+    def dump(self) -> Dict:
+        raise NotImplementedError()
+
+    def __repr__(self) -> str:
+        pk_field_name = self._pk_field_name # pylint: disable=no-member
+        arguments = ', '.join(
+            '{:s}={:s}{:s}'.format(
+                name, repr(getattr(self, name)), '(PK)' if name == pk_field_name else '')
+            for name in self._field_names_list # pylint: disable=no-member
+        )
+        return '{:s}({:s})'.format(self._class_name, arguments)
diff --git a/src/common/database/api/entity/Tools.py b/src/common/orm/model/Tools.py
similarity index 65%
rename from src/common/database/api/entity/Tools.py
rename to src/common/orm/model/Tools.py
index 6669d0f25d602f3028481dc7a07dc6242201eee4..3ebf63ae35a9e27d083a533ace3d1805197f166c 100644
--- a/src/common/database/api/entity/Tools.py
+++ b/src/common/orm/model/Tools.py
@@ -1,3 +1,10 @@
+from collections import OrderedDict
+
+class NoDupOrderedDict(OrderedDict):
+    def __setitem__(self, key, value):
+        if key in self: raise NameError('{:s} already defined'.format(str(key)))
+        super().__setitem__(key, value)
+
 def format_key(key_pattern, instance, **kwargs):
     attributes = {}
     for attribute_name in instance.__dir__():
diff --git a/src/common/database/engines/inmemory/__init__.py b/src/common/orm/model/__init__.py
similarity index 100%
rename from src/common/database/engines/inmemory/__init__.py
rename to src/common/orm/model/__init__.py
diff --git a/src/common/database/engines/redis/__init__.py b/src/common/orm/tests/__init__.py
similarity index 100%
rename from src/common/database/engines/redis/__init__.py
rename to src/common/orm/tests/__init__.py
diff --git a/src/common/orm/tests/test_unitary.py b/src/common/orm/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6f58b9a33d474b2574099870c7a8f6f51682b76
--- /dev/null
+++ b/src/common/orm/tests/test_unitary.py
@@ -0,0 +1,642 @@
+import logging, pytest
+from enum import Enum
+from common.orm.Exceptions import ConstraintException
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
+from common.orm.backend.BackendEnum import BackendEnum
+from common.orm.backend._Backend import _Backend
+from common.orm.fields.BooleanField import BooleanField
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.FloatField import FloatField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import DEFAULT_PRIMARY_KEY_NAME, Model
+
+logging.basicConfig(level=logging.INFO)
+LOGGER = logging.getLogger(__name__)
+
+def test_database_instantiation():
+    with pytest.raises(AttributeError) as e:
+        Database(None)
+    str_class_path = '{}.{}'.format(_Backend.__module__, _Backend.__name__)
+    assert str(e.value) == 'backend must inherit from {}'.format(str_class_path)
+
+    assert Database(get_database_backend(BackendEnum.INMEMORY)) is not None
+
+def test_model_without_attributes():
+    with pytest.raises(AttributeError) as e:
+        Model(None, 'valid-uuid')
+    str_class_path = '{}.{}'.format(Database.__module__, Database.__name__)
+    assert str(e.value) == 'database must inherit from {}'.format(str_class_path)
+
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    with pytest.raises(ValueError) as e:
+        Model(database, '')
+    msg = '{:s}() is out of range: allow_empty(False).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, 23)
+    msg = '{:s}(23) is of a wrong type(int). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, 23.5)
+    msg = '{:s}(23.5) is of a wrong type(float). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+    
+    with pytest.raises(TypeError) as e:
+        Model(database, True)
+    msg = '{:s}(True) is of a wrong type(bool). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    with pytest.raises(TypeError) as e:
+        Model(database, ['a'])
+    msg = '{:s}([\'a\']) is of a wrong type(list). Accepted type_or_types(<class \'str\'>).'
+    assert str(e.value) == msg.format(DEFAULT_PRIMARY_KEY_NAME)
+
+    Model(database, 'valid-primary-key')
+
+def test_model_with_primarykey():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    with pytest.raises(AttributeError) as e:
+        class WrongTestModel(Model): # pylint: disable=unused-variable
+            pk = PrimaryKeyField()
+            name = StringField(min_length=1)
+            age = IntegerField(min_value=0)
+            salary = FloatField(min_value=0.0)
+            active = BooleanField()
+            pk2 = PrimaryKeyField()
+    assert str(e.value) == 'PrimaryKeyField for Model(WrongTestModel) already set to attribute(pk)'
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=1)
+        age = IntegerField(min_value=0)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    backend_key_instances  = TestModel.get_backend_key_instances()
+    backend_key_instance   = TestModel.get_backend_key_instance('pk')
+    backend_key_references = TestModel.get_backend_key_references('pk')
+
+    assert backend_key_instances  == 'TestModel/instances'
+    assert backend_key_instance   == 'TestModel[pk]'
+    assert backend_key_references == 'TestModel[pk]/references'
+
+    assert TestModel.get_backend_key_lock(backend_key_instances ) == 'TestModel/instances/lock'
+    assert TestModel.get_backend_key_lock(backend_key_instance  ) == 'TestModel[pk]/lock'
+    assert TestModel.get_backend_key_lock(backend_key_references) == 'TestModel[pk]/references/lock'
+
+    with pytest.raises(ValueError) as e:
+        TestModel(database, None)
+    assert str(e.value) == 'pk(None) is required. It cannot be None.'
+
+    with pytest.raises(ValueError) as e:
+        TestModel(database, '')
+    assert str(e.value) == 'pk() is out of range: allow_empty(False).'
+
+    obj = TestModel(database, 'valid-pk')
+    assert obj is not None
+
+    with pytest.raises(ValueError) as e:
+        obj.pk = 'another-valid-pk'
+    assert str(e.value) == 'PrimaryKeyField cannot be modified'
+
+def test_model_with_primarykey_and_attributes():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    with pytest.raises(AttributeError) as e:
+        class BadTestModel(Model):
+            pk_auto = StringField() # field using default name of primary key
+            name = StringField(min_length=5, max_length=10)
+            age = IntegerField(min_value=0)
+            salary = FloatField(min_value=0.0)
+            active = BooleanField()
+            gender = EnumeratedField(GenderEnum)
+
+    msg = 'PrimaryKeyField for Model(BadTestModel) not defined and attribute "pk_auto" already used. '\
+          'Leave attribute name "pk_auto" for automatic PrimaryKeyField, or set a PrimaryKeyField.'
+    assert str(e.value) == msg
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=5, max_length=10)
+        age = IntegerField(min_value=0)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    obj = TestModel(database, 'valid-pk')
+    assert obj is not None
+
+    with pytest.raises(AttributeError) as e:
+        del obj.name
+    assert str(e.value) == 'Attribute "name" cannot be deleted'
+
+    with pytest.raises(TypeError) as e:
+        obj.name = 55
+    assert str(e.value) == "name(55) is of a wrong type(int). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.name = 55.5
+    assert str(e.value) == "name(55.5) is of a wrong type(float). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.name = True
+    assert str(e.value) == "name(True) is of a wrong type(bool). Accepted type_or_types(<class 'str'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.age = 'too old'
+    assert str(e.value) == "age(too old) is of a wrong type(str). Accepted type_or_types(<class 'int'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.age = 37.5
+    assert str(e.value) == "age(37.5) is of a wrong type(float). Accepted type_or_types(<class 'int'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.salary = 'too high'
+    msg = "salary(too high) is of a wrong type(str). Accepted type_or_types((<class 'int'>, <class 'float'>))."
+    assert str(e.value) == msg
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 'active'
+    assert str(e.value) == "active(active) is of a wrong type(str). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 27
+    assert str(e.value) == "active(27) is of a wrong type(int). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.active = 92.5
+    assert str(e.value) == "active(92.5) is of a wrong type(float). Accepted type_or_types(<class 'bool'>)."
+
+    with pytest.raises(ValueError) as e:
+        obj.name = ''
+    assert str(e.value) == 'name() is out of range: allow_empty(False).'
+
+    with pytest.raises(ValueError) as e:
+        obj.name = 'John'
+    assert str(e.value) == 'name(John) is out of range: min_length(5).'
+
+    with pytest.raises(ValueError) as e:
+        obj.name = 'John Smith Willson'
+    assert str(e.value) == 'name(John Smith Willson) is out of range: max_value(10).'
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 51
+    assert str(e.value) == "gender(51) is of a wrong type(int). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 55.5
+    assert str(e.value) == "gender(55.5) is of a wrong type(float). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = False
+    assert str(e.value) == "gender(False) is of a wrong type(bool). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    with pytest.raises(TypeError) as e:
+        obj.gender = 'male'
+    assert str(e.value) == "gender(male) is of a wrong type(str). Accepted type_or_types(<enum 'GenderEnum'>)."
+
+    obj.name = 'John Smith'
+    obj.age = 37
+    obj.salary = 5023.52
+    obj.active = True
+    obj.gender = GenderEnum.MALE
+    assert repr(obj) == "TestModel(pk='valid-pk'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                        "gender=<GenderEnum.MALE: 'male'>)"
+
+def test_model_database_operations():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class TestModel(Model):
+        pk = PrimaryKeyField()
+        name = StringField(min_length=5, max_length=30)
+        age = IntegerField(min_value=0, required=True)
+        salary = FloatField(min_value=0.0)
+        active = BooleanField()
+        gender = EnumeratedField(GenderEnum)
+
+    obj_john = TestModel(database, 'john')
+    assert obj_john is not None
+
+    obj_john.name = 'John Smith'
+    obj_john.salary = 5023.52
+    obj_john.active = True
+    assert repr(obj_john) == "TestModel(pk='john'(PK), name='John Smith', age=None, salary=5023.52, active=True, "\
+                             "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        obj_john.save()
+    assert str(e.value) == 'age(None) is required. It cannot be None.'
+
+    obj_john.age = 37
+    assert repr(obj_john) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                             "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        obj_john.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    obj_john.gender = GenderEnum.MALE
+    obj_john.save()
+
+    db_entries = database.dump()
+    assert len(db_entries) == 2
+    assert db_entries[0] == (
+        'set', 'TestModel/instances',
+        "{'TestModel[john]'}")
+    assert db_entries[1] == (
+        'dict', 'TestModel[john]',
+        "{'active': 'True', 'age': '37', 'gender': 'MALE', 'name': 'John Smith', 'pk': 'john', "\
+        "'salary': '5023.52'}")
+
+    obj_john2 = TestModel(database, 'john', auto_load=False)
+    assert obj_john2 is not None
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name=None, age=None, salary=None, active=None, gender=None)"
+    obj_john2.load()
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                              "gender=<GenderEnum.MALE: 'male'>)"
+
+    obj_john2 = TestModel(database, 'john', auto_load=True)
+    assert obj_john2 is not None
+    assert repr(obj_john2) == "TestModel(pk='john'(PK), name='John Smith', age=37, salary=5023.52, active=True, "\
+                              "gender=<GenderEnum.MALE: 'male'>)"
+
+    obj_john2.delete()
+    assert len(database.dump()) == 0
+
+    obj_john2.save()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 2
+    assert db_entries[0] == (
+        'set', 'TestModel/instances',
+        "{'TestModel[john]'}")
+    assert db_entries[1] == (
+        'dict', 'TestModel[john]',
+        "{'active': 'True', 'age': '37', 'gender': 'MALE', 'name': 'John Smith', 'pk': 'john', "\
+        "'salary': '5023.52'}")
+
+
+    obj_jane = TestModel(database, 'jane', auto_load=True)
+    obj_jane.name = 'Jane Willson'
+    obj_jane.age = 26
+    obj_jane.salary = 6071.72
+    obj_jane.active = True
+    obj_jane.gender = GenderEnum.FEMALE
+    assert repr(obj_jane) == "TestModel(pk='jane'(PK), name='Jane Willson', age=26, salary=6071.72, active=True, "\
+                             "gender=<GenderEnum.FEMALE: 'female'>)"
+    obj_jane.save()
+
+    obj_julia = TestModel(database, 'julia', auto_load=True)
+    obj_julia.name = 'Julia Simons'
+    obj_julia.age = 42
+    obj_julia.salary = 5451.13
+    obj_julia.active = True
+    obj_julia.gender = GenderEnum.FEMALE
+    assert repr(obj_julia) == "TestModel(pk='julia'(PK), name='Julia Simons', age=42, salary=5451.13, active=True, "\
+                              "gender=<GenderEnum.FEMALE: 'female'>)"
+    obj_julia.save()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    test_model_pks = sorted(TestModel.get_primary_keys(database))
+    assert len(test_model_pks) == 3
+    assert test_model_pks[0] == 'TestModel[jane]'
+    assert test_model_pks[1] == 'TestModel[john]'
+    assert test_model_pks[2] == 'TestModel[julia]'
+
+    database.clear_all()
+    assert len(database.dump()) == 0
+
+def test_model_foreignkeys():
+    database = Database(get_database_backend(BackendEnum.INMEMORY))
+
+    class GenderEnum(Enum):
+        FEMALE = 'female'
+        MALE   = 'male'
+
+    class Team(Model):
+        pk = PrimaryKeyField()
+        name = StringField(max_length=10, required=True)
+
+    class Workplace(Model):
+        pk = PrimaryKeyField()
+        name = StringField(max_length=10, required=True)
+
+    class Member(Model):
+        pk = PrimaryKeyField()
+        team = ForeignKeyField(Team)
+        place = ForeignKeyField(Workplace, required=False)
+        name = StringField(max_length=10, required=True)
+        gender = EnumeratedField(GenderEnum)
+
+    team_dev_ops = Team(database, 'dev-ops')
+    team_dev_ops.name = 'Dev Ops'
+    assert team_dev_ops is not None
+    assert repr(team_dev_ops) == "Team(pk='dev-ops'(PK), name='Dev Ops')"
+
+    workplace_bcn = Workplace(database, 'bcn')
+    workplace_bcn.name = 'Barcelona'
+    assert workplace_bcn is not None
+    assert repr(workplace_bcn) == "Workplace(pk='bcn'(PK), name='Barcelona')"
+
+    member_john = Member(database, 'john')
+    member_john.name = 'John'
+    member_john.team = team_dev_ops
+    member_john.place = workplace_bcn
+    assert member_john is not None
+    assert repr(member_john) == "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[bcn]', name='John', "\
+                                "gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_john.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_john.gender = GenderEnum.MALE
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Team[dev-ops], Workplace[bcn]) does not exist'
+
+    workplace_bcn.save()
+    assert repr(Workplace(database, workplace_bcn.pk)) == "Workplace(pk='bcn'(PK), name='Barcelona')"
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Team[dev-ops]) does not exist'
+
+    team_dev_ops.save()
+    assert repr(Team(database, team_dev_ops.pk)) == "Team(pk='dev-ops'(PK), name='Dev Ops')"
+
+    member_john.save()
+    assert repr(Member(database, member_john.pk)) == \
+        "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[bcn]', name='John', "\
+        "gender=<GenderEnum.MALE: 'male'>)"
+
+    with pytest.raises(ConstraintException) as e:
+        workplace_bcn.delete()
+    assert str(e.value) == 'Instance is used by Keys (Member[john]:place)'
+
+    with pytest.raises(ConstraintException) as e:
+        team_dev_ops.delete()
+    assert str(e.value) == 'Instance is used by Keys (Member[john]:team)'
+
+    workplace_mad = Workplace(database, 'mad')
+    workplace_mad.name = 'Madrid'
+    assert workplace_mad is not None
+    assert repr(workplace_mad) == "Workplace(pk='mad'(PK), name='Madrid')"
+
+    member_john = Member(database, 'john')
+    member_john.name = 'John'
+    member_john.place = workplace_mad
+    assert member_john is not None
+    assert repr(member_john) == \
+        "Member(pk='john'(PK), team='Team[dev-ops]', place='Workplace[mad]', name='John', "\
+        "gender=<GenderEnum.MALE: 'male'>)"
+
+    with pytest.raises(ConstraintException) as e:
+        member_john.save()
+    assert str(e.value) == 'Required Keys (Workplace[mad]) does not exist'
+
+    workplace_mad.save()
+    assert repr(Workplace(database, workplace_mad.pk)) == "Workplace(pk='mad'(PK), name='Madrid')"
+
+    member_john.save()
+
+    member_john = Member(database, 'john')
+
+    with pytest.raises(ValueError) as e:
+        del member_john.place
+        del member_john.team
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+
+    member_jane = Member(database, 'jane')
+    member_jane.name = 'Jane'
+    member_jane.place = workplace_mad
+    assert member_jane is not None
+    assert repr(member_jane) == "Member(pk='jane'(PK), team=None, place='Workplace[mad]', name='Jane', gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_jane.save()
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+    member_jane.team = team_dev_ops
+
+    with pytest.raises(ValueError) as e:
+        member_jane.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_jane.gender = GenderEnum.FEMALE
+
+    member_jane.save()
+    assert repr(Member(database, member_jane.pk)) == \
+        "Member(pk='jane'(PK), team='Team[dev-ops]', place='Workplace[mad]', name='Jane', "\
+        "gender=<GenderEnum.FEMALE: 'female'>)"
+
+    member_brad = Member(database, 'brad')
+    assert member_brad is not None
+    assert repr(member_brad) == "Member(pk='brad'(PK), team=None, place=None, name=None, gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'team(None) is required. It cannot be None.'
+
+    member_brad.team = team_dev_ops
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'name(None) is required. It cannot be None.'
+
+    member_brad.name = 'Brad'
+    assert repr(member_brad) == "Member(pk='brad'(PK), team=\'Team[dev-ops]\', place=None, name='Brad', gender=None)"
+
+    with pytest.raises(ValueError) as e:
+        member_brad.save()
+    assert str(e.value) == 'gender(None) is required. It cannot be None.'
+
+    member_brad.gender = GenderEnum.MALE
+
+    member_brad.save()
+    assert repr(Member(database, member_brad.pk)) == \
+        "Member(pk='brad'(PK), team='Team[dev-ops]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+
+    team_admin = Team(database, 'admin')
+    team_admin.name = 'Admin'
+    team_admin.save()
+    assert repr(Team(database, team_admin.pk)) == "Team(pk='admin'(PK), name='Admin')"
+
+    member_brad = Member(database, member_brad.pk)
+    assert repr(member_brad) == \
+        "Member(pk='brad'(PK), team='Team[dev-ops]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+    member_brad.team = team_admin
+    assert repr(member_brad) == \
+        "Member(pk='brad'(PK), team='Team[admin]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+    member_brad.save()
+    assert repr(Member(database, member_brad.pk)) == \
+        "Member(pk='brad'(PK), team='Team[admin]', place=None, name='Brad', gender=<GenderEnum.MALE: 'male'>)"
+
+    references = sorted(team_dev_ops.references())
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'team')
+    assert references[1] == ('Member[john]', 'team')
+
+    references = sorted(workplace_bcn.references())
+    assert len(references) == 0
+
+    references = sorted(workplace_mad.references())
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references('Member'))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({'Member'}))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(['Member']))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(('Member',)))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references(Member))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({Member}))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references([Member]))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references((Member,)))
+    assert len(references) == 2
+    assert references[0] == ('Member[jane]', 'place')
+    assert references[1] == ('Member[john]', 'place')
+
+    references = sorted(workplace_mad.references({'non-existing-model'}))
+    assert len(references) == 0
+
+    with pytest.raises(AttributeError) as e:
+        references = sorted(workplace_mad.references(7))
+    assert str(e.value) == 'filter_by_models(7) unsupported. Expected a type or a list/set of types. '\
+                           'Optionally, keep it as None to retrieve all the references pointing to this instance.'
+
+    with pytest.raises(AttributeError) as e:
+        references = sorted(workplace_mad.references({7}))
+    assert str(e.value) == 'filter_by_models({7}) unsupported. Expected a type or a list/set of types. '\
+                           'Optionally, keep it as None to retrieve all the references pointing to this instance.'
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    assert len(db_entries) == 13
+    assert db_entries[ 0] == ('set', "Member/instances",
+                              "{'Member[brad]', 'Member[jane]', 'Member[john]'}")
+    assert db_entries[ 1] == ('dict', "Member[brad]",
+                              "{'gender': 'MALE', 'name': 'Brad', 'pk': 'brad', 'team': 'Team[admin]'}")
+    assert db_entries[ 2] == ('dict', "Member[jane]",
+                              "{'gender': 'FEMALE', 'name': 'Jane', 'pk': 'jane', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 3] == ('dict', "Member[john]",
+                              "{'gender': 'MALE', 'name': 'John', 'pk': 'john', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 4] == ('set', "Team/instances",
+                              "{'Team[admin]', 'Team[dev-ops]'}")
+    assert db_entries[ 5] == ('dict', "Team[admin]",
+                              "{'name': 'Admin', 'pk': 'admin'}")
+    assert db_entries[ 6] == ('set' , "Team[admin]/references",
+                              "{'Member[brad]:team'}")
+    assert db_entries[ 7] == ('dict', "Team[dev-ops]",
+                              "{'name': 'Dev Ops', 'pk': 'dev-ops'}")
+    assert db_entries[ 8] == ('set' , "Team[dev-ops]/references",
+                              "{'Member[jane]:team', 'Member[john]:team'}")
+    assert db_entries[ 9] == ('set', "Workplace/instances",
+                              "{'Workplace[bcn]', 'Workplace[mad]'}")
+    assert db_entries[10] == ('dict', "Workplace[bcn]",
+                              "{'name': 'Barcelona', 'pk': 'bcn'}")
+    assert db_entries[11] == ('dict', "Workplace[mad]",
+                              "{'name': 'Madrid', 'pk': 'mad'}")
+    assert db_entries[12] == ('set' , "Workplace[mad]/references",
+                              "{'Member[jane]:place', 'Member[john]:place'}")
+
+    Member(database, member_john.pk).delete()
+
+    db_entries = database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry))
+    LOGGER.info('-----------------------------------------------------------')
+
+    assert len(db_entries) == 12
+    assert db_entries[ 0] == ('set', "Member/instances",
+                              "{'Member[brad]', 'Member[jane]'}")
+    assert db_entries[ 1] == ('dict', 'Member[brad]',
+                              "{'gender': 'MALE', 'name': 'Brad', 'pk': 'brad', 'team': 'Team[admin]'}")
+    assert db_entries[ 2] == ('dict', 'Member[jane]',
+                              "{'gender': 'FEMALE', 'name': 'Jane', 'pk': 'jane', 'place': 'Workplace[mad]', "\
+                              "'team': 'Team[dev-ops]'}")
+    assert db_entries[ 3] == ('set', "Team/instances",
+                              "{'Team[admin]', 'Team[dev-ops]'}")
+    assert db_entries[ 4] == ('dict', 'Team[admin]',
+                              "{'name': 'Admin', 'pk': 'admin'}")
+    assert db_entries[ 5] == ('set',  'Team[admin]/references',
+                              "{'Member[brad]:team'}")
+    assert db_entries[ 6] == ('dict', 'Team[dev-ops]',
+                              "{'name': 'Dev Ops', 'pk': 'dev-ops'}")
+    assert db_entries[ 7] == ('set',  'Team[dev-ops]/references',
+                              "{'Member[jane]:team'}")
+    assert db_entries[ 8] == ('set', "Workplace/instances",
+                              "{'Workplace[bcn]', 'Workplace[mad]'}")
+    assert db_entries[ 9] == ('dict', 'Workplace[bcn]',
+                              "{'name': 'Barcelona', 'pk': 'bcn'}")
+    assert db_entries[10] == ('dict', 'Workplace[mad]',
+                              "{'name': 'Madrid', 'pk': 'mad'}")
+    assert db_entries[11] == ('set',  'Workplace[mad]/references',
+                              "{'Member[jane]:place'}")
diff --git a/src/common/rpc_method_wrapper/Decorator.py b/src/common/rpc_method_wrapper/Decorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfbc76ff86810740ed8cc83eff95863ffd823604
--- /dev/null
+++ b/src/common/rpc_method_wrapper/Decorator.py
@@ -0,0 +1,65 @@
+import grpc, logging
+from enum import Enum
+from typing import Dict, List
+from prometheus_client import Counter, Histogram
+from prometheus_client.metrics import MetricWrapperBase
+from .ServiceExceptions import ServiceException
+
+class RequestConditionEnum(Enum):
+    STARTED   = 'started'
+    COMPLETED = 'completed'
+    FAILED    = 'failed'
+
+def get_counter_requests(method_name : str, request_condition : RequestConditionEnum) -> Counter:
+    str_request_condition = request_condition.value
+    name = '{:s}_counter_requests_{:s}'.format(method_name.replace(':', '_'), str_request_condition)
+    description = '{:s} counter of requests {:s}'.format(method_name, str_request_condition)
+    return Counter(name, description)
+
+def get_histogram_duration(method_name : str) -> Histogram:
+    name = '{:s}_histogram_duration'.format(method_name.replace(':', '_'))
+    description = '{:s} histogram of request duration'.format(method_name)
+    return Histogram(name, description)
+
+METRIC_TEMPLATES = {
+    '{:s}_COUNTER_STARTED'   : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.STARTED),
+    '{:s}_COUNTER_COMPLETED' : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.COMPLETED),
+    '{:s}_COUNTER_FAILED'    : lambda method_name: get_counter_requests  (method_name, RequestConditionEnum.FAILED),
+    '{:s}_HISTOGRAM_DURATION': lambda method_name: get_histogram_duration(method_name),
+}
+
+def create_metrics(service_name : str, method_names : List[str]) -> Dict[str, MetricWrapperBase]:
+    metrics = {}
+    for method_name in method_names:
+        for template_name, template_generator_function in METRIC_TEMPLATES.items():
+            metric_name = template_name.format(method_name).upper()
+            metrics[metric_name] = template_generator_function('{:s}:{:s}'.format(service_name, method_name))
+    return metrics
+
+def safe_and_metered_rpc_method(metrics : Dict[str, MetricWrapperBase], logger : logging.Logger):
+    def outer_wrapper(func):
+        function_name = func.__name__
+        HISTOGRAM_DURATION : Histogram = metrics.get('{:s}_HISTOGRAM_DURATION'.format(function_name).upper())
+        COUNTER_STARTED    : Counter   = metrics.get('{:s}_COUNTER_STARTED'   .format(function_name).upper())
+        COUNTER_COMPLETED  : Counter   = metrics.get('{:s}_COUNTER_COMPLETED' .format(function_name).upper())
+        COUNTER_FAILED     : Counter   = metrics.get('{:s}_COUNTER_FAILED'    .format(function_name).upper())
+
+        @HISTOGRAM_DURATION.time()
+        def inner_wrapper(self, request, grpc_context : grpc.ServicerContext):
+            COUNTER_STARTED.inc()
+            try:
+                logger.debug('{:s} request: {:s}'.format(function_name, str(request)))
+                reply = func(self, request, grpc_context)
+                logger.debug('{:s} reply: {:s}'.format(function_name, str(reply)))
+                COUNTER_COMPLETED.inc()
+                return reply
+            except ServiceException as e:   # pragma: no cover (ServiceException not thrown)
+                logger.exception('{:s} exception'.format(function_name))
+                COUNTER_FAILED.inc()
+                grpc_context.abort(e.code, e.details)
+            except Exception as e:          # pragma: no cover, pylint: disable=broad-except
+                logger.exception('{:s} exception'.format(function_name))
+                COUNTER_FAILED.inc()
+                grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+        return inner_wrapper
+    return outer_wrapper
diff --git a/src/common/rpc_method_wrapper/ServiceExceptions.py b/src/common/rpc_method_wrapper/ServiceExceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..67a1e8003469ed96845aee74b17e48dfab58d88b
--- /dev/null
+++ b/src/common/rpc_method_wrapper/ServiceExceptions.py
@@ -0,0 +1,23 @@
+import grpc
+from typing import Iterable
+
+class ServiceException(Exception):
+    def __init__(self, code : grpc.StatusCode, details : str, extra_details : Iterable[str] = []) -> None:
+        self.code = code
+        self.details = '; '.join(map(str, [details] + extra_details))
+        super().__init__(self.details)
+
+class NotFoundException(ServiceException):
+    def __init__(self, object_name : str, object_uuid: str, extra_details : Iterable[str] = []) -> None:
+        details = '{:s}({:s}) not found'.format(str(object_name), str(object_uuid))
+        super().__init__(grpc.StatusCode.NOT_FOUND, details, extra_details=extra_details)
+
+class AlreadyExistsException(ServiceException):
+    def __init__(self, object_name : str, object_uuid: str, extra_details : Iterable[str] = None) -> None:
+        details = '{:s}({:s}) already exists'.format(str(object_name), str(object_uuid))
+        super().__init__(grpc.StatusCode.ALREADY_EXISTS, details, extra_details=extra_details)
+
+class InvalidArgumentException(ServiceException):
+    def __init__(self, argument_name : str, argument_value: str, extra_details : Iterable[str] = None) -> None:
+        details = '{:s}({:s}) is invalid'.format(str(argument_name), str(argument_value))
+        super().__init__(grpc.StatusCode.INVALID_ARGUMENT, details, extra_details=extra_details)
diff --git a/src/common/database/tests/__init__.py b/src/common/rpc_method_wrapper/__init__.py
similarity index 100%
rename from src/common/database/tests/__init__.py
rename to src/common/rpc_method_wrapper/__init__.py
diff --git a/src/common/exceptions/__init__.py b/src/common/rpc_method_wrapper/tests/__init__.py
similarity index 100%
rename from src/common/exceptions/__init__.py
rename to src/common/rpc_method_wrapper/tests/__init__.py
diff --git a/src/common/rpc_method_wrapper/tests/test_unitary.py b/src/common/rpc_method_wrapper/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..816b9dae859017a05f27055a14cb97db6d8e034f
--- /dev/null
+++ b/src/common/rpc_method_wrapper/tests/test_unitary.py
@@ -0,0 +1,30 @@
+import grpc, logging, time
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+
+logging.basicConfig(level=logging.DEBUG)
+LOGGER = logging.getLogger(__name__)
+
+def test_database_instantiation():
+    SERVICE_NAME = 'Context'
+    METHOD_NAMES = [
+        'ListContextIds',  'ListContexts',   'GetContext',  'SetContext',  'RemoveContext',  'GetContextEvents',
+        'ListTopologyIds', 'ListTopologies', 'GetTopology', 'SetTopology', 'RemoveTopology', 'GetTopologyEvents',
+        'ListDeviceIds',   'ListDevices',    'GetDevice',   'SetDevice',   'RemoveDevice',   'GetDeviceEvents',
+        'ListLinkIds',     'ListLinks',      'GetLink',     'SetLink',     'RemoveLink',     'GetLinkEvents',
+        'ListServiceIds',  'ListServices',   'GetService',  'SetService',  'RemoveService',  'GetServiceEvents',
+    ]
+    METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+    class TestServiceServicerImpl:
+        @safe_and_metered_rpc_method(METRICS, LOGGER)
+        def GetTopology(self, request, grpc_context : grpc.ServicerContext):
+            print('doing funny things')
+            time.sleep(0.1)
+            return 'done'
+
+    tssi = TestServiceServicerImpl()
+    tssi.GetTopology(1, 2)
+
+    for metric_name,metric in METRICS.items():
+        if 'GETTOPOLOGY_' not in metric_name: continue
+        print(metric_name, metric._child_samples()) # pylint: disable=protected-access
diff --git a/src/common/tests/Assertions.py b/src/common/tests/Assertions.py
deleted file mode 100644
index c7b87a671f88ab65768525c73dcf2b34361b579f..0000000000000000000000000000000000000000
--- a/src/common/tests/Assertions.py
+++ /dev/null
@@ -1,183 +0,0 @@
-def validate_empty(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 0
-
-def validate_uuid(message, allow_empty=False):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'uuid' in message
-    assert type(message['uuid']) is str
-    if allow_empty: return
-    assert len(message['uuid']) > 1
-
-def validate_context_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'contextUuid' in message
-    validate_uuid(message['contextUuid'])
-
-def validate_device_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'device_id' in message
-    validate_uuid(message['device_id'])
-
-def validate_link_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'link_id' in message
-    validate_uuid(message['link_id'])
-
-def validate_topology_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 2
-    assert 'contextId' in message
-    validate_context_id(message['contextId'])
-    assert 'topoId' in message
-    validate_uuid(message['topoId'])
-
-def validate_device_config(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'device_config' in message
-    assert type(message['device_config']) is str
-
-def validate_device_operational_status(message):
-    assert type(message) is str
-    assert message in ['KEEP_STATE', 'ENABLED', 'DISABLED']
-
-def validate_endpoint_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 3
-    assert 'topoId' in message
-    validate_topology_id(message['topoId'])
-    assert 'dev_id' in message
-    validate_device_id(message['dev_id'])
-    assert 'port_id' in message
-    validate_uuid(message['port_id'])
-
-def validate_endpoint(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 2
-    assert 'port_id' in message
-    validate_endpoint_id(message['port_id'])
-    assert 'port_type' in message
-    assert type(message['port_type']) is str
-
-def validate_device(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 5
-    assert 'device_id' in message
-    validate_device_id(message['device_id'])
-    assert 'device_type' in message
-    assert type(message['device_type']) is str
-    assert 'device_config' in message
-    validate_device_config(message['device_config'])
-    assert 'devOperationalStatus' in message
-    validate_device_operational_status(message['devOperationalStatus'])
-    assert 'endpointList' in message
-    assert type(message['endpointList']) is list
-    for endpoint in message['endpointList']: validate_endpoint(endpoint)
-
-def validate_link(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 2
-    assert 'link_id' in message
-    validate_link_id(message['link_id'])
-    assert 'endpointList' in message
-    assert type(message['endpointList']) is list
-    for endpoint_id in message['endpointList']: validate_endpoint_id(endpoint_id)
-
-def validate_topology(message):
-    assert type(message) is dict
-    assert len(message.keys()) > 0
-    assert 'topoId' in message
-    validate_topology_id(message['topoId'])
-    assert 'device' in message
-    assert type(message['device']) is list
-    for device in message['device']: validate_device(device)
-    assert 'link' in message
-    assert type(message['link']) is list
-    for link in message['link']: validate_link(link)
-
-def validate_topology_is_empty(message):
-    validate_topology(message)
-    assert len(message['device']) == 0
-    assert len(message['link']) == 0
-
-def validate_topology_has_devices(message):
-    validate_topology(message)
-    assert len(message['device']) > 0
-
-def validate_topology_has_links(message):
-    validate_topology(message)
-    assert len(message['link']) > 0
-
-def validate_constraint(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 2
-    assert 'constraint_type' in message
-    assert type(message['constraint_type']) is str
-    assert 'constraint_value' in message
-    assert type(message['constraint_value']) is str
-
-def validate_service_id(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 2
-    assert 'contextId' in message
-    validate_context_id(message['contextId'])
-    assert 'cs_id' in message
-    validate_uuid(message['cs_id'])
-
-def validate_service_config(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'serviceConfig' in message
-    assert type(message['serviceConfig']) is str
-
-def validate_service_type(message):
-    assert type(message) is str
-    assert message in ['UNKNOWN', 'L3NM', 'L2NM', 'TAPI_CONNECTIVITY_SERVICE']
-
-def validate_service_state_enum(message):
-    assert type(message) is str
-    assert message in ['PLANNED', 'ACTIVE', 'PENDING_REMOVAL']
-
-def validate_service_state(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'serviceState' in message
-    validate_service_state_enum(message['serviceState'])
-
-def validate_service(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 6
-    assert 'cs_id' in message
-    validate_service_id(message['cs_id'])
-    assert 'serviceType' in message
-    validate_service_type(message['serviceType'])
-    assert 'endpointList' in message
-    assert type(message['endpointList']) is list
-    for endpoint_id in message['endpointList']: validate_endpoint_id(endpoint_id)
-    assert 'constraint' in message
-    assert type(message['constraint']) is list
-    for constraint in message['constraint']: validate_constraint(constraint)
-    assert 'serviceState' in message
-    validate_service_state(message['serviceState'])
-    assert 'serviceConfig' in message
-    validate_service_config(message['serviceConfig'])
-
-def validate_service_list(message):
-    assert type(message) is dict
-    assert len(message.keys()) == 1
-    assert 'cs' in message
-    assert type(message['cs']) is list
-    for cs in message['cs']: validate_service(cs)
-
-def validate_service_list_is_empty(message):
-    validate_service_list(message)
-    assert len(message['cs']) == 0
-
-def validate_service_list_is_not_empty(message):
-    validate_service_list(message)
-    assert len(message['cs']) > 0
diff --git a/src/common/tools/service/DeviceCheckers.py b/src/common/tools/service/DeviceCheckers.py
deleted file mode 100644
index 9233b683e91ef26c112990dee139e21b3cc4a0c2..0000000000000000000000000000000000000000
--- a/src/common/tools/service/DeviceCheckers.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import grpc
-from common.database.api.Database import Database
-from common.database.api.context.topology.device.Endpoint import Endpoint
-from common.exceptions.ServiceException import ServiceException
-
-def check_device_exists(database : Database, context_id : str, topology_id : str, device_id : str):
-    db_context = database.context(context_id).create()
-    db_topology = db_context.topology(topology_id).create()
-    if db_topology.devices.contains(device_id): return
-    msg = 'Context({})/Topology({})/Device({}) does not exist in the database.'
-    msg = msg.format(context_id, topology_id, device_id)
-    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-def check_device_not_exists(database : Database, context_id : str, topology_id : str, device_id : str):
-    db_context = database.context(context_id).create()
-    db_topology = db_context.topology(topology_id).create()
-    if not db_topology.devices.contains(device_id): return
-    msg = 'Context({})/Topology({})/Device({}) already exists in the database.'
-    msg = msg.format(context_id, topology_id, device_id)
-    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
-
-def check_device_endpoint_exists(
-    database : Database, parent_name : str,
-    context_id : str, topology_id : str, device_id : str, port_id : str) -> Endpoint:
-
-    # Implicit validation: parent.context == endpoint.context, and parent.context created automatically
-    if not database.contexts.contains(context_id):          # pragma: no cover
-        msg = 'Context({}) in {} does not exist in the database.'
-        msg = msg.format(context_id, parent_name)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-    db_context = database.context(context_id)
-
-    if not db_context.topologies.contains(topology_id):
-        msg = 'Context({})/Topology({}) in {} does not exist in the database.'
-        msg = msg.format(context_id, topology_id, parent_name)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-    db_topology = db_context.topology(topology_id)
-
-    if not db_topology.devices.contains(device_id):
-        msg = 'Context({})/Topology({})/Device({}) in {} does not exist in the database.'
-        msg = msg.format(context_id, topology_id, device_id, parent_name)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-    db_device = db_topology.device(device_id)
-
-    if not db_device.endpoints.contains(port_id):
-        msg = 'Context({})/Topology({})/Device({})/Port({}) in {} does not exist in the database.'
-        msg = msg.format(context_id, topology_id, device_id, port_id, parent_name)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-    return db_device.endpoint(port_id)
diff --git a/src/common/tools/service/EndpointIdCheckers.py b/src/common/tools/service/EndpointIdCheckers.py
deleted file mode 100644
index 5ac0fe92dd458f38778d9a62011c4279b42ed918..0000000000000000000000000000000000000000
--- a/src/common/tools/service/EndpointIdCheckers.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import grpc, logging
-from typing import Dict, Set, Tuple, Union
-from common.Checkers import chk_string
-from common.exceptions.ServiceException import ServiceException
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-
-def check_endpoint_id(
-    logger : logging.Logger, endpoint_number : int, parent_name : str, endpoint_id : 'EndpointId',
-    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]],
-    predefined_context_id : str = DEFAULT_CONTEXT_ID, acceptable_context_ids : Set[str] = set([DEFAULT_CONTEXT_ID]),
-    predefined_topology_id : str = DEFAULT_TOPOLOGY_ID, acceptable_topology_ids : Set[str] = set([DEFAULT_TOPOLOGY_ID]),
-    predefined_device_id : Union[str, None] = None, acceptable_device_ids : Set[str] = set(),
-    prevent_same_device_multiple_times : bool = True) -> Tuple[str, str, str]:
-
-    try:
-        ep_context_id  = chk_string('endpoint_id[#{}].topoId.contextId.contextUuid.uuid'.format(endpoint_number),
-                                    endpoint_id.topoId.contextId.contextUuid.uuid,
-                                    allow_empty=True)
-        ep_topology_id = chk_string('endpoint_id[#{}].topoId.topoId.uuid'.format(endpoint_number),
-                                    endpoint_id.topoId.topoId.uuid,
-                                    allow_empty=True)
-        ep_device_id   = chk_string('endpoint_id[#{}].dev_id.device_id.uuid'.format(endpoint_number),
-                                    endpoint_id.dev_id.device_id.uuid,
-                                    allow_empty=(predefined_device_id is not None))
-        ep_port_id     = chk_string('endpoint_id[#{}].port_id.uuid'.format(endpoint_number),
-                                    endpoint_id.port_id.uuid,
-                                    allow_empty=False)
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    if len(ep_context_id) == 0:
-        # Assumption: if no context is specified for an endpoint_id, use predefined context
-        ep_context_id = predefined_context_id
-    elif (len(acceptable_context_ids) > 0) and (ep_context_id not in acceptable_context_ids):
-        # Assumption: parent and endpoints should belong to the same context
-        msg = ' '.join([
-            'Context({}) in {} mismatches acceptable Contexts({}).',
-            'Optionally, leave field empty to use predefined Context({}).',
-        ])
-        msg = msg.format(
-            ep_context_id, parent_name, str(acceptable_context_ids), predefined_context_id)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    if len(ep_topology_id) == 0:
-        # Assumption: if no topology is specified for an endpoint_id, use predefined topology
-        ep_topology_id = predefined_topology_id
-    elif (len(acceptable_topology_ids) > 0) and (ep_topology_id not in acceptable_topology_ids):
-        msg = ' '.join([
-            'Context({})/Topology({}) in {} mismatches acceptable Topologies({}).',
-            'Optionally, leave field empty to use predefined Topology({}).',
-        ])
-        msg = msg.format(
-            ep_context_id, ep_topology_id, parent_name, str(acceptable_topology_ids), predefined_topology_id)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    if (predefined_device_id is not None) and (len(ep_device_id) == 0):
-        # Assumption: if no device is specified for an endpoint_id, use predefined device, if available
-        ep_device_id = predefined_device_id
-    elif (len(acceptable_device_ids) > 0) and (ep_device_id not in acceptable_device_ids):
-        msg = ' '.join([
-            'Context({})/Topology({})/Device({}) in {} mismatches acceptable Devices({}).',
-            'Optionally, leave field empty to use predefined Device({}).',
-        ])
-        msg = msg.format(
-            ep_context_id, ep_topology_id, ep_device_id, parent_name, str(acceptable_device_ids), predefined_device_id)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    add_devices = add_topology_devices_endpoints.setdefault(ep_topology_id, dict())
-    if prevent_same_device_multiple_times and (ep_device_id in add_devices):
-        msg = 'Duplicated Context({})/Topology({})/Device({}) in {}.'
-        msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, parent_name)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    add_device_and_endpoints = add_devices.setdefault(ep_device_id, set())
-
-    # Implicit validation: same device cannot appear 2 times in the list of endpoints
-    if ep_port_id in add_device_and_endpoints:                                # pragma: no cover
-        msg = 'Duplicated Context({})/Topology({})/Device({})/Port({}) in {}.'
-        msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, parent_name)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    add_device_and_endpoints.add(ep_port_id)
-    return ep_topology_id, ep_device_id, ep_port_id
diff --git a/src/common/tools/service/EnumCheckers.py b/src/common/tools/service/EnumCheckers.py
deleted file mode 100644
index 4f2a84dc74470858efa97e52ad2a0dddd4cbfc88..0000000000000000000000000000000000000000
--- a/src/common/tools/service/EnumCheckers.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import grpc
-from enum import Enum
-from common.exceptions.ServiceException import ServiceException
-
-def check_enum(enum_name, method_name, value, to_enum_method, accepted_values_dict) -> Enum:
-    _value = to_enum_method(value)
-    if _value is None:                          # pragma: no cover (gRPC prevents unsupported values)
-        msg = 'Unsupported {}({}).'
-        msg = msg.format(enum_name, value)
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-    accepted_values = accepted_values_dict.get(method_name)
-    if accepted_values is None:                 # pragma: no cover (test requires malforming the code)
-        msg = '{} acceptable values not specified for Method({}).'
-        msg = msg.format(enum_name, method_name)
-        raise ServiceException(grpc.StatusCode.INTERNAL, msg)
-
-    if len(accepted_values) == 0: return _value
-    if _value in accepted_values: return _value
-
-    msg = 'Method({}) does not accept {}({}). Permitted values for Method({}) are {}({}).'
-    accepted_values_list = sorted(map(lambda v: v.name, accepted_values))
-    msg = msg.format(method_name, enum_name, _value.name, method_name, enum_name, accepted_values_list)
-    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
diff --git a/src/common/tools/service/LinkCheckers.py b/src/common/tools/service/LinkCheckers.py
deleted file mode 100644
index a65046dbf065286547b1885239ad7578fa69a562..0000000000000000000000000000000000000000
--- a/src/common/tools/service/LinkCheckers.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import grpc
-from common.database.api.Database import Database
-from common.exceptions.ServiceException import ServiceException
-
-def check_link_exists(database : Database, context_id : str, topology_id : str, link_id : str):
-    db_context = database.context(context_id).create()
-    db_topology = db_context.topology(topology_id).create()
-    if db_topology.links.contains(link_id): return
-    msg = 'Context({})/Topology({})/Link({}) does not exist in the database.'
-    msg = msg.format(context_id, topology_id, link_id)
-    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-def check_link_not_exists(database : Database, context_id : str, topology_id : str, link_id : str):
-    db_context = database.context(context_id).create()
-    db_topology = db_context.topology(topology_id).create()
-    if not db_topology.links.contains(link_id): return
-    msg = 'Context({})/Topology({})/Link({}) already exists in the database.'
-    msg = msg.format(context_id, topology_id, link_id)
-    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
diff --git a/src/common/tools/service/ServiceCheckers.py b/src/common/tools/service/ServiceCheckers.py
deleted file mode 100644
index d8bafd1c03db0b1b330633062456752da7cd93c9..0000000000000000000000000000000000000000
--- a/src/common/tools/service/ServiceCheckers.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import grpc
-from common.database.api.Database import Database
-from common.exceptions.ServiceException import ServiceException
-
-def check_service_exists(database : Database, context_id : str, service_id : str):
-    if not database.contexts.contains(context_id):
-        msg = 'Context({}) does not exist in the database.'
-        msg = msg.format(context_id)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-    db_context = database.context(context_id)
-    if db_context.services.contains(service_id): return
-
-    msg = 'Context({})/Service({}) does not exist in the database.'
-    msg = msg.format(context_id, service_id)
-    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-def check_service_not_exists(database : Database, context_id : str, service_id : str):
-    if not database.contexts.contains(context_id):
-        msg = 'Context({}) does not exist in the database.'
-        msg = msg.format(context_id)
-        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-    db_context = database.context(context_id)
-    if not db_context.services.contains(service_id): return
-
-    msg = 'Context({})/Service({}) already exists in the database.'
-    msg = msg.format(context_id, service_id)
-    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
diff --git a/src/common/type_checkers/Assertions.py b/src/common/type_checkers/Assertions.py
new file mode 100644
index 0000000000000000000000000000000000000000..f11a059eb5463751a920b116e86438b7e1b13484
--- /dev/null
+++ b/src/common/type_checkers/Assertions.py
@@ -0,0 +1,294 @@
+# ----- Enumerations ---------------------------------------------------------------------------------------------------
+
+def validate_config_action_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'CONFIGACTION_UNDEFINED',
+        'CONFIGACTION_SET',
+        'CONFIGACTION_DELETE',
+    ]
+
+def validate_device_driver_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'DEVICEDRIVER_UNDEFINED',
+        'DEVICEDRIVER_OPENCONFIG',
+        'DEVICEDRIVER_TRANSPORT_API',
+        'DEVICEDRIVER_P4',
+        'DEVICEDRIVER_IETF_NETWORK_TOPOLOGY',
+        'DEVICEDRIVER_ONF_TR_352',
+    ]
+
+def validate_device_operational_status_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'DEVICEOPERATIONALSTATUS_UNDEFINED',
+        'DEVICEOPERATIONALSTATUS_DISABLED',
+        'DEVICEOPERATIONALSTATUS_ENABLED'
+    ]
+
+def validate_service_type_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'SERVICETYPE_UNKNOWN',
+        'SERVICETYPE_L3NM',
+        'SERVICETYPE_L2NM',
+        'SERVICETYPE_TAPI_CONNECTIVITY_SERVICE',
+    ]
+
+def validate_service_state_enum(message):
+    assert isinstance(message, str)
+    assert message in [
+        'SERVICESTATUS_UNDEFINED',
+        'SERVICESTATUS_PLANNED',
+        'SERVICESTATUS_ACTIVE',
+        'SERVICESTATUS_PENDING_REMOVAL',
+    ]
+
+
+# ----- Common ---------------------------------------------------------------------------------------------------------
+def validate_uuid(message, allow_empty=False):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'uuid' in message
+    assert isinstance(message['uuid'], str)
+    if allow_empty: return
+    assert len(message['uuid']) > 1
+
+def validate_config_rule(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'action' in message
+    validate_config_action_enum(message['action'])
+    assert 'resource_key' in message
+    assert isinstance(message['resource_key'], str)
+    assert 'resource_value' in message
+    assert isinstance(message['resource_value'], str)
+
+def validate_config_rules(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'config_rules' in message
+    for config_rule in message['config_rules']: validate_config_rule(config_rule)
+
+def validate_constraint(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'constraint_type' in message
+    assert isinstance(message['constraint_type'], str)
+    assert 'constraint_value' in message
+    assert isinstance(message['constraint_value'], str)
+
+
+# ----- Identifiers ----------------------------------------------------------------------------------------------------
+
+def validate_context_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'context_uuid' in message
+    validate_uuid(message['context_uuid'])
+
+def validate_service_id(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    if context_uuid is not None: assert message['context_id']['context_uuid']['uuid'] == context_uuid
+    assert 'service_uuid' in message
+    validate_uuid(message['service_uuid'])
+
+def validate_topology_id(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    if context_uuid is not None: assert message['context_id']['context_uuid']['uuid'] == context_uuid
+    assert 'topology_uuid' in message
+    validate_uuid(message['topology_uuid'])
+
+def validate_device_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'device_uuid' in message
+    validate_uuid(message['device_uuid'])
+
+def validate_link_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'link_uuid' in message
+    validate_uuid(message['link_uuid'])
+
+def validate_endpoint_id(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'topology_id' in message
+    validate_topology_id(message['topology_id'])
+    assert 'device_id' in message
+    validate_device_id(message['device_id'])
+    assert 'endpoint_uuid' in message
+    validate_uuid(message['endpoint_uuid'])
+
+
+# ----- Lists of Identifiers -------------------------------------------------------------------------------------------
+
+def validate_context_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'context_ids' in message
+    assert isinstance(message['context_ids'], list)
+    for context_id in message['context_ids']: validate_context_id(context_id)
+
+def validate_service_ids(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'service_ids' in message
+    assert isinstance(message['service_ids'], list)
+    for service_id in message['service_ids']: validate_service_id(service_id, context_uuid=context_uuid)
+
+def validate_topology_ids(message, context_uuid=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'topology_ids' in message
+    assert isinstance(message['topology_ids'], list)
+    for topology_id in message['topology_ids']: validate_topology_id(topology_id, context_uuid=context_uuid)
+
+def validate_device_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'device_ids' in message
+    assert isinstance(message['device_ids'], list)
+    for device_id in message['device_ids']: validate_device_id(device_id)
+
+def validate_link_ids(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'link_ids' in message
+    assert isinstance(message['link_ids'], list)
+    for link_id in message['link_ids']: validate_link_id(link_id)
+
+
+# ----- Objects --------------------------------------------------------------------------------------------------------
+
+def validate_context(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'context_id' in message
+    validate_context_id(message['context_id'])
+    context_uuid = message['context_id']['context_uuid']['uuid']
+    assert 'service_ids' in message
+    assert isinstance(message['service_ids'], list)
+    for service_id in message['service_ids']: validate_service_id(service_id, context_uuid=context_uuid)
+    assert 'topology_ids' in message
+    assert isinstance(message['topology_ids'], list)
+    for topology_id in message['topology_ids']: validate_topology_id(topology_id, context_uuid=context_uuid)
+
+def validate_service_state(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'service_status' in message
+    validate_service_state_enum(message['service_status'])
+
+def validate_service(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 6
+    assert 'service_id' in message
+    validate_service_id(message['service_id'])
+    assert 'service_type' in message
+    validate_service_type_enum(message['service_type'])
+    assert 'service_endpoint_ids' in message
+    assert isinstance(message['service_endpoint_ids'], list)
+    for endpoint_id in message['service_endpoint_ids']: validate_endpoint_id(endpoint_id)
+    assert 'service_constraints' in message
+    assert isinstance(message['service_constraints'], list)
+    for constraint in message['service_constraints']: validate_constraint(constraint)
+    assert 'service_status' in message
+    validate_service_state(message['service_status'])
+    assert 'service_config' in message
+    validate_config_rules(message['service_config'])
+
+def validate_topology(message, num_devices=None, num_links=None):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 3
+    assert 'topology_id' in message
+    validate_topology_id(message['topology_id'])
+    assert 'device_ids' in message
+    assert isinstance(message['device_ids'], list)
+    if num_devices is not None: assert len(message['device_ids']) == num_devices
+    for device_id in message['device_ids']: validate_device_id(device_id)
+    assert 'link_ids' in message
+    assert isinstance(message['link_ids'], list)
+    if num_links is not None: assert len(message['link_ids']) == num_links
+    for link_id in message['link_ids']: validate_link_id(link_id)
+
+def validate_endpoint(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'endpoint_id' in message
+    validate_endpoint_id(message['endpoint_id'])
+    assert 'endpoint_type' in message
+    assert isinstance(message['endpoint_type'], str)
+
+def validate_device(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 6
+    assert 'device_id' in message
+    validate_device_id(message['device_id'])
+    assert 'device_type' in message
+    assert isinstance(message['device_type'], str)
+    assert 'device_config' in message
+    validate_config_rules(message['device_config'])
+    assert 'device_operational_status' in message
+    validate_device_operational_status_enum(message['device_operational_status'])
+    assert 'device_drivers' in message
+    assert isinstance(message['device_drivers'], list)
+    for driver in message['device_drivers']: validate_device_driver_enum(driver)
+    assert 'device_endpoints' in message
+    assert isinstance(message['device_endpoints'], list)
+    for endpoint in message['device_endpoints']: validate_endpoint(endpoint)
+
+def validate_link(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 2
+    assert 'link_id' in message
+    validate_link_id(message['link_id'])
+    assert 'link_endpoint_ids' in message
+    assert isinstance(message['link_endpoint_ids'], list)
+    for endpoint_id in message['link_endpoint_ids']: validate_endpoint_id(endpoint_id)
+
+
+# ----- Lists of Objects -----------------------------------------------------------------------------------------------
+
+def validate_contexts(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'contexts' in message
+    assert isinstance(message['contexts'], list)
+    for context in message['contexts']: validate_context(context)
+
+def validate_services(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'services' in message
+    assert isinstance(message['services'], list)
+    for service in message['services']: validate_service(service)
+
+def validate_topologies(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'topologies' in message
+    assert isinstance(message['topologies'], list)
+    for topology in message['topologies']: validate_topology(topology)
+
+def validate_devices(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'devices' in message
+    assert isinstance(message['devices'], list)
+    for device in message['devices']: validate_device(device)
+
+def validate_links(message):
+    assert isinstance(message, dict)
+    assert len(message.keys()) == 1
+    assert 'links' in message
+    assert isinstance(message['links'], list)
+    for link in message['links']: validate_link(link)
diff --git a/src/common/type_checkers/Checkers.py b/src/common/type_checkers/Checkers.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0eddcf213143c4c3d99c9edfafd1305384e777b
--- /dev/null
+++ b/src/common/type_checkers/Checkers.py
@@ -0,0 +1,90 @@
+import re
+from typing import Any, Container, List, Optional, Pattern, Set, Sized, Tuple, Union
+
+def chk_none(name : str, value : Any, reason=None) -> Any:
+    if value is None: return value
+    if reason is None: reason = 'must be None.'
+    raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason)))
+
+def chk_not_none(name : str, value : Any, reason=None) -> Any:
+    if value is not None: return value
+    if reason is None: reason = 'must not be None.'
+    raise ValueError('{}({}) {}'.format(str(name), str(value), str(reason)))
+
+def chk_type(name : str, value : Any, type_or_types : Union[type, Set[type]] = set()) -> Any:
+    if isinstance(value, type_or_types): return value
+    msg = '{}({}) is of a wrong type({}). Accepted type_or_types({}).'
+    raise TypeError(msg.format(str(name), str(value), type(value).__name__, str(type_or_types)))
+
+def chk_issubclass(name : str, value : type, class_or_classes : Union[type, Set[type]] = set()) -> Any:
+    if issubclass(value, class_or_classes): return value
+    msg = '{}({}) is of a wrong class({}). Accepted class_or_classes({}).'
+    raise TypeError(msg.format(str(name), str(value), type(value).__name__, str(class_or_classes)))
+
+def chk_length(
+    name : str, value : Sized, allow_empty : bool = False,
+    min_length : Optional[int] = None, max_length : Optional[int] = None) -> Any:
+
+    length = len(chk_type(name, value, Sized))
+
+    allow_empty = chk_type('allow_empty for {}'.format(name), allow_empty, bool)
+    if not allow_empty and length == 0:
+        raise ValueError('{}({}) is out of range: allow_empty({}).'.format(str(name), str(value), str(allow_empty)))
+
+    if min_length is not None:
+        min_length = chk_type('min_length for {}'.format(name), min_length, int)
+        if length < min_length:
+            raise ValueError('{}({}) is out of range: min_length({}).'.format(str(name), str(value), str(min_length)))
+
+    if max_length is not None:
+        max_length = chk_type('max_length for {}'.format(name), max_length, int)
+        if length > max_length:
+            raise ValueError('{}({}) is out of range: max_value({}).'.format(str(name), str(value), str(max_length)))
+
+    return value
+
+def chk_boolean(name : str, value : Any) -> bool:
+    return chk_type(name, value, bool)
+
+def chk_string(
+    name : str, value : Any, allow_empty : bool = False,
+    min_length : Optional[int] = None, max_length : Optional[int] = None,
+    pattern : Optional[Union[Pattern, str]] = None) -> str:
+
+    chk_type(name, value, str)
+    chk_length(name, value, allow_empty=allow_empty, min_length=min_length, max_length=max_length)
+    if pattern is None: return value
+    pattern = re.compile(pattern)
+    if pattern.match(value): return value
+    raise ValueError('{}({}) does not match pattern({}).'.format(str(name), str(value), str(pattern)))
+
+def chk_float(
+    name : str, value : Any, type_or_types : Union[type, Set[type], List[type], Tuple[type]] = (int, float),
+    min_value : Optional[Union[int, float]] = None, max_value : Optional[Union[int, float]] = None) -> float:
+
+    chk_not_none(name, value)
+    chk_type(name, value, type_or_types)
+    if min_value is not None:
+        chk_type(name, value, type_or_types)
+        if value < min_value:
+            msg = '{}({}) lower than min_value({}).'
+            raise ValueError(msg.format(str(name), str(value), str(min_value)))
+    if max_value is not None:
+        chk_type(name, value, type_or_types)
+        if value > max_value:
+            msg = '{}({}) greater than max_value({}).'
+            raise ValueError(msg.format(str(name), str(value), str(max_value)))
+    return float(value)
+
+def chk_integer(
+    name : str, value : Any,
+    min_value : Optional[Union[int, float]] = None, max_value : Optional[Union[int, float]] = None) -> int:
+
+    return int(chk_float(name, value, type_or_types=int, min_value=min_value, max_value=max_value))
+
+def chk_options(name : str, value : Any, options : Container) -> Any:
+    chk_not_none(name, value)
+    if value not in options:
+        msg = '{}({}) is not one of options({}).'
+        raise ValueError(msg.format(str(name), str(value), str(options)))
+    return value
diff --git a/src/common/tests/__init__.py b/src/common/type_checkers/__init__.py
similarity index 100%
rename from src/common/tests/__init__.py
rename to src/common/type_checkers/__init__.py
diff --git a/src/context/.gitlab-ci.yml b/src/context/.gitlab-ci.yml
index a6917d7287dadcae669c3cf2c7ffbc3e54ec0792..8803cc9c2802ec5991f8ce330c2d86a9e5c763eb 100644
--- a/src/context/.gitlab-ci.yml
+++ b/src/context/.gitlab-ci.yml
@@ -30,15 +30,17 @@ unit_test context:
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi  
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run -d -p 1010:1010 --name $IMAGE_NAME --network=teraflowbridge "$IMAGE_NAME:$IMAGE_TAG"
+    - docker pull "redis:6.2"
+    - docker run -d --name $IMAGE_NAME-redis --network=teraflowbridge redis:6.2
+    - docker run -d --name $IMAGE_NAME --network=teraflowbridge --env "LOG_LEVEL=INFO" --env "DB_BACKEND=redis" --env "REDIS_SERVICE_HOST=$IMAGE_NAME-redis" --env "REDIS_SERVICE_PORT=6379" --env "REDIS_DATABASE_ID=0" "$IMAGE_NAME:$IMAGE_TAG"
     - docker ps -a
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
     - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
   after_script:
-    - docker stop $IMAGE_NAME
-    - docker rm $IMAGE_NAME
+    - docker stop $IMAGE_NAME $IMAGE_NAME-redis
+    - docker rm $IMAGE_NAME $IMAGE_NAME-redis
   rules:
     - changes:
       - src/$IMAGE_NAME/**
diff --git a/src/context/Config.py b/src/context/Config.py
index 2019cdd0141dc98063fde51568c59e84f6ae087e..1854d83bbe6f8015e7749de2c747e823ce573d95 100644
--- a/src/context/Config.py
+++ b/src/context/Config.py
@@ -1,11 +1,11 @@
 import logging
 
 # General settings
-LOG_LEVEL = logging.WARNING
+LOG_LEVEL = logging.INFO
 
 # gRPC settings
 GRPC_SERVICE_PORT = 1010
-GRPC_MAX_WORKERS  = 10
+GRPC_MAX_WORKERS  = 200 # multiple clients might keep connections alive for Get*Events() RPC methods
 GRPC_GRACE_PERIOD = 60
 
 # REST-API settings
@@ -14,3 +14,6 @@ RESTAPI_BASE_URL = '/api'
 
 # Prometheus settings
 METRICS_PORT = 9192
+
+# Autopopulate the component with fake data for testing purposes?
+POPULATE_FAKE_DATA = False
diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py
index 32074eba3c8681029b567ca7bc4760d0879a593a..048a47edc3e51c7628ce5659eed05de78aed633e 100644
--- a/src/context/client/ContextClient.py
+++ b/src/context/client/ContextClient.py
@@ -1,6 +1,13 @@
+from typing import Iterator
 import grpc, logging
 from common.tools.client.RetryDecorator import retry, delay_exponential
-from context.proto.context_pb2 import Link, LinkId, Empty, Topology
+from context.proto.context_pb2 import \
+    Context,  ContextEvent,  ContextId,  ContextIdList,  ContextList,  \
+    Device,   DeviceEvent,   DeviceId,   DeviceIdList,   DeviceList,   \
+    Empty,                                                             \
+    Link,     LinkEvent,     LinkId,     LinkIdList,     LinkList,     \
+    Service,  ServiceEvent,  ServiceId,  ServiceIdList,  ServiceList,  \
+    Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList
 from context.proto.context_pb2_grpc import ContextServiceStub
 
 LOGGER = logging.getLogger(__name__)
@@ -10,7 +17,7 @@ DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
 class ContextClient:
     def __init__(self, address, port):
         self.endpoint = '{}:{}'.format(address, port)
-        LOGGER.debug('Creating channel to {}...'.format(self.endpoint))
+        LOGGER.debug('Creating channel to {:s}...'.format(self.endpoint))
         self.channel = None
         self.stub = None
         self.connect()
@@ -21,27 +28,216 @@ class ContextClient:
         self.stub = ContextServiceStub(self.channel)
 
     def close(self):
-        if(self.channel is not None): self.channel.close()
+        if self.channel is not None: self.channel.close()
         self.channel = None
         self.stub = None
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def GetTopology(self, request : Empty) -> Topology:
-        LOGGER.debug('GetTopology request: {}'.format(request))
+    def ListContextIds(self, request: Empty) -> ContextIdList:
+        LOGGER.debug('ListContextIds request: {:s}'.format(str(request)))
+        response = self.stub.ListContextIds(request)
+        LOGGER.debug('ListContextIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListContexts(self, request: Empty) -> ContextList:
+        LOGGER.debug('ListContexts request: {:s}'.format(str(request)))
+        response = self.stub.ListContexts(request)
+        LOGGER.debug('ListContexts result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetContext(self, request: ContextId) -> Context:
+        LOGGER.debug('GetContext request: {:s}'.format(str(request)))
+        response = self.stub.GetContext(request)
+        LOGGER.debug('GetContext result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetContext(self, request: Context) -> ContextId:
+        LOGGER.debug('SetContext request: {:s}'.format(str(request)))
+        response = self.stub.SetContext(request)
+        LOGGER.debug('SetContext result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def RemoveContext(self, request: ContextId) -> Empty:
+        LOGGER.debug('RemoveContext request: {:s}'.format(str(request)))
+        response = self.stub.RemoveContext(request)
+        LOGGER.debug('RemoveContext result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetContextEvents(self, request: Empty) -> Iterator[ContextEvent]:
+        LOGGER.debug('GetContextEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetContextEvents(request)
+        LOGGER.debug('GetContextEvents result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListTopologyIds(self, request: ContextId) -> TopologyIdList:
+        LOGGER.debug('ListTopologyIds request: {:s}'.format(str(request)))
+        response = self.stub.ListTopologyIds(request)
+        LOGGER.debug('ListTopologyIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListTopologies(self, request: ContextId) -> TopologyList:
+        LOGGER.debug('ListTopologies request: {:s}'.format(str(request)))
+        response = self.stub.ListTopologies(request)
+        LOGGER.debug('ListTopologies result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetTopology(self, request: TopologyId) -> Topology:
+        LOGGER.debug('GetTopology request: {:s}'.format(str(request)))
         response = self.stub.GetTopology(request)
-        LOGGER.debug('GetTopology result: {}'.format(response))
+        LOGGER.debug('GetTopology result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetTopology(self, request: Topology) -> TopologyId:
+        LOGGER.debug('SetTopology request: {:s}'.format(str(request)))
+        response = self.stub.SetTopology(request)
+        LOGGER.debug('SetTopology result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def RemoveTopology(self, request: TopologyId) -> Empty:
+        LOGGER.debug('RemoveTopology request: {:s}'.format(str(request)))
+        response = self.stub.RemoveTopology(request)
+        LOGGER.debug('RemoveTopology result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetTopologyEvents(self, request: Empty) -> Iterator[TopologyEvent]:
+        LOGGER.debug('GetTopologyEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetTopologyEvents(request)
+        LOGGER.debug('GetTopologyEvents result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListDeviceIds(self, request: Empty) -> DeviceIdList:
+        LOGGER.debug('ListDeviceIds request: {:s}'.format(str(request)))
+        response = self.stub.ListDeviceIds(request)
+        LOGGER.debug('ListDeviceIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListDevices(self, request: Empty) -> DeviceList:
+        LOGGER.debug('ListDevices request: {:s}'.format(str(request)))
+        response = self.stub.ListDevices(request)
+        LOGGER.debug('ListDevices result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetDevice(self, request: DeviceId) -> Device:
+        LOGGER.debug('GetDevice request: {:s}'.format(str(request)))
+        response = self.stub.GetDevice(request)
+        LOGGER.debug('GetDevice result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetDevice(self, request: Device) -> DeviceId:
+        LOGGER.debug('SetDevice request: {:s}'.format(str(request)))
+        response = self.stub.SetDevice(request)
+        LOGGER.debug('SetDevice result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def RemoveDevice(self, request: DeviceId) -> Empty:
+        LOGGER.debug('RemoveDevice request: {:s}'.format(str(request)))
+        response = self.stub.RemoveDevice(request)
+        LOGGER.debug('RemoveDevice result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetDeviceEvents(self, request: Empty) -> Iterator[DeviceEvent]:
+        LOGGER.debug('GetDeviceEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetDeviceEvents(request)
+        LOGGER.debug('GetDeviceEvents result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListLinkIds(self, request: Empty) -> LinkIdList:
+        LOGGER.debug('ListLinkIds request: {:s}'.format(str(request)))
+        response = self.stub.ListLinkIds(request)
+        LOGGER.debug('ListLinkIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListLinks(self, request: Empty) -> LinkList:
+        LOGGER.debug('ListLinks request: {:s}'.format(str(request)))
+        response = self.stub.ListLinks(request)
+        LOGGER.debug('ListLinks result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetLink(self, request: LinkId) -> Link:
+        LOGGER.debug('GetLink request: {:s}'.format(str(request)))
+        response = self.stub.GetLink(request)
+        LOGGER.debug('GetLink result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetLink(self, request: Link) -> LinkId:
+        LOGGER.debug('SetLink request: {:s}'.format(str(request)))
+        response = self.stub.SetLink(request)
+        LOGGER.debug('SetLink result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def RemoveLink(self, request: LinkId) -> Empty:
+        LOGGER.debug('RemoveLink request: {:s}'.format(str(request)))
+        response = self.stub.RemoveLink(request)
+        LOGGER.debug('RemoveLink result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetLinkEvents(self, request: Empty) -> Iterator[LinkEvent]:
+        LOGGER.debug('GetLinkEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetLinkEvents(request)
+        LOGGER.debug('GetLinkEvents result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListServiceIds(self, request: ContextId) -> ServiceIdList:
+        LOGGER.debug('ListServiceIds request: {:s}'.format(str(request)))
+        response = self.stub.ListServiceIds(request)
+        LOGGER.debug('ListServiceIds result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def ListServices(self, request: ContextId) -> ServiceList:
+        LOGGER.debug('ListServices request: {:s}'.format(str(request)))
+        response = self.stub.ListServices(request)
+        LOGGER.debug('ListServices result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetService(self, request: ServiceId) -> Service:
+        LOGGER.debug('GetService request: {:s}'.format(str(request)))
+        response = self.stub.GetService(request)
+        LOGGER.debug('GetService result: {:s}'.format(str(response)))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def SetService(self, request: Service) -> ServiceId:
+        LOGGER.debug('SetService request: {:s}'.format(str(request)))
+        response = self.stub.SetService(request)
+        LOGGER.debug('SetService result: {:s}'.format(str(response)))
         return response
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def AddLink(self, request : Link) -> LinkId:
-        LOGGER.debug('AddLink request: {}'.format(request))
-        response = self.stub.AddLink(request)
-        LOGGER.debug('AddLink result: {}'.format(response))
+    def RemoveService(self, request: ServiceId) -> Empty:
+        LOGGER.debug('RemoveService request: {:s}'.format(str(request)))
+        response = self.stub.RemoveService(request)
+        LOGGER.debug('RemoveService result: {:s}'.format(str(response)))
         return response
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def DeleteLink(self, request : LinkId) -> Empty:
-        LOGGER.debug('DeleteLink request: {}'.format(request))
-        response = self.stub.DeleteLink(request)
-        LOGGER.debug('DeleteLink result: {}'.format(response))
+    def GetServiceEvents(self, request: Empty) -> Iterator[ServiceEvent]:
+        LOGGER.debug('GetServiceEvents request: {:s}'.format(str(request)))
+        response = self.stub.GetServiceEvents(request)
+        LOGGER.debug('GetServiceEvents result: {:s}'.format(str(response)))
         return response
diff --git a/src/context/proto/context_pb2.py b/src/context/proto/context_pb2.py
index a41b1de47f4df97a6e90b42a02fab7556feafd34..8b4848bc33bfb0eba76590c8a3a627b2db84ca9f 100644
--- a/src/context/proto/context_pb2.py
+++ b/src/context/proto/context_pb2.py
@@ -20,43 +20,249 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"{\n\x07\x43ontext\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1f\n\x04topo\x18\x02 \x01(\x0b\x32\x11.context.Topology\x12(\n\x03\x63tl\x18\x03 \x01(\x0b\x32\x1b.context.TeraFlowController\"/\n\tContextId\x12\"\n\x0b\x63ontextUuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"m\n\x08Topology\x12#\n\x06topoId\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\x12\x1f\n\x06\x64\x65vice\x18\x03 \x03(\x0b\x32\x0f.context.Device\x12\x1b\n\x04link\x18\x04 \x03(\x0b\x32\r.context.Link\"S\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12)\n\x0c\x65ndpointList\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"R\n\nTopologyId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1d\n\x06topoId\x18\x02 \x01(\x0b\x32\r.context.Uuid\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"\xda\x01\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12>\n\x14\x64\x65vOperationalStatus\x18\x04 \x01(\x0e\x32 .context.DeviceOperationalStatus\x12\'\n\x0c\x65ndpointList\x18\x05 \x03(\x0b\x32\x11.context.EndPoint\"%\n\x0c\x44\x65viceConfig\x12\x15\n\rdevice_config\x18\x01 \x01(\t\"C\n\x08\x45ndPoint\x12$\n\x07port_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x11\n\tport_type\x18\x02 \x01(\t\"t\n\nEndPointId\x12#\n\x06topoId\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12!\n\x06\x64\x65v_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x1e\n\x07port_id\x18\x03 \x01(\x0b\x32\r.context.Uuid\",\n\x08\x44\x65viceId\x12 \n\tdevice_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"(\n\x06LinkId\x12\x1e\n\x07link_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"K\n\x12TeraFlowController\x12\"\n\x06\x63tl_id\x18\x01 
\x01(\x0b\x32\x12.context.ContextId\x12\x11\n\tipaddress\x18\x02 \x01(\t\"Q\n\x14\x41uthenticationResult\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*N\n\x17\x44\x65viceOperationalStatus\x12\x0f\n\x0bKEEP_STATUS\x10\x00\x12\x15\n\x08\x44ISABLED\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x0b\n\x07\x45NABLED\x10\x01\x32\xa2\x01\n\x0e\x43ontextService\x12\x32\n\x0bGetTopology\x12\x0e.context.Empty\x1a\x11.context.Topology\"\x00\x12+\n\x07\x41\x64\x64Link\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nDeleteLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"F\n\x05\x45vent\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12*\n\nevent_type\x18\x02 \x01(\x0e\x32\x16.context.EventTypeEnum\"0\n\tContextId\x12#\n\x0c\x63ontext_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\xb6\x01\n\x07\x43ontext\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12)\n\x0ctopology_ids\x18\x02 \x03(\x0b\x32\x13.context.TopologyId\x12\'\n\x0bservice_ids\x18\x03 \x03(\x0b\x32\x12.context.ServiceId\x12/\n\ncontroller\x18\x04 \x01(\x0b\x32\x1b.context.TeraFlowController\"8\n\rContextIdList\x12\'\n\x0b\x63ontext_ids\x18\x01 \x03(\x0b\x32\x12.context.ContextId\"1\n\x0b\x43ontextList\x12\"\n\x08\x63ontexts\x18\x01 \x03(\x0b\x32\x10.context.Context\"U\n\x0c\x43ontextEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\ncontext_id\x18\x02 \x01(\x0b\x32\x12.context.ContextId\"Z\n\nTopologyId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12$\n\rtopology_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"~\n\x08Topology\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12%\n\ndevice_ids\x18\x02 \x03(\x0b\x32\x11.context.DeviceId\x12!\n\x08link_ids\x18\x03 \x03(\x0b\x32\x0f.context.LinkId\";\n\x0eTopologyIdList\x12)\n\x0ctopology_ids\x18\x01 \x03(\x0b\x32\x13.context.TopologyId\"5\n\x0cTopologyList\x12%\n\ntopologies\x18\x01 \x03(\x0b\x32\x11.context.Topology\"X\n\rTopologyEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12(\n\x0btopology_id\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\".\n\x08\x44\x65viceId\x12\"\n\x0b\x64\x65vice_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x9a\x02\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12G\n\x19\x64\x65vice_operational_status\x18\x04 
\x01(\x0e\x32$.context.DeviceOperationalStatusEnum\x12\x31\n\x0e\x64\x65vice_drivers\x18\x05 \x03(\x0e\x32\x19.context.DeviceDriverEnum\x12+\n\x10\x64\x65vice_endpoints\x18\x06 \x03(\x0b\x32\x11.context.EndPoint\"9\n\x0c\x44\x65viceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"5\n\x0c\x44\x65viceIdList\x12%\n\ndevice_ids\x18\x01 \x03(\x0b\x32\x11.context.DeviceId\".\n\nDeviceList\x12 \n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x0f.context.Device\"R\n\x0b\x44\x65viceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"*\n\x06LinkId\x12 \n\tlink_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"X\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12.\n\x11link_endpoint_ids\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"/\n\nLinkIdList\x12!\n\x08link_ids\x18\x01 \x03(\x0b\x32\x0f.context.LinkId\"(\n\x08LinkList\x12\x1c\n\x05links\x18\x01 \x03(\x0b\x32\r.context.Link\"L\n\tLinkEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12 \n\x07link_id\x18\x02 \x01(\x0b\x32\x0f.context.LinkId\"X\n\tServiceId\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12#\n\x0cservice_uuid\x18\x02 \x01(\x0b\x32\r.context.Uuid\"\xa6\x02\n\x07Service\x12&\n\nservice_id\x18\x01 \x01(\x0b\x32\x12.context.ServiceId\x12.\n\x0cservice_type\x18\x02 \x01(\x0e\x32\x18.context.ServiceTypeEnum\x12\x31\n\x14service_endpoint_ids\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\x30\n\x13service_constraints\x18\x04 \x03(\x0b\x32\x13.context.Constraint\x12.\n\x0eservice_status\x18\x05 \x01(\x0b\x32\x16.context.ServiceStatus\x12.\n\x0eservice_config\x18\x06 \x01(\x0b\x32\x16.context.ServiceConfig\"C\n\rServiceStatus\x12\x32\n\x0eservice_status\x18\x01 \x01(\x0e\x32\x1a.context.ServiceStatusEnum\":\n\rServiceConfig\x12)\n\x0c\x63onfig_rules\x18\x01 \x03(\x0b\x32\x13.context.ConfigRule\"8\n\rServiceIdList\x12\'\n\x0bservice_ids\x18\x01 
\x03(\x0b\x32\x12.context.ServiceId\"1\n\x0bServiceList\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"U\n\x0cServiceEvent\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.context.Event\x12&\n\nservice_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\"\x82\x01\n\nEndPointId\x12(\n\x0btopology_id\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12$\n\rendpoint_uuid\x18\x03 \x01(\x0b\x32\r.context.Uuid\"K\n\x08\x45ndPoint\x12(\n\x0b\x65ndpoint_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x15\n\rendpoint_type\x18\x02 \x01(\t\"e\n\nConfigRule\x12)\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32\x19.context.ConfigActionEnum\x12\x14\n\x0cresource_key\x18\x02 \x01(\t\x12\x16\n\x0eresource_value\x18\x03 \x01(\t\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"6\n\x0c\x43onnectionId\x12&\n\x0f\x63onnection_uuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x8d\x01\n\nConnection\x12,\n\rconnection_id\x18\x01 \x01(\x0b\x32\x15.context.ConnectionId\x12.\n\x12related_service_id\x18\x02 \x01(\x0b\x32\x12.context.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"A\n\x10\x43onnectionIdList\x12-\n\x0e\x63onnection_ids\x18\x01 \x03(\x0b\x32\x15.context.ConnectionId\":\n\x0e\x43onnectionList\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection\"^\n\x12TeraFlowController\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x12\n\nip_address\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\"U\n\x14\x41uthenticationResult\x12&\n\ncontext_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 
\x01(\x08*j\n\rEventTypeEnum\x12\x17\n\x13\x45VENTTYPE_UNDEFINED\x10\x00\x12\x14\n\x10\x45VENTTYPE_CREATE\x10\x01\x12\x14\n\x10\x45VENTTYPE_UPDATE\x10\x02\x12\x14\n\x10\x45VENTTYPE_REMOVE\x10\x03*\xc5\x01\n\x10\x44\x65viceDriverEnum\x12\x1a\n\x16\x44\x45VICEDRIVER_UNDEFINED\x10\x00\x12\x1b\n\x17\x44\x45VICEDRIVER_OPENCONFIG\x10\x01\x12\x1e\n\x1a\x44\x45VICEDRIVER_TRANSPORT_API\x10\x02\x12\x13\n\x0f\x44\x45VICEDRIVER_P4\x10\x03\x12&\n\"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY\x10\x04\x12\x1b\n\x17\x44\x45VICEDRIVER_ONF_TR_352\x10\x05*\x8f\x01\n\x1b\x44\x65viceOperationalStatusEnum\x12%\n!DEVICEOPERATIONALSTATUS_UNDEFINED\x10\x00\x12$\n DEVICEOPERATIONALSTATUS_DISABLED\x10\x01\x12#\n\x1f\x44\x45VICEOPERATIONALSTATUS_ENABLED\x10\x02*\x81\x01\n\x0fServiceTypeEnum\x12\x17\n\x13SERVICETYPE_UNKNOWN\x10\x00\x12\x14\n\x10SERVICETYPE_L3NM\x10\x01\x12\x14\n\x10SERVICETYPE_L2NM\x10\x02\x12)\n%SERVICETYPE_TAPI_CONNECTIVITY_SERVICE\x10\x03*\x88\x01\n\x11ServiceStatusEnum\x12\x1b\n\x17SERVICESTATUS_UNDEFINED\x10\x00\x12\x19\n\x15SERVICESTATUS_PLANNED\x10\x01\x12\x18\n\x14SERVICESTATUS_ACTIVE\x10\x02\x12!\n\x1dSERVICESTATUS_PENDING_REMOVAL\x10\x03*]\n\x10\x43onfigActionEnum\x12\x1a\n\x16\x43ONFIGACTION_UNDEFINED\x10\x00\x12\x14\n\x10\x43ONFIGACTION_SET\x10\x01\x12\x17\n\x13\x43ONFIGACTION_DELETE\x10\x02\x32\xa5\r\n\x0e\x43ontextService\x12:\n\x0eListContextIds\x12\x0e.context.Empty\x1a\x16.context.ContextIdList\"\x00\x12\x36\n\x0cListContexts\x12\x0e.context.Empty\x1a\x14.context.ContextList\"\x00\x12\x34\n\nGetContext\x12\x12.context.ContextId\x1a\x10.context.Context\"\x00\x12\x34\n\nSetContext\x12\x10.context.Context\x1a\x12.context.ContextId\"\x00\x12\x35\n\rRemoveContext\x12\x12.context.ContextId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetContextEvents\x12\x0e.context.Empty\x1a\x15.context.ContextEvent\"\x00\x30\x01\x12@\n\x0fListTopologyIds\x12\x12.context.ContextId\x1a\x17.context.TopologyIdList\"\x00\x12=\n\x0eListTopologies\x12\x12.context.ContextId\x1a\x15.context.TopologyList\"\x
00\x12\x37\n\x0bGetTopology\x12\x13.context.TopologyId\x1a\x11.context.Topology\"\x00\x12\x37\n\x0bSetTopology\x12\x11.context.Topology\x1a\x13.context.TopologyId\"\x00\x12\x37\n\x0eRemoveTopology\x12\x13.context.TopologyId\x1a\x0e.context.Empty\"\x00\x12?\n\x11GetTopologyEvents\x12\x0e.context.Empty\x1a\x16.context.TopologyEvent\"\x00\x30\x01\x12\x38\n\rListDeviceIds\x12\x0e.context.Empty\x1a\x15.context.DeviceIdList\"\x00\x12\x34\n\x0bListDevices\x12\x0e.context.Empty\x1a\x13.context.DeviceList\"\x00\x12\x31\n\tGetDevice\x12\x11.context.DeviceId\x1a\x0f.context.Device\"\x00\x12\x31\n\tSetDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0cRemoveDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12;\n\x0fGetDeviceEvents\x12\x0e.context.Empty\x1a\x14.context.DeviceEvent\"\x00\x30\x01\x12\x34\n\x0bListLinkIds\x12\x0e.context.Empty\x1a\x13.context.LinkIdList\"\x00\x12\x30\n\tListLinks\x12\x0e.context.Empty\x1a\x11.context.LinkList\"\x00\x12+\n\x07GetLink\x12\x0f.context.LinkId\x1a\r.context.Link\"\x00\x12+\n\x07SetLink\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nRemoveLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetLinkEvents\x12\x0e.context.Empty\x1a\x12.context.LinkEvent\"\x00\x30\x01\x12>\n\x0eListServiceIds\x12\x12.context.ContextId\x1a\x16.context.ServiceIdList\"\x00\x12:\n\x0cListServices\x12\x12.context.ContextId\x1a\x14.context.ServiceList\"\x00\x12\x34\n\nGetService\x12\x12.context.ServiceId\x1a\x10.context.Service\"\x00\x12\x34\n\nSetService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rRemoveService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12=\n\x10GetServiceEvents\x12\x0e.context.Empty\x1a\x15.context.ServiceEvent\"\x00\x30\x01\x62\x06proto3'
 )
 
-_DEVICEOPERATIONALSTATUS = _descriptor.EnumDescriptor(
-  name='DeviceOperationalStatus',
-  full_name='context.DeviceOperationalStatus',
+_EVENTTYPEENUM = _descriptor.EnumDescriptor(
+  name='EventTypeEnum',
+  full_name='context.EventTypeEnum',
   filename=None,
   file=DESCRIPTOR,
   create_key=_descriptor._internal_create_key,
   values=[
     _descriptor.EnumValueDescriptor(
-      name='KEEP_STATUS', index=0, number=0,
+      name='EVENTTYPE_UNDEFINED', index=0, number=0,
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
     _descriptor.EnumValueDescriptor(
-      name='DISABLED', index=1, number=-1,
+      name='EVENTTYPE_CREATE', index=1, number=1,
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
     _descriptor.EnumValueDescriptor(
-      name='ENABLED', index=2, number=1,
+      name='EVENTTYPE_UPDATE', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='EVENTTYPE_REMOVE', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3468,
+  serialized_end=3574,
+)
+_sym_db.RegisterEnumDescriptor(_EVENTTYPEENUM)
+
+EventTypeEnum = enum_type_wrapper.EnumTypeWrapper(_EVENTTYPEENUM)
+_DEVICEDRIVERENUM = _descriptor.EnumDescriptor(
+  name='DeviceDriverEnum',
+  full_name='context.DeviceDriverEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_OPENCONFIG', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_TRANSPORT_API', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_P4', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_IETF_NETWORK_TOPOLOGY', index=4, number=4,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEDRIVER_ONF_TR_352', index=5, number=5,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3577,
+  serialized_end=3774,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEDRIVERENUM)
+
+DeviceDriverEnum = enum_type_wrapper.EnumTypeWrapper(_DEVICEDRIVERENUM)
+_DEVICEOPERATIONALSTATUSENUM = _descriptor.EnumDescriptor(
+  name='DeviceOperationalStatusEnum',
+  full_name='context.DeviceOperationalStatusEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_DISABLED', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DEVICEOPERATIONALSTATUS_ENABLED', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3777,
+  serialized_end=3920,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUSENUM)
+
+DeviceOperationalStatusEnum = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUSENUM)
+_SERVICETYPEENUM = _descriptor.EnumDescriptor(
+  name='ServiceTypeEnum',
+  full_name='context.ServiceTypeEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_L3NM', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_L2NM', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICETYPE_TAPI_CONNECTIVITY_SERVICE', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=3923,
+  serialized_end=4052,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICETYPEENUM)
+
+ServiceTypeEnum = enum_type_wrapper.EnumTypeWrapper(_SERVICETYPEENUM)
+_SERVICESTATUSENUM = _descriptor.EnumDescriptor(
+  name='ServiceStatusEnum',
+  full_name='context.ServiceStatusEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_PLANNED', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_ACTIVE', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='SERVICESTATUS_PENDING_REMOVAL', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=4055,
+  serialized_end=4191,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICESTATUSENUM)
+
+ServiceStatusEnum = enum_type_wrapper.EnumTypeWrapper(_SERVICESTATUSENUM)
+_CONFIGACTIONENUM = _descriptor.EnumDescriptor(
+  name='ConfigActionEnum',
+  full_name='context.ConfigActionEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_UNDEFINED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_SET', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='CONFIGACTION_DELETE', index=2, number=2,
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=1271,
-  serialized_end=1349,
+  serialized_start=4193,
+  serialized_end=4286,
 )
-_sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUS)
+_sym_db.RegisterEnumDescriptor(_CONFIGACTIONENUM)
 
-DeviceOperationalStatus = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUS)
-KEEP_STATUS = 0
-DISABLED = -1
-ENABLED = 1
+ConfigActionEnum = enum_type_wrapper.EnumTypeWrapper(_CONFIGACTIONENUM)
+EVENTTYPE_UNDEFINED = 0
+EVENTTYPE_CREATE = 1
+EVENTTYPE_UPDATE = 2
+EVENTTYPE_REMOVE = 3
+DEVICEDRIVER_UNDEFINED = 0
+DEVICEDRIVER_OPENCONFIG = 1
+DEVICEDRIVER_TRANSPORT_API = 2
+DEVICEDRIVER_P4 = 3
+DEVICEDRIVER_IETF_NETWORK_TOPOLOGY = 4
+DEVICEDRIVER_ONF_TR_352 = 5
+DEVICEOPERATIONALSTATUS_UNDEFINED = 0
+DEVICEOPERATIONALSTATUS_DISABLED = 1
+DEVICEOPERATIONALSTATUS_ENABLED = 2
+SERVICETYPE_UNKNOWN = 0
+SERVICETYPE_L3NM = 1
+SERVICETYPE_L2NM = 2
+SERVICETYPE_TAPI_CONNECTIVITY_SERVICE = 3
+SERVICESTATUS_UNDEFINED = 0
+SERVICESTATUS_PLANNED = 1
+SERVICESTATUS_ACTIVE = 2
+SERVICESTATUS_PENDING_REMOVAL = 3
+CONFIGACTION_UNDEFINED = 0
+CONFIGACTION_SET = 1
+CONFIGACTION_DELETE = 2
 
 
 
@@ -85,32 +291,57 @@ _EMPTY = _descriptor.Descriptor(
 )
 
 
-_CONTEXT = _descriptor.Descriptor(
-  name='Context',
-  full_name='context.Context',
+_UUID = _descriptor.Descriptor(
+  name='Uuid',
+  full_name='context.Uuid',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='contextId', full_name='context.Context.contextId', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='uuid', full_name='context.Uuid.uuid', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=35,
+  serialized_end=55,
+)
+
+
+_EVENT = _descriptor.Descriptor(
+  name='Event',
+  full_name='context.Event',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='topo', full_name='context.Context.topo', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='timestamp', full_name='context.Event.timestamp', index=0,
+      number=1, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=float(0),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='ctl', full_name='context.Context.ctl', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='event_type', full_name='context.Event.event_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -126,8 +357,8 @@ _CONTEXT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=35,
-  serialized_end=158,
+  serialized_start=57,
+  serialized_end=127,
 )
 
 
@@ -140,7 +371,7 @@ _CONTEXTID = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='contextUuid', full_name='context.ContextId.contextUuid', index=0,
+      name='context_uuid', full_name='context.ContextId.context_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
@@ -158,40 +389,47 @@ _CONTEXTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=160,
-  serialized_end=207,
+  serialized_start=129,
+  serialized_end=177,
 )
 
 
-_TOPOLOGY = _descriptor.Descriptor(
-  name='Topology',
-  full_name='context.Topology',
+_CONTEXT = _descriptor.Descriptor(
+  name='Context',
+  full_name='context.Context',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topoId', full_name='context.Topology.topoId', index=0,
-      number=2, type=11, cpp_type=10, label=1,
+      name='context_id', full_name='context.Context.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device', full_name='context.Topology.device', index=1,
-      number=3, type=11, cpp_type=10, label=3,
+      name='topology_ids', full_name='context.Context.topology_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='link', full_name='context.Topology.link', index=2,
-      number=4, type=11, cpp_type=10, label=3,
+      name='service_ids', full_name='context.Context.service_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='controller', full_name='context.Context.controller', index=3,
+      number=4, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -204,29 +442,22 @@ _TOPOLOGY = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=209,
-  serialized_end=318,
+  serialized_start=180,
+  serialized_end=362,
 )
 
 
-_LINK = _descriptor.Descriptor(
-  name='Link',
-  full_name='context.Link',
+_CONTEXTIDLIST = _descriptor.Descriptor(
+  name='ContextIdList',
+  full_name='context.ContextIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='link_id', full_name='context.Link.link_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpointList', full_name='context.Link.endpointList', index=1,
-      number=2, type=11, cpp_type=10, label=3,
+      name='context_ids', full_name='context.ContextIdList.context_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -243,30 +474,23 @@ _LINK = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=320,
-  serialized_end=403,
+  serialized_start=364,
+  serialized_end=420,
 )
 
 
-_TOPOLOGYID = _descriptor.Descriptor(
-  name='TopologyId',
-  full_name='context.TopologyId',
+_CONTEXTLIST = _descriptor.Descriptor(
+  name='ContextList',
+  full_name='context.ContextList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='contextId', full_name='context.TopologyId.contextId', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='topoId', full_name='context.TopologyId.topoId', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='contexts', full_name='context.ContextList.contexts', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -282,30 +506,30 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=405,
-  serialized_end=487,
+  serialized_start=422,
+  serialized_end=471,
 )
 
 
-_CONSTRAINT = _descriptor.Descriptor(
-  name='Constraint',
-  full_name='context.Constraint',
+_CONTEXTEVENT = _descriptor.Descriptor(
+  name='ContextEvent',
+  full_name='context.ContextEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='event', full_name='context.ContextEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='context_id', full_name='context.ContextEvent.context_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -321,54 +545,33 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=489,
-  serialized_end=552,
+  serialized_start=473,
+  serialized_end=558,
 )
 
 
-_DEVICE = _descriptor.Descriptor(
-  name='Device',
-  full_name='context.Device',
+_TOPOLOGYID = _descriptor.Descriptor(
+  name='TopologyId',
+  full_name='context.TopologyId',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.Device.device_id', index=0,
+      name='context_id', full_name='context.TopologyId.context_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='device_type', full_name='context.Device.device_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='device_config', full_name='context.Device.device_config', index=2,
-      number=3, type=11, cpp_type=10, label=1,
+      name='topology_uuid', full_name='context.TopologyId.topology_uuid', index=1,
+      number=2, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='devOperationalStatus', full_name='context.Device.devOperationalStatus', index=3,
-      number=4, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='endpointList', full_name='context.Device.endpointList', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -381,23 +584,37 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=555,
-  serialized_end=773,
+  serialized_start=560,
+  serialized_end=650,
 )
 
 
-_DEVICECONFIG = _descriptor.Descriptor(
-  name='DeviceConfig',
-  full_name='context.DeviceConfig',
+_TOPOLOGY = _descriptor.Descriptor(
+  name='Topology',
+  full_name='context.Topology',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='device_config', full_name='context.DeviceConfig.device_config', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='topology_id', full_name='context.Topology.topology_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_ids', full_name='context.Topology.device_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_ids', full_name='context.Topology.link_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -413,30 +630,55 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=775,
-  serialized_end=812,
+  serialized_start=652,
+  serialized_end=778,
 )
 
 
-_ENDPOINT = _descriptor.Descriptor(
-  name='EndPoint',
-  full_name='context.EndPoint',
+_TOPOLOGYIDLIST = _descriptor.Descriptor(
+  name='TopologyIdList',
+  full_name='context.TopologyIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='port_id', full_name='context.EndPoint.port_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='topology_ids', full_name='context.TopologyIdList.topology_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=780,
+  serialized_end=839,
+)
+
+
+_TOPOLOGYLIST = _descriptor.Descriptor(
+  name='TopologyList',
+  full_name='context.TopologyList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='port_type', full_name='context.EndPoint.port_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='topologies', full_name='context.TopologyList.topologies', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -452,40 +694,33 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=814,
-  serialized_end=881,
+  serialized_start=841,
+  serialized_end=894,
 )
 
 
-_ENDPOINTID = _descriptor.Descriptor(
-  name='EndPointId',
-  full_name='context.EndPointId',
+_TOPOLOGYEVENT = _descriptor.Descriptor(
+  name='TopologyEvent',
+  full_name='context.TopologyEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='topoId', full_name='context.EndPointId.topoId', index=0,
+      name='event', full_name='context.TopologyEvent.event', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='dev_id', full_name='context.EndPointId.dev_id', index=1,
+      name='topology_id', full_name='context.TopologyEvent.topology_id', index=1,
       number=2, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
-    _descriptor.FieldDescriptor(
-      name='port_id', full_name='context.EndPointId.port_id', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -498,8 +733,8 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=883,
-  serialized_end=999,
+  serialized_start=896,
+  serialized_end=984,
 )
 
 
@@ -512,7 +747,7 @@ _DEVICEID = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='device_id', full_name='context.DeviceId.device_id', index=0,
+      name='device_uuid', full_name='context.DeviceId.device_uuid', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
@@ -530,26 +765,61 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1001,
-  serialized_end=1045,
+  serialized_start=986,
+  serialized_end=1032,
 )
 
 
-_LINKID = _descriptor.Descriptor(
-  name='LinkId',
-  full_name='context.LinkId',
+_DEVICE = _descriptor.Descriptor(
+  name='Device',
+  full_name='context.Device',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='link_id', full_name='context.LinkId.link_id', index=0,
+      name='device_id', full_name='context.Device.device_id', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_type', full_name='context.Device.device_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.Device.device_config', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_operational_status', full_name='context.Device.device_operational_status', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_drivers', full_name='context.Device.device_drivers', index=4,
+      number=5, type=14, cpp_type=8, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_endpoints', full_name='context.Device.device_endpoints', index=5,
+      number=6, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
   ],
   extensions=[
   ],
@@ -562,23 +832,23 @@ _LINKID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1047,
-  serialized_end=1087,
+  serialized_start=1035,
+  serialized_end=1317,
 )
 
 
-_UUID = _descriptor.Descriptor(
-  name='Uuid',
-  full_name='context.Uuid',
+_DEVICECONFIG = _descriptor.Descriptor(
+  name='DeviceConfig',
+  full_name='context.DeviceConfig',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='uuid', full_name='context.Uuid.uuid', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='config_rules', full_name='context.DeviceConfig.config_rules', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -594,30 +864,55 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1089,
-  serialized_end=1109,
+  serialized_start=1319,
+  serialized_end=1376,
 )
 
 
-_TERAFLOWCONTROLLER = _descriptor.Descriptor(
-  name='TeraFlowController',
-  full_name='context.TeraFlowController',
+_DEVICEIDLIST = _descriptor.Descriptor(
+  name='DeviceIdList',
+  full_name='context.DeviceIdList',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='ctl_id', full_name='context.TeraFlowController.ctl_id', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
+      name='device_ids', full_name='context.DeviceIdList.device_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1378,
+  serialized_end=1431,
+)
+
+
+_DEVICELIST = _descriptor.Descriptor(
+  name='DeviceList',
+  full_name='context.DeviceList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
     _descriptor.FieldDescriptor(
-      name='ipaddress', full_name='context.TeraFlowController.ipaddress', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      name='devices', full_name='context.DeviceList.devices', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -633,30 +928,30 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1111,
-  serialized_end=1186,
+  serialized_start=1433,
+  serialized_end=1479,
 )
 
 
-_AUTHENTICATIONRESULT = _descriptor.Descriptor(
-  name='AuthenticationResult',
-  full_name='context.AuthenticationResult',
+_DEVICEEVENT = _descriptor.Descriptor(
+  name='DeviceEvent',
+  full_name='context.DeviceEvent',
   filename=None,
   file=DESCRIPTOR,
   containing_type=None,
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='ctl_id', full_name='context.AuthenticationResult.ctl_id', index=0,
+      name='event', full_name='context.DeviceEvent.event', index=0,
       number=1, type=11, cpp_type=10, label=1,
       has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
     _descriptor.FieldDescriptor(
-      name='authenticated', full_name='context.AuthenticationResult.authenticated', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
+      name='device_id', full_name='context.DeviceEvent.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
@@ -672,100 +967,1064 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1188,
-  serialized_end=1269,
+  serialized_start=1481,
+  serialized_end=1563,
 )
 
-_CONTEXT.fields_by_name['contextId'].message_type = _CONTEXTID
-_CONTEXT.fields_by_name['topo'].message_type = _TOPOLOGY
-_CONTEXT.fields_by_name['ctl'].message_type = _TERAFLOWCONTROLLER
-_CONTEXTID.fields_by_name['contextUuid'].message_type = _UUID
-_TOPOLOGY.fields_by_name['topoId'].message_type = _TOPOLOGYID
-_TOPOLOGY.fields_by_name['device'].message_type = _DEVICE
-_TOPOLOGY.fields_by_name['link'].message_type = _LINK
-_LINK.fields_by_name['link_id'].message_type = _LINKID
-_LINK.fields_by_name['endpointList'].message_type = _ENDPOINTID
-_TOPOLOGYID.fields_by_name['contextId'].message_type = _CONTEXTID
-_TOPOLOGYID.fields_by_name['topoId'].message_type = _UUID
-_DEVICE.fields_by_name['device_id'].message_type = _DEVICEID
-_DEVICE.fields_by_name['device_config'].message_type = _DEVICECONFIG
-_DEVICE.fields_by_name['devOperationalStatus'].enum_type = _DEVICEOPERATIONALSTATUS
-_DEVICE.fields_by_name['endpointList'].message_type = _ENDPOINT
-_ENDPOINT.fields_by_name['port_id'].message_type = _ENDPOINTID
-_ENDPOINTID.fields_by_name['topoId'].message_type = _TOPOLOGYID
-_ENDPOINTID.fields_by_name['dev_id'].message_type = _DEVICEID
-_ENDPOINTID.fields_by_name['port_id'].message_type = _UUID
-_DEVICEID.fields_by_name['device_id'].message_type = _UUID
-_LINKID.fields_by_name['link_id'].message_type = _UUID
-_TERAFLOWCONTROLLER.fields_by_name['ctl_id'].message_type = _CONTEXTID
-_AUTHENTICATIONRESULT.fields_by_name['ctl_id'].message_type = _CONTEXTID
-DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
-DESCRIPTOR.message_types_by_name['Context'] = _CONTEXT
-DESCRIPTOR.message_types_by_name['ContextId'] = _CONTEXTID
-DESCRIPTOR.message_types_by_name['Topology'] = _TOPOLOGY
-DESCRIPTOR.message_types_by_name['Link'] = _LINK
-DESCRIPTOR.message_types_by_name['TopologyId'] = _TOPOLOGYID
-DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
-DESCRIPTOR.message_types_by_name['Device'] = _DEVICE
-DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
-DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
-DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
-DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
-DESCRIPTOR.message_types_by_name['LinkId'] = _LINKID
-DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
-DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
-DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
-DESCRIPTOR.enum_types_by_name['DeviceOperationalStatus'] = _DEVICEOPERATIONALSTATUS
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
-  'DESCRIPTOR' : _EMPTY,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Empty)
-  })
-_sym_db.RegisterMessage(Empty)
-
-Context = _reflection.GeneratedProtocolMessageType('Context', (_message.Message,), {
-  'DESCRIPTOR' : _CONTEXT,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Context)
-  })
-_sym_db.RegisterMessage(Context)
 
-ContextId = _reflection.GeneratedProtocolMessageType('ContextId', (_message.Message,), {
-  'DESCRIPTOR' : _CONTEXTID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.ContextId)
-  })
-_sym_db.RegisterMessage(ContextId)
+_LINKID = _descriptor.Descriptor(
+  name='LinkId',
+  full_name='context.LinkId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_uuid', full_name='context.LinkId.link_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1565,
+  serialized_end=1607,
+)
 
-Topology = _reflection.GeneratedProtocolMessageType('Topology', (_message.Message,), {
-  'DESCRIPTOR' : _TOPOLOGY,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Topology)
-  })
-_sym_db.RegisterMessage(Topology)
 
-Link = _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), {
-  'DESCRIPTOR' : _LINK,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Link)
-  })
-_sym_db.RegisterMessage(Link)
+_LINK = _descriptor.Descriptor(
+  name='Link',
+  full_name='context.Link',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.Link.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_endpoint_ids', full_name='context.Link.link_endpoint_ids', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1609,
+  serialized_end=1697,
+)
 
-TopologyId = _reflection.GeneratedProtocolMessageType('TopologyId', (_message.Message,), {
-  'DESCRIPTOR' : _TOPOLOGYID,
-  '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.TopologyId)
-  })
-_sym_db.RegisterMessage(TopologyId)
 
-Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
-  'DESCRIPTOR' : _CONSTRAINT,
+_LINKIDLIST = _descriptor.Descriptor(
+  name='LinkIdList',
+  full_name='context.LinkIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_ids', full_name='context.LinkIdList.link_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1699,
+  serialized_end=1746,
+)
+
+
+_LINKLIST = _descriptor.Descriptor(
+  name='LinkList',
+  full_name='context.LinkList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='links', full_name='context.LinkList.links', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1748,
+  serialized_end=1788,
+)
+
+
+_LINKEVENT = _descriptor.Descriptor(
+  name='LinkEvent',
+  full_name='context.LinkEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.LinkEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.LinkEvent.link_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1790,
+  serialized_end=1866,
+)
+
+
+_SERVICEID = _descriptor.Descriptor(
+  name='ServiceId',
+  full_name='context.ServiceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.ServiceId.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_uuid', full_name='context.ServiceId.service_uuid', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1868,
+  serialized_end=1956,
+)
+
+
+_SERVICE = _descriptor.Descriptor(
+  name='Service',
+  full_name='context.Service',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_id', full_name='context.Service.service_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_type', full_name='context.Service.service_type', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_endpoint_ids', full_name='context.Service.service_endpoint_ids', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_constraints', full_name='context.Service.service_constraints', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_status', full_name='context.Service.service_status', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_config', full_name='context.Service.service_config', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1959,
+  serialized_end=2253,
+)
+
+
+_SERVICESTATUS = _descriptor.Descriptor(
+  name='ServiceStatus',
+  full_name='context.ServiceStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_status', full_name='context.ServiceStatus.service_status', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2255,
+  serialized_end=2322,
+)
+
+
+_SERVICECONFIG = _descriptor.Descriptor(
+  name='ServiceConfig',
+  full_name='context.ServiceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='config_rules', full_name='context.ServiceConfig.config_rules', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2324,
+  serialized_end=2382,
+)
+
+
+_SERVICEIDLIST = _descriptor.Descriptor(
+  name='ServiceIdList',
+  full_name='context.ServiceIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='service_ids', full_name='context.ServiceIdList.service_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2384,
+  serialized_end=2440,
+)
+
+
+_SERVICELIST = _descriptor.Descriptor(
+  name='ServiceList',
+  full_name='context.ServiceList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='services', full_name='context.ServiceList.services', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2442,
+  serialized_end=2491,
+)
+
+
+_SERVICEEVENT = _descriptor.Descriptor(
+  name='ServiceEvent',
+  full_name='context.ServiceEvent',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='event', full_name='context.ServiceEvent.event', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='service_id', full_name='context.ServiceEvent.service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2493,
+  serialized_end=2578,
+)
+
+
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topology_id', full_name='context.EndPointId.topology_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.EndPointId.device_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_uuid', full_name='context.EndPointId.endpoint_uuid', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2581,
+  serialized_end=2711,
+)
+
+
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='endpoint_id', full_name='context.EndPoint.endpoint_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpoint_type', full_name='context.EndPoint.endpoint_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2713,
+  serialized_end=2788,
+)
+
+
+_CONFIGRULE = _descriptor.Descriptor(
+  name='ConfigRule',
+  full_name='context.ConfigRule',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='action', full_name='context.ConfigRule.action', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_key', full_name='context.ConfigRule.resource_key', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='resource_value', full_name='context.ConfigRule.resource_value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2790,
+  serialized_end=2891,
+)
+
+
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2893,
+  serialized_end=2956,
+)
+
+
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='context.ConnectionId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_uuid', full_name='context.ConnectionId.connection_uuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=2958,
+  serialized_end=3012,
+)
+
+
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='context.Connection',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_id', full_name='context.Connection.connection_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='related_service_id', full_name='context.Connection.related_service_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path', full_name='context.Connection.path', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3015,
+  serialized_end=3156,
+)
+
+
+_CONNECTIONIDLIST = _descriptor.Descriptor(
+  name='ConnectionIdList',
+  full_name='context.ConnectionIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connection_ids', full_name='context.ConnectionIdList.connection_ids', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3158,
+  serialized_end=3223,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='context.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connections', full_name='context.ConnectionList.connections', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3225,
+  serialized_end=3283,
+)
+
+
+_TERAFLOWCONTROLLER = _descriptor.Descriptor(
+  name='TeraFlowController',
+  full_name='context.TeraFlowController',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.TeraFlowController.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ip_address', full_name='context.TeraFlowController.ip_address', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port', full_name='context.TeraFlowController.port', index=2,
+      number=3, type=13, cpp_type=3, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3285,
+  serialized_end=3379,
+)
+
+
+_AUTHENTICATIONRESULT = _descriptor.Descriptor(
+  name='AuthenticationResult',
+  full_name='context.AuthenticationResult',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='context_id', full_name='context.AuthenticationResult.context_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='authenticated', full_name='context.AuthenticationResult.authenticated', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=3381,
+  serialized_end=3466,
+)
+
+_EVENT.fields_by_name['event_type'].enum_type = _EVENTTYPEENUM
+_CONTEXTID.fields_by_name['context_uuid'].message_type = _UUID
+_CONTEXT.fields_by_name['context_id'].message_type = _CONTEXTID
+_CONTEXT.fields_by_name['topology_ids'].message_type = _TOPOLOGYID
+_CONTEXT.fields_by_name['service_ids'].message_type = _SERVICEID
+_CONTEXT.fields_by_name['controller'].message_type = _TERAFLOWCONTROLLER
+_CONTEXTIDLIST.fields_by_name['context_ids'].message_type = _CONTEXTID
+_CONTEXTLIST.fields_by_name['contexts'].message_type = _CONTEXT
+_CONTEXTEVENT.fields_by_name['event'].message_type = _EVENT
+_CONTEXTEVENT.fields_by_name['context_id'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['context_id'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['topology_uuid'].message_type = _UUID
+_TOPOLOGY.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_TOPOLOGY.fields_by_name['device_ids'].message_type = _DEVICEID
+_TOPOLOGY.fields_by_name['link_ids'].message_type = _LINKID
+_TOPOLOGYIDLIST.fields_by_name['topology_ids'].message_type = _TOPOLOGYID
+_TOPOLOGYLIST.fields_by_name['topologies'].message_type = _TOPOLOGY
+_TOPOLOGYEVENT.fields_by_name['event'].message_type = _EVENT
+_TOPOLOGYEVENT.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_DEVICEID.fields_by_name['device_uuid'].message_type = _UUID
+_DEVICE.fields_by_name['device_id'].message_type = _DEVICEID
+_DEVICE.fields_by_name['device_config'].message_type = _DEVICECONFIG
+_DEVICE.fields_by_name['device_operational_status'].enum_type = _DEVICEOPERATIONALSTATUSENUM
+_DEVICE.fields_by_name['device_drivers'].enum_type = _DEVICEDRIVERENUM
+_DEVICE.fields_by_name['device_endpoints'].message_type = _ENDPOINT
+_DEVICECONFIG.fields_by_name['config_rules'].message_type = _CONFIGRULE
+_DEVICEIDLIST.fields_by_name['device_ids'].message_type = _DEVICEID
+_DEVICELIST.fields_by_name['devices'].message_type = _DEVICE
+_DEVICEEVENT.fields_by_name['event'].message_type = _EVENT
+_DEVICEEVENT.fields_by_name['device_id'].message_type = _DEVICEID
+_LINKID.fields_by_name['link_uuid'].message_type = _UUID
+_LINK.fields_by_name['link_id'].message_type = _LINKID
+_LINK.fields_by_name['link_endpoint_ids'].message_type = _ENDPOINTID
+_LINKIDLIST.fields_by_name['link_ids'].message_type = _LINKID
+_LINKLIST.fields_by_name['links'].message_type = _LINK
+_LINKEVENT.fields_by_name['event'].message_type = _EVENT
+_LINKEVENT.fields_by_name['link_id'].message_type = _LINKID
+_SERVICEID.fields_by_name['context_id'].message_type = _CONTEXTID
+_SERVICEID.fields_by_name['service_uuid'].message_type = _UUID
+_SERVICE.fields_by_name['service_id'].message_type = _SERVICEID
+_SERVICE.fields_by_name['service_type'].enum_type = _SERVICETYPEENUM
+_SERVICE.fields_by_name['service_endpoint_ids'].message_type = _ENDPOINTID
+_SERVICE.fields_by_name['service_constraints'].message_type = _CONSTRAINT
+_SERVICE.fields_by_name['service_status'].message_type = _SERVICESTATUS
+_SERVICE.fields_by_name['service_config'].message_type = _SERVICECONFIG
+_SERVICESTATUS.fields_by_name['service_status'].enum_type = _SERVICESTATUSENUM
+_SERVICECONFIG.fields_by_name['config_rules'].message_type = _CONFIGRULE
+_SERVICEIDLIST.fields_by_name['service_ids'].message_type = _SERVICEID
+_SERVICELIST.fields_by_name['services'].message_type = _SERVICE
+_SERVICEEVENT.fields_by_name['event'].message_type = _EVENT
+_SERVICEEVENT.fields_by_name['service_id'].message_type = _SERVICEID
+_ENDPOINTID.fields_by_name['topology_id'].message_type = _TOPOLOGYID
+_ENDPOINTID.fields_by_name['device_id'].message_type = _DEVICEID
+_ENDPOINTID.fields_by_name['endpoint_uuid'].message_type = _UUID
+_ENDPOINT.fields_by_name['endpoint_id'].message_type = _ENDPOINTID
+_CONFIGRULE.fields_by_name['action'].enum_type = _CONFIGACTIONENUM
+_CONNECTIONID.fields_by_name['connection_uuid'].message_type = _UUID
+_CONNECTION.fields_by_name['connection_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['related_service_id'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path'].message_type = _ENDPOINTID
+_CONNECTIONIDLIST.fields_by_name['connection_ids'].message_type = _CONNECTIONID
+_CONNECTIONLIST.fields_by_name['connections'].message_type = _CONNECTION
+_TERAFLOWCONTROLLER.fields_by_name['context_id'].message_type = _CONTEXTID
+_AUTHENTICATIONRESULT.fields_by_name['context_id'].message_type = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
+DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
+DESCRIPTOR.message_types_by_name['Event'] = _EVENT
+DESCRIPTOR.message_types_by_name['ContextId'] = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Context'] = _CONTEXT
+DESCRIPTOR.message_types_by_name['ContextIdList'] = _CONTEXTIDLIST
+DESCRIPTOR.message_types_by_name['ContextList'] = _CONTEXTLIST
+DESCRIPTOR.message_types_by_name['ContextEvent'] = _CONTEXTEVENT
+DESCRIPTOR.message_types_by_name['TopologyId'] = _TOPOLOGYID
+DESCRIPTOR.message_types_by_name['Topology'] = _TOPOLOGY
+DESCRIPTOR.message_types_by_name['TopologyIdList'] = _TOPOLOGYIDLIST
+DESCRIPTOR.message_types_by_name['TopologyList'] = _TOPOLOGYLIST
+DESCRIPTOR.message_types_by_name['TopologyEvent'] = _TOPOLOGYEVENT
+DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
+DESCRIPTOR.message_types_by_name['Device'] = _DEVICE
+DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
+DESCRIPTOR.message_types_by_name['DeviceIdList'] = _DEVICEIDLIST
+DESCRIPTOR.message_types_by_name['DeviceList'] = _DEVICELIST
+DESCRIPTOR.message_types_by_name['DeviceEvent'] = _DEVICEEVENT
+DESCRIPTOR.message_types_by_name['LinkId'] = _LINKID
+DESCRIPTOR.message_types_by_name['Link'] = _LINK
+DESCRIPTOR.message_types_by_name['LinkIdList'] = _LINKIDLIST
+DESCRIPTOR.message_types_by_name['LinkList'] = _LINKLIST
+DESCRIPTOR.message_types_by_name['LinkEvent'] = _LINKEVENT
+DESCRIPTOR.message_types_by_name['ServiceId'] = _SERVICEID
+DESCRIPTOR.message_types_by_name['Service'] = _SERVICE
+DESCRIPTOR.message_types_by_name['ServiceStatus'] = _SERVICESTATUS
+DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
+DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
+DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
+DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['ConfigRule'] = _CONFIGRULE
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
+DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
+DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
+DESCRIPTOR.message_types_by_name['ConnectionIdList'] = _CONNECTIONIDLIST
+DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
+DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
+DESCRIPTOR.enum_types_by_name['EventTypeEnum'] = _EVENTTYPEENUM
+DESCRIPTOR.enum_types_by_name['DeviceDriverEnum'] = _DEVICEDRIVERENUM
+DESCRIPTOR.enum_types_by_name['DeviceOperationalStatusEnum'] = _DEVICEOPERATIONALSTATUSENUM
+DESCRIPTOR.enum_types_by_name['ServiceTypeEnum'] = _SERVICETYPEENUM
+DESCRIPTOR.enum_types_by_name['ServiceStatusEnum'] = _SERVICESTATUSENUM
+DESCRIPTOR.enum_types_by_name['ConfigActionEnum'] = _CONFIGACTIONENUM
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
+  'DESCRIPTOR' : _EMPTY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Empty)
+  })
+_sym_db.RegisterMessage(Empty)
+
+Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
+  'DESCRIPTOR' : _UUID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Uuid)
+  })
+_sym_db.RegisterMessage(Uuid)
+
+Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), {
+  'DESCRIPTOR' : _EVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Event)
+  })
+_sym_db.RegisterMessage(Event)
+
+ContextId = _reflection.GeneratedProtocolMessageType('ContextId', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextId)
+  })
+_sym_db.RegisterMessage(ContextId)
+
+Context = _reflection.GeneratedProtocolMessageType('Context', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Context)
+  })
+_sym_db.RegisterMessage(Context)
+
+ContextIdList = _reflection.GeneratedProtocolMessageType('ContextIdList', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextIdList)
+  })
+_sym_db.RegisterMessage(ContextIdList)
+
+ContextList = _reflection.GeneratedProtocolMessageType('ContextList', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextList)
+  })
+_sym_db.RegisterMessage(ContextList)
+
+ContextEvent = _reflection.GeneratedProtocolMessageType('ContextEvent', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextEvent)
+  })
+_sym_db.RegisterMessage(ContextEvent)
+
+TopologyId = _reflection.GeneratedProtocolMessageType('TopologyId', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyId)
+  })
+_sym_db.RegisterMessage(TopologyId)
+
+Topology = _reflection.GeneratedProtocolMessageType('Topology', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Topology)
+  })
+_sym_db.RegisterMessage(Topology)
+
+TopologyIdList = _reflection.GeneratedProtocolMessageType('TopologyIdList', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyIdList)
+  })
+_sym_db.RegisterMessage(TopologyIdList)
+
+TopologyList = _reflection.GeneratedProtocolMessageType('TopologyList', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYLIST,
   '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Constraint)
+  # @@protoc_insertion_point(class_scope:context.TopologyList)
   })
-_sym_db.RegisterMessage(Constraint)
+_sym_db.RegisterMessage(TopologyList)
+
+TopologyEvent = _reflection.GeneratedProtocolMessageType('TopologyEvent', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyEvent)
+  })
+_sym_db.RegisterMessage(TopologyEvent)
+
+DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceId)
+  })
+_sym_db.RegisterMessage(DeviceId)
 
 Device = _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), {
   'DESCRIPTOR' : _DEVICE,
@@ -781,26 +2040,26 @@ DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_messag
   })
 _sym_db.RegisterMessage(DeviceConfig)
 
-EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINT,
+DeviceIdList = _reflection.GeneratedProtocolMessageType('DeviceIdList', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEIDLIST,
   '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  # @@protoc_insertion_point(class_scope:context.DeviceIdList)
   })
-_sym_db.RegisterMessage(EndPoint)
+_sym_db.RegisterMessage(DeviceIdList)
 
-EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
-  'DESCRIPTOR' : _ENDPOINTID,
+DeviceList = _reflection.GeneratedProtocolMessageType('DeviceList', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICELIST,
   '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  # @@protoc_insertion_point(class_scope:context.DeviceList)
   })
-_sym_db.RegisterMessage(EndPointId)
+_sym_db.RegisterMessage(DeviceList)
 
-DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Message,), {
-  'DESCRIPTOR' : _DEVICEID,
+DeviceEvent = _reflection.GeneratedProtocolMessageType('DeviceEvent', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEEVENT,
   '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.DeviceId)
+  # @@protoc_insertion_point(class_scope:context.DeviceEvent)
   })
-_sym_db.RegisterMessage(DeviceId)
+_sym_db.RegisterMessage(DeviceEvent)
 
 LinkId = _reflection.GeneratedProtocolMessageType('LinkId', (_message.Message,), {
   'DESCRIPTOR' : _LINKID,
@@ -809,12 +2068,138 @@ LinkId = _reflection.GeneratedProtocolMessageType('LinkId', (_message.Message,),
   })
 _sym_db.RegisterMessage(LinkId)
 
-Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
-  'DESCRIPTOR' : _UUID,
+Link = _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), {
+  'DESCRIPTOR' : _LINK,
   '__module__' : 'context_pb2'
-  # @@protoc_insertion_point(class_scope:context.Uuid)
+  # @@protoc_insertion_point(class_scope:context.Link)
   })
-_sym_db.RegisterMessage(Uuid)
+_sym_db.RegisterMessage(Link)
+
+LinkIdList = _reflection.GeneratedProtocolMessageType('LinkIdList', (_message.Message,), {
+  'DESCRIPTOR' : _LINKIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkIdList)
+  })
+_sym_db.RegisterMessage(LinkIdList)
+
+LinkList = _reflection.GeneratedProtocolMessageType('LinkList', (_message.Message,), {
+  'DESCRIPTOR' : _LINKLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkList)
+  })
+_sym_db.RegisterMessage(LinkList)
+
+LinkEvent = _reflection.GeneratedProtocolMessageType('LinkEvent', (_message.Message,), {
+  'DESCRIPTOR' : _LINKEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkEvent)
+  })
+_sym_db.RegisterMessage(LinkEvent)
+
+ServiceId = _reflection.GeneratedProtocolMessageType('ServiceId', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceId)
+  })
+_sym_db.RegisterMessage(ServiceId)
+
+Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Service)
+  })
+_sym_db.RegisterMessage(Service)
+
+ServiceStatus = _reflection.GeneratedProtocolMessageType('ServiceStatus', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICESTATUS,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceStatus)
+  })
+_sym_db.RegisterMessage(ServiceStatus)
+
+ServiceConfig = _reflection.GeneratedProtocolMessageType('ServiceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICECONFIG,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceConfig)
+  })
+_sym_db.RegisterMessage(ServiceConfig)
+
+ServiceIdList = _reflection.GeneratedProtocolMessageType('ServiceIdList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceIdList)
+  })
+_sym_db.RegisterMessage(ServiceIdList)
+
+ServiceList = _reflection.GeneratedProtocolMessageType('ServiceList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICELIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceList)
+  })
+_sym_db.RegisterMessage(ServiceList)
+
+ServiceEvent = _reflection.GeneratedProtocolMessageType('ServiceEvent', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEEVENT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ServiceEvent)
+  })
+_sym_db.RegisterMessage(ServiceEvent)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+ConfigRule = _reflection.GeneratedProtocolMessageType('ConfigRule', (_message.Message,), {
+  'DESCRIPTOR' : _CONFIGRULE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConfigRule)
+  })
+_sym_db.RegisterMessage(ConfigRule)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
+ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionId)
+  })
+_sym_db.RegisterMessage(ConnectionId)
+
+Connection = _reflection.GeneratedProtocolMessageType('Connection', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTION,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Connection)
+  })
+_sym_db.RegisterMessage(Connection)
+
+ConnectionIdList = _reflection.GeneratedProtocolMessageType('ConnectionIdList', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONIDLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionIdList)
+  })
+_sym_db.RegisterMessage(ConnectionIdList)
+
+ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONLIST,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ConnectionList)
+  })
+_sym_db.RegisterMessage(ConnectionList)
 
 TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
   'DESCRIPTOR' : _TERAFLOWCONTROLLER,
@@ -839,39 +2224,309 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=1352,
-  serialized_end=1514,
+  serialized_start=4289,
+  serialized_end=5990,
   methods=[
   _descriptor.MethodDescriptor(
-    name='GetTopology',
-    full_name='context.ContextService.GetTopology',
+    name='ListContextIds',
+    full_name='context.ContextService.ListContextIds',
     index=0,
     containing_service=None,
     input_type=_EMPTY,
-    output_type=_TOPOLOGY,
+    output_type=_CONTEXTIDLIST,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='AddLink',
-    full_name='context.ContextService.AddLink',
+    name='ListContexts',
+    full_name='context.ContextService.ListContexts',
     index=1,
     containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONTEXTLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetContext',
+    full_name='context.ContextService.GetContext',
+    index=2,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_CONTEXT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetContext',
+    full_name='context.ContextService.SetContext',
+    index=3,
+    containing_service=None,
+    input_type=_CONTEXT,
+    output_type=_CONTEXTID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveContext',
+    full_name='context.ContextService.RemoveContext',
+    index=4,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetContextEvents',
+    full_name='context.ContextService.GetContextEvents',
+    index=5,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_CONTEXTEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListTopologyIds',
+    full_name='context.ContextService.ListTopologyIds',
+    index=6,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_TOPOLOGYIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListTopologies',
+    full_name='context.ContextService.ListTopologies',
+    index=7,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_TOPOLOGYLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetTopology',
+    full_name='context.ContextService.GetTopology',
+    index=8,
+    containing_service=None,
+    input_type=_TOPOLOGYID,
+    output_type=_TOPOLOGY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetTopology',
+    full_name='context.ContextService.SetTopology',
+    index=9,
+    containing_service=None,
+    input_type=_TOPOLOGY,
+    output_type=_TOPOLOGYID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveTopology',
+    full_name='context.ContextService.RemoveTopology',
+    index=10,
+    containing_service=None,
+    input_type=_TOPOLOGYID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetTopologyEvents',
+    full_name='context.ContextService.GetTopologyEvents',
+    index=11,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_TOPOLOGYEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListDeviceIds',
+    full_name='context.ContextService.ListDeviceIds',
+    index=12,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICEIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListDevices',
+    full_name='context.ContextService.ListDevices',
+    index=13,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICELIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetDevice',
+    full_name='context.ContextService.GetDevice',
+    index=14,
+    containing_service=None,
+    input_type=_DEVICEID,
+    output_type=_DEVICE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetDevice',
+    full_name='context.ContextService.SetDevice',
+    index=15,
+    containing_service=None,
+    input_type=_DEVICE,
+    output_type=_DEVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveDevice',
+    full_name='context.ContextService.RemoveDevice',
+    index=16,
+    containing_service=None,
+    input_type=_DEVICEID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetDeviceEvents',
+    full_name='context.ContextService.GetDeviceEvents',
+    index=17,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_DEVICEEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListLinkIds',
+    full_name='context.ContextService.ListLinkIds',
+    index=18,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListLinks',
+    full_name='context.ContextService.ListLinks',
+    index=19,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetLink',
+    full_name='context.ContextService.GetLink',
+    index=20,
+    containing_service=None,
+    input_type=_LINKID,
+    output_type=_LINK,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetLink',
+    full_name='context.ContextService.SetLink',
+    index=21,
+    containing_service=None,
     input_type=_LINK,
     output_type=_LINKID,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
   _descriptor.MethodDescriptor(
-    name='DeleteLink',
-    full_name='context.ContextService.DeleteLink',
-    index=2,
+    name='RemoveLink',
+    full_name='context.ContextService.RemoveLink',
+    index=22,
     containing_service=None,
     input_type=_LINKID,
     output_type=_EMPTY,
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='GetLinkEvents',
+    full_name='context.ContextService.GetLinkEvents',
+    index=23,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_LINKEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListServiceIds',
+    full_name='context.ContextService.ListServiceIds',
+    index=24,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_SERVICEIDLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='ListServices',
+    full_name='context.ContextService.ListServices',
+    index=25,
+    containing_service=None,
+    input_type=_CONTEXTID,
+    output_type=_SERVICELIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetService',
+    full_name='context.ContextService.GetService',
+    index=26,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_SERVICE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='SetService',
+    full_name='context.ContextService.SetService',
+    index=27,
+    containing_service=None,
+    input_type=_SERVICE,
+    output_type=_SERVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='RemoveService',
+    full_name='context.ContextService.RemoveService',
+    index=28,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetServiceEvents',
+    full_name='context.ContextService.GetServiceEvents',
+    index=29,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_SERVICEEVENT,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
diff --git a/src/context/proto/context_pb2_grpc.py b/src/context/proto/context_pb2_grpc.py
index bfae5108cc4bc9a2f5ad93d0cbf25dbcfc65fc47..63a7edb530ab138e7f2d6c2ba2d6075db52e2fee 100644
--- a/src/context/proto/context_pb2_grpc.py
+++ b/src/context/proto/context_pb2_grpc.py
@@ -14,39 +14,336 @@ class ContextServiceStub(object):
         Args:
             channel: A grpc.Channel.
         """
+        self.ListContextIds = channel.unary_unary(
+                '/context.ContextService/ListContextIds',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.ContextIdList.FromString,
+                )
+        self.ListContexts = channel.unary_unary(
+                '/context.ContextService/ListContexts',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.ContextList.FromString,
+                )
+        self.GetContext = channel.unary_unary(
+                '/context.ContextService/GetContext',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.Context.FromString,
+                )
+        self.SetContext = channel.unary_unary(
+                '/context.ContextService/SetContext',
+                request_serializer=context__pb2.Context.SerializeToString,
+                response_deserializer=context__pb2.ContextId.FromString,
+                )
+        self.RemoveContext = channel.unary_unary(
+                '/context.ContextService/RemoveContext',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetContextEvents = channel.unary_stream(
+                '/context.ContextService/GetContextEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.ContextEvent.FromString,
+                )
+        self.ListTopologyIds = channel.unary_unary(
+                '/context.ContextService/ListTopologyIds',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.TopologyIdList.FromString,
+                )
+        self.ListTopologies = channel.unary_unary(
+                '/context.ContextService/ListTopologies',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.TopologyList.FromString,
+                )
         self.GetTopology = channel.unary_unary(
                 '/context.ContextService/GetTopology',
-                request_serializer=context__pb2.Empty.SerializeToString,
+                request_serializer=context__pb2.TopologyId.SerializeToString,
                 response_deserializer=context__pb2.Topology.FromString,
                 )
-        self.AddLink = channel.unary_unary(
-                '/context.ContextService/AddLink',
+        self.SetTopology = channel.unary_unary(
+                '/context.ContextService/SetTopology',
+                request_serializer=context__pb2.Topology.SerializeToString,
+                response_deserializer=context__pb2.TopologyId.FromString,
+                )
+        self.RemoveTopology = channel.unary_unary(
+                '/context.ContextService/RemoveTopology',
+                request_serializer=context__pb2.TopologyId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetTopologyEvents = channel.unary_stream(
+                '/context.ContextService/GetTopologyEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.TopologyEvent.FromString,
+                )
+        self.ListDeviceIds = channel.unary_unary(
+                '/context.ContextService/ListDeviceIds',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.DeviceIdList.FromString,
+                )
+        self.ListDevices = channel.unary_unary(
+                '/context.ContextService/ListDevices',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.DeviceList.FromString,
+                )
+        self.GetDevice = channel.unary_unary(
+                '/context.ContextService/GetDevice',
+                request_serializer=context__pb2.DeviceId.SerializeToString,
+                response_deserializer=context__pb2.Device.FromString,
+                )
+        self.SetDevice = channel.unary_unary(
+                '/context.ContextService/SetDevice',
+                request_serializer=context__pb2.Device.SerializeToString,
+                response_deserializer=context__pb2.DeviceId.FromString,
+                )
+        self.RemoveDevice = channel.unary_unary(
+                '/context.ContextService/RemoveDevice',
+                request_serializer=context__pb2.DeviceId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetDeviceEvents = channel.unary_stream(
+                '/context.ContextService/GetDeviceEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.DeviceEvent.FromString,
+                )
+        self.ListLinkIds = channel.unary_unary(
+                '/context.ContextService/ListLinkIds',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.LinkIdList.FromString,
+                )
+        self.ListLinks = channel.unary_unary(
+                '/context.ContextService/ListLinks',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.LinkList.FromString,
+                )
+        self.GetLink = channel.unary_unary(
+                '/context.ContextService/GetLink',
+                request_serializer=context__pb2.LinkId.SerializeToString,
+                response_deserializer=context__pb2.Link.FromString,
+                )
+        self.SetLink = channel.unary_unary(
+                '/context.ContextService/SetLink',
                 request_serializer=context__pb2.Link.SerializeToString,
                 response_deserializer=context__pb2.LinkId.FromString,
                 )
-        self.DeleteLink = channel.unary_unary(
-                '/context.ContextService/DeleteLink',
+        self.RemoveLink = channel.unary_unary(
+                '/context.ContextService/RemoveLink',
                 request_serializer=context__pb2.LinkId.SerializeToString,
                 response_deserializer=context__pb2.Empty.FromString,
                 )
+        self.GetLinkEvents = channel.unary_stream(
+                '/context.ContextService/GetLinkEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.LinkEvent.FromString,
+                )
+        self.ListServiceIds = channel.unary_unary(
+                '/context.ContextService/ListServiceIds',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.ServiceIdList.FromString,
+                )
+        self.ListServices = channel.unary_unary(
+                '/context.ContextService/ListServices',
+                request_serializer=context__pb2.ContextId.SerializeToString,
+                response_deserializer=context__pb2.ServiceList.FromString,
+                )
+        self.GetService = channel.unary_unary(
+                '/context.ContextService/GetService',
+                request_serializer=context__pb2.ServiceId.SerializeToString,
+                response_deserializer=context__pb2.Service.FromString,
+                )
+        self.SetService = channel.unary_unary(
+                '/context.ContextService/SetService',
+                request_serializer=context__pb2.Service.SerializeToString,
+                response_deserializer=context__pb2.ServiceId.FromString,
+                )
+        self.RemoveService = channel.unary_unary(
+                '/context.ContextService/RemoveService',
+                request_serializer=context__pb2.ServiceId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetServiceEvents = channel.unary_stream(
+                '/context.ContextService/GetServiceEvents',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.ServiceEvent.FromString,
+                )
 
 
 class ContextServiceServicer(object):
     """Missing associated documentation comment in .proto file."""
 
+    def ListContextIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListContexts(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetContext(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SetContext(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveContext(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetContextEvents(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListTopologyIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListTopologies(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
     def GetTopology(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
-    def AddLink(self, request, context):
+    def SetTopology(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveTopology(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetTopologyEvents(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListDeviceIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListDevices(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetDevice(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SetDevice(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveDevice(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetDeviceEvents(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListLinkIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListLinks(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetLink(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SetLink(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveLink(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetLinkEvents(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListServiceIds(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def ListServices(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetService(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SetService(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RemoveService(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
-    def DeleteLink(self, request, context):
+    def GetServiceEvents(self, request, context):
         """Missing associated documentation comment in .proto file."""
         context.set_code(grpc.StatusCode.UNIMPLEMENTED)
         context.set_details('Method not implemented!')
@@ -55,21 +352,156 @@ class ContextServiceServicer(object):
 
 def add_ContextServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
+            'ListContextIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListContextIds,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.ContextIdList.SerializeToString,
+            ),
+            'ListContexts': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListContexts,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.ContextList.SerializeToString,
+            ),
+            'GetContext': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetContext,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.Context.SerializeToString,
+            ),
+            'SetContext': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetContext,
+                    request_deserializer=context__pb2.Context.FromString,
+                    response_serializer=context__pb2.ContextId.SerializeToString,
+            ),
+            'RemoveContext': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveContext,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetContextEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetContextEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.ContextEvent.SerializeToString,
+            ),
+            'ListTopologyIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListTopologyIds,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.TopologyIdList.SerializeToString,
+            ),
+            'ListTopologies': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListTopologies,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.TopologyList.SerializeToString,
+            ),
             'GetTopology': grpc.unary_unary_rpc_method_handler(
                     servicer.GetTopology,
-                    request_deserializer=context__pb2.Empty.FromString,
+                    request_deserializer=context__pb2.TopologyId.FromString,
                     response_serializer=context__pb2.Topology.SerializeToString,
             ),
-            'AddLink': grpc.unary_unary_rpc_method_handler(
-                    servicer.AddLink,
+            'SetTopology': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetTopology,
+                    request_deserializer=context__pb2.Topology.FromString,
+                    response_serializer=context__pb2.TopologyId.SerializeToString,
+            ),
+            'RemoveTopology': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveTopology,
+                    request_deserializer=context__pb2.TopologyId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetTopologyEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetTopologyEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.TopologyEvent.SerializeToString,
+            ),
+            'ListDeviceIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListDeviceIds,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.DeviceIdList.SerializeToString,
+            ),
+            'ListDevices': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListDevices,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.DeviceList.SerializeToString,
+            ),
+            'GetDevice': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetDevice,
+                    request_deserializer=context__pb2.DeviceId.FromString,
+                    response_serializer=context__pb2.Device.SerializeToString,
+            ),
+            'SetDevice': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetDevice,
+                    request_deserializer=context__pb2.Device.FromString,
+                    response_serializer=context__pb2.DeviceId.SerializeToString,
+            ),
+            'RemoveDevice': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveDevice,
+                    request_deserializer=context__pb2.DeviceId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetDeviceEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetDeviceEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.DeviceEvent.SerializeToString,
+            ),
+            'ListLinkIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListLinkIds,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.LinkIdList.SerializeToString,
+            ),
+            'ListLinks': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListLinks,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.LinkList.SerializeToString,
+            ),
+            'GetLink': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetLink,
+                    request_deserializer=context__pb2.LinkId.FromString,
+                    response_serializer=context__pb2.Link.SerializeToString,
+            ),
+            'SetLink': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetLink,
                     request_deserializer=context__pb2.Link.FromString,
                     response_serializer=context__pb2.LinkId.SerializeToString,
             ),
-            'DeleteLink': grpc.unary_unary_rpc_method_handler(
-                    servicer.DeleteLink,
+            'RemoveLink': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveLink,
                     request_deserializer=context__pb2.LinkId.FromString,
                     response_serializer=context__pb2.Empty.SerializeToString,
             ),
+            'GetLinkEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetLinkEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.LinkEvent.SerializeToString,
+            ),
+            'ListServiceIds': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListServiceIds,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.ServiceIdList.SerializeToString,
+            ),
+            'ListServices': grpc.unary_unary_rpc_method_handler(
+                    servicer.ListServices,
+                    request_deserializer=context__pb2.ContextId.FromString,
+                    response_serializer=context__pb2.ServiceList.SerializeToString,
+            ),
+            'GetService': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetService,
+                    request_deserializer=context__pb2.ServiceId.FromString,
+                    response_serializer=context__pb2.Service.SerializeToString,
+            ),
+            'SetService': grpc.unary_unary_rpc_method_handler(
+                    servicer.SetService,
+                    request_deserializer=context__pb2.Service.FromString,
+                    response_serializer=context__pb2.ServiceId.SerializeToString,
+            ),
+            'RemoveService': grpc.unary_unary_rpc_method_handler(
+                    servicer.RemoveService,
+                    request_deserializer=context__pb2.ServiceId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetServiceEvents': grpc.unary_stream_rpc_method_handler(
+                    servicer.GetServiceEvents,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.ServiceEvent.SerializeToString,
+            ),
     }
     generic_handler = grpc.method_handlers_generic_handler(
             'context.ContextService', rpc_method_handlers)
@@ -81,7 +513,7 @@ class ContextService(object):
     """Missing associated documentation comment in .proto file."""
 
     @staticmethod
-    def GetTopology(request,
+    def ListContextIds(request,
             target,
             options=(),
             channel_credentials=None,
@@ -91,14 +523,14 @@ class ContextService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetTopology',
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListContextIds',
             context__pb2.Empty.SerializeToString,
-            context__pb2.Topology.FromString,
+            context__pb2.ContextIdList.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
-    def AddLink(request,
+    def ListContexts(request,
             target,
             options=(),
             channel_credentials=None,
@@ -108,14 +540,14 @@ class ContextService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/context.ContextService/AddLink',
-            context__pb2.Link.SerializeToString,
-            context__pb2.LinkId.FromString,
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListContexts',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.ContextList.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
 
     @staticmethod
-    def DeleteLink(request,
+    def GetContext(request,
             target,
             options=(),
             channel_credentials=None,
@@ -125,8 +557,467 @@ class ContextService(object):
             wait_for_ready=None,
             timeout=None,
             metadata=None):
-        return grpc.experimental.unary_unary(request, target, '/context.ContextService/DeleteLink',
-            context__pb2.LinkId.SerializeToString,
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetContext',
+            context__pb2.ContextId.SerializeToString,
+            context__pb2.Context.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetContext(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetContext',
+            context__pb2.Context.SerializeToString,
+            context__pb2.ContextId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveContext(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveContext',
+            context__pb2.ContextId.SerializeToString,
             context__pb2.Empty.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetContextEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetContextEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.ContextEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListTopologyIds(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListTopologyIds',
+            context__pb2.ContextId.SerializeToString,
+            context__pb2.TopologyIdList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListTopologies(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListTopologies',
+            context__pb2.ContextId.SerializeToString,
+            context__pb2.TopologyList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetTopology(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetTopology',
+            context__pb2.TopologyId.SerializeToString,
+            context__pb2.Topology.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetTopology(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetTopology',
+            context__pb2.Topology.SerializeToString,
+            context__pb2.TopologyId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveTopology(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveTopology',
+            context__pb2.TopologyId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetTopologyEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetTopologyEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.TopologyEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListDeviceIds(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListDeviceIds',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.DeviceIdList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListDevices(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListDevices',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.DeviceList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetDevice(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetDevice',
+            context__pb2.DeviceId.SerializeToString,
+            context__pb2.Device.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetDevice(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetDevice',
+            context__pb2.Device.SerializeToString,
+            context__pb2.DeviceId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveDevice(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveDevice',
+            context__pb2.DeviceId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetDeviceEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetDeviceEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.DeviceEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListLinkIds(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListLinkIds',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.LinkIdList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListLinks(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListLinks',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.LinkList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetLink(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetLink',
+            context__pb2.LinkId.SerializeToString,
+            context__pb2.Link.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetLink(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetLink',
+            context__pb2.Link.SerializeToString,
+            context__pb2.LinkId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveLink(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveLink',
+            context__pb2.LinkId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetLinkEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetLinkEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.LinkEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListServiceIds(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListServiceIds',
+            context__pb2.ContextId.SerializeToString,
+            context__pb2.ServiceIdList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def ListServices(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/ListServices',
+            context__pb2.ContextId.SerializeToString,
+            context__pb2.ServiceList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetService',
+            context__pb2.ServiceId.SerializeToString,
+            context__pb2.Service.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SetService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/SetService',
+            context__pb2.Service.SerializeToString,
+            context__pb2.ServiceId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RemoveService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/RemoveService',
+            context__pb2.ServiceId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetServiceEvents(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/context.ContextService/GetServiceEvents',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.ServiceEvent.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py
deleted file mode 100644
index f067d144652314ad58d5eaf3c3fdfefb3c900bbf..0000000000000000000000000000000000000000
--- a/src/context/service/ContextServiceServicerImpl.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import grpc, logging
-from prometheus_client import Counter, Histogram
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.api.Database import Database
-from common.exceptions.ServiceException import ServiceException
-from context.proto.context_pb2 import Empty, Link, LinkId, Topology
-from context.proto.context_pb2_grpc import ContextServiceServicer
-from context.service.Tools import check_link_id_request, check_link_request
-
-LOGGER = logging.getLogger(__name__)
-
-GETTOPOLOGY_COUNTER_STARTED    = Counter  ('context_gettopology_counter_started',
-                                          'Context:GetTopology counter of requests started'  )
-GETTOPOLOGY_COUNTER_COMPLETED  = Counter  ('context_gettopology_counter_completed',
-                                          'Context:GetTopology counter of requests completed')
-GETTOPOLOGY_COUNTER_FAILED     = Counter  ('context_gettopology_counter_failed',
-                                          'Context:GetTopology counter of requests failed'   )
-GETTOPOLOGY_HISTOGRAM_DURATION = Histogram('context_gettopology_histogram_duration',
-                                          'Context:GetTopology histogram of request duration')
-
-ADDLINK_COUNTER_STARTED    = Counter  ('context_addlink_counter_started',
-                                       'Context:AddLink counter of requests started'  )
-ADDLINK_COUNTER_COMPLETED  = Counter  ('context_addlink_counter_completed',
-                                       'Context:AddLink counter of requests completed')
-ADDLINK_COUNTER_FAILED     = Counter  ('context_addlink_counter_failed',
-                                       'Context:AddLink counter of requests failed'   )
-ADDLINK_HISTOGRAM_DURATION = Histogram('context_addlink_histogram_duration',
-                                       'Context:AddLink histogram of request duration')
-
-DELETELINK_COUNTER_STARTED    = Counter  ('context_deletelink_counter_started',
-                                          'Context:DeleteLink counter of requests started'  )
-DELETELINK_COUNTER_COMPLETED  = Counter  ('context_deletelink_counter_completed',
-                                          'Context:DeleteLink counter of requests completed')
-DELETELINK_COUNTER_FAILED     = Counter  ('context_deletelink_counter_failed',
-                                          'Context:DeleteLink counter of requests failed'   )
-DELETELINK_HISTOGRAM_DURATION = Histogram('context_deletelink_histogram_duration',
-                                          'Context:DeleteLink histogram of request duration')
-
-class ContextServiceServicerImpl(ContextServiceServicer):
-    def __init__(self, database : Database):
-        LOGGER.debug('Creating Servicer...')
-        self.database = database
-        LOGGER.debug('Servicer Created')
-
-    @GETTOPOLOGY_HISTOGRAM_DURATION.time()
-    def GetTopology(self, request : Empty, grpc_context : grpc.ServicerContext) -> Topology:
-        GETTOPOLOGY_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('GetTopology request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            # ----- Retrieve data from the database --------------------------------------------------------------------
-            json_topology = db_topology.dump()
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = Topology(**json_topology)
-            LOGGER.debug('GetTopology reply: {}'.format(str(reply)))
-            GETTOPOLOGY_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:                               # pragma: no cover (ServiceException not thrown)
-            LOGGER.exception('GetTopology exception')
-            GETTOPOLOGY_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('GetTopology exception')
-            GETTOPOLOGY_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @ADDLINK_HISTOGRAM_DURATION.time()
-    def AddLink(self, request : Link, grpc_context : grpc.ServicerContext) -> LinkId:
-        ADDLINK_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('AddLink request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            link_id, db_endpoints = check_link_request('AddLink', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-            db_link = db_topology.link(link_id).create()
-            for db_endpoint in db_endpoints:
-                link_endpoint_id = '{}/{}'.format(
-                    db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-                db_link.endpoint(link_endpoint_id).create(db_endpoint)
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = LinkId(**db_link.dump_id())
-            LOGGER.debug('AddLink reply: {}'.format(str(reply)))
-            ADDLINK_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('AddLink exception')
-            ADDLINK_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('AddLink exception')
-            ADDLINK_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
-
-    @DELETELINK_HISTOGRAM_DURATION.time()
-    def DeleteLink(self, request : LinkId, grpc_context : grpc.ServicerContext) -> Empty:
-        DELETELINK_COUNTER_STARTED.inc()
-        try:
-            LOGGER.debug('DeleteLink request: {}'.format(str(request)))
-
-            # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            link_id = check_link_id_request('DeleteLink', request, self.database, LOGGER)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
-            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
-            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-            db_topology.link(link_id).delete()
-
-            # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = Empty()
-            LOGGER.debug('DeleteLink reply: {}'.format(str(reply)))
-            DELETELINK_COUNTER_COMPLETED.inc()
-            return reply
-        except ServiceException as e:
-            LOGGER.exception('DeleteLink exception')
-            DELETELINK_COUNTER_FAILED.inc()
-            grpc_context.abort(e.code, e.details)
-        except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('DeleteLink exception')
-            DELETELINK_COUNTER_FAILED.inc()
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/context/service/Populate.py b/src/context/service/Populate.py
new file mode 100644
index 0000000000000000000000000000000000000000..eea593d90a608b57db0bd1fa801eef11abee5b03
--- /dev/null
+++ b/src/context/service/Populate.py
@@ -0,0 +1,34 @@
+import copy
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Context, Device, Link, Service, Topology
+from context.tests.example_objects import (
+    CONTEXT, TOPOLOGY,
+    DEVICE1, DEVICE1_ID, DEVICE2, DEVICE2_ID, DEVICE3, DEVICE3_ID,
+    LINK_DEV1_DEV2, LINK_DEV1_DEV2_ID, LINK_DEV1_DEV3, LINK_DEV1_DEV3_ID, LINK_DEV2_DEV3, LINK_DEV2_DEV3_ID,
+    SERVICE_DEV1_DEV2, SERVICE_DEV1_DEV3, SERVICE_DEV2_DEV3)
+
+def populate(address, port):
+    client = ContextClient(address=address, port=port)
+
+    client.SetContext(Context(**CONTEXT))
+    client.SetTopology(Topology(**TOPOLOGY))
+    client.SetDevice(Device(**DEVICE1))
+    client.SetDevice(Device(**DEVICE2))
+    client.SetDevice(Device(**DEVICE3))
+
+    client.SetLink(Link(**LINK_DEV1_DEV2))
+    client.SetLink(Link(**LINK_DEV1_DEV3))
+    client.SetLink(Link(**LINK_DEV2_DEV3))
+
+    TOPOLOGY_WITH_DEVICES_AND_LINKS = copy.deepcopy(TOPOLOGY)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['device_ids'].append(DEVICE1_ID)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['device_ids'].append(DEVICE2_ID)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['device_ids'].append(DEVICE3_ID)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['link_ids'].append(LINK_DEV1_DEV2_ID)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['link_ids'].append(LINK_DEV1_DEV3_ID)
+    TOPOLOGY_WITH_DEVICES_AND_LINKS['link_ids'].append(LINK_DEV2_DEV3_ID)
+    client.SetTopology(Topology(**TOPOLOGY_WITH_DEVICES_AND_LINKS))
+
+    client.SetService(Service(**SERVICE_DEV1_DEV2))
+    client.SetService(Service(**SERVICE_DEV1_DEV3))
+    client.SetService(Service(**SERVICE_DEV2_DEV3))
diff --git a/src/context/service/Tools.py b/src/context/service/Tools.py
deleted file mode 100644
index 9856d616bb335f0a1be64c09de5174eb3523fefc..0000000000000000000000000000000000000000
--- a/src/context/service/Tools.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import grpc, logging
-from typing import Dict, List, Set, Tuple
-from common.Checkers import chk_string
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.api.context.topology.device.Endpoint import Endpoint
-from common.exceptions.ServiceException import ServiceException
-from common.tools.service.EndpointIdCheckers import check_endpoint_id
-from common.tools.service.DeviceCheckers import check_device_endpoint_exists
-from common.tools.service.LinkCheckers import check_link_exists, check_link_not_exists
-from context.proto.context_pb2 import Link, LinkId
-
-def _check_link_exists(method_name : str, database : Database, link_id : str):
-    if method_name in ['AddLink']:
-        check_link_not_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
-    elif method_name in ['DeleteLink']:
-        check_link_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
-    else:                                       # pragma: no cover (test requires malforming the code)
-        msg = 'Unexpected condition [_check_link_exists(method_name={}, link_id={})]'
-        msg = msg.format(str(method_name), str(link_id))
-        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
-
-def check_link_request(
-    method_name : str, request : Link, database : Database, logger : logging.Logger
-    ) -> Tuple[str, List[Endpoint]]:
-
-    # ----- Parse attributes -------------------------------------------------------------------------------------------
-    try:
-        link_id = chk_string('link.link_id.link_id.uuid',
-                             request.link_id.link_id.uuid,
-                             allow_empty=False)
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    # ----- Check if link exists in database ---------------------------------------------------------------------------
-    _check_link_exists(method_name, database, link_id)
-
-    # ----- Parse endpoints and check if they exist in the database as device endpoints --------------------------------
-    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
-    db_endpoints : List[Endpoint] = []
-    for endpoint_number,endpoint_id in enumerate(request.endpointList):
-        parent_name = 'Endpoint(#{}) of Context({})/Topology({})/Link({})'
-        parent_name = parent_name.format(endpoint_number, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
-
-        _, ep_device_id, ep_port_id = check_endpoint_id(
-            logger, endpoint_number, parent_name, endpoint_id, add_topology_devices_endpoints)
-
-        db_endpoint = check_device_endpoint_exists(
-            database, parent_name, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, ep_device_id, ep_port_id)
-        db_endpoints.append(db_endpoint)
-
-    return link_id, db_endpoints
-
-def check_link_id_request(
-    method_name : str, request : LinkId, database : Database, logger : logging.Logger) -> str:
-
-    # ----- Parse attributes -------------------------------------------------------------------------------------------
-    try:
-        link_id = chk_string('link_id.link_id.uuid',
-                             request.link_id.uuid,
-                             allow_empty=False)
-    except Exception as e:
-        logger.exception('Invalid arguments:')
-        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-    # ----- Check if link exists in database ---------------------------------------------------------------------------
-    _check_link_exists(method_name, database, link_id)
-
-    return link_id
diff --git a/src/context/service/__main__.py b/src/context/service/__main__.py
index 69d2788e2f0bf30bd855f7578474c93afa04fe49..1cb4568221d0c8554dfb0ee65df27360f52df63b 100644
--- a/src/context/service/__main__.py
+++ b/src/context/service/__main__.py
@@ -1,64 +1,79 @@
 import logging, signal, sys, threading
 from prometheus_client import start_http_server
 from common.Settings import get_setting
-from common.database.Factory import get_database
-from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, RESTAPI_SERVICE_PORT, \
-    RESTAPI_BASE_URL, METRICS_PORT
-from context.service.ContextService import ContextService
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
+from common.message_broker.Factory import get_messagebroker_backend
+from common.message_broker.MessageBroker import MessageBroker
+from context.Config import (
+    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, POPULATE_FAKE_DATA, RESTAPI_SERVICE_PORT,
+    RESTAPI_BASE_URL, METRICS_PORT)
+from context.service.Populate import populate
+from context.service.grpc_server.ContextService import ContextService
 from context.service.rest_server.Server import Server
-from context.service.rest_server.resources.Context import Context
+from context.service.rest_server.Resources import RESOURCES
 
 terminate = threading.Event()
-logger = None
+LOGGER = None
 
-def signal_handler(signal, frame):
-    global terminate, logger
-    logger.warning('Terminate signal received')
+def signal_handler(signal_, frame):
+    LOGGER.warning('Terminate signal received')
     terminate.set()
 
 def main():
-    global terminate, logger
+    global LOGGER # pylint: disable=global-statement
 
     grpc_service_port    = get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT   )
     max_workers          = get_setting('MAX_WORKERS',                      default=GRPC_MAX_WORKERS    )
     grace_period         = get_setting('GRACE_PERIOD',                     default=GRPC_GRACE_PERIOD   )
     log_level            = get_setting('LOG_LEVEL',                        default=LOG_LEVEL           )
-    restapi_service_port = get_setting('RESTAPI_SERVICE_PORT',             default=RESTAPI_SERVICE_PORT)
+    restapi_service_port = get_setting('CONTEXTSERVICE_SERVICE_PORT_HTTP', default=RESTAPI_SERVICE_PORT)
     restapi_base_url     = get_setting('RESTAPI_BASE_URL',                 default=RESTAPI_BASE_URL    )
     metrics_port         = get_setting('METRICS_PORT',                     default=METRICS_PORT        )
+    populate_fake_data   = get_setting('POPULATE_FAKE_DATA',               default=POPULATE_FAKE_DATA  )
+    if isinstance(populate_fake_data, str): populate_fake_data = (populate_fake_data.upper() in {'T', '1', 'TRUE'})
 
     logging.basicConfig(level=log_level)
-    logger = logging.getLogger(__name__)
+    LOGGER = logging.getLogger(__name__)
 
     signal.signal(signal.SIGINT,  signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
 
-    logger.info('Starting...')
+    LOGGER.info('Starting...')
 
     # Start metrics server
     start_http_server(metrics_port)
 
     # Get database instance
-    database = get_database()
+    database = Database(get_database_backend())
+
+    # Get message broker instance
+    messagebroker = MessageBroker(get_messagebroker_backend())
 
     # Starting context service
-    grpc_service = ContextService(database, port=grpc_service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service = ContextService(
+        database, messagebroker, port=grpc_service_port, max_workers=max_workers, grace_period=grace_period)
     grpc_service.start()
 
     rest_server = Server(port=restapi_service_port, base_url=restapi_base_url)
-    rest_server.add_resource(
-        Context, '/restconf/config/context', endpoint='api.context', resource_class_args=(database,))
+    for endpoint_name, resource_class, resource_url in RESOURCES:
+        rest_server.add_resource(resource_class, resource_url, endpoint=endpoint_name, resource_class_args=(database,))
     rest_server.start()
 
+    if populate_fake_data:
+        LOGGER.info('Populating fake data...')
+        populate('127.0.0.1', grpc_service_port)
+        LOGGER.info('Fake Data populated')
+
     # Wait for Ctrl+C or termination signal
     while not terminate.wait(timeout=0.1): pass
 
-    logger.info('Terminating...')
+    LOGGER.info('Terminating...')
     grpc_service.stop()
     rest_server.shutdown()
     rest_server.join()
 
-    logger.info('Bye')
+    LOGGER.info('Bye')
     return 0
 
 if __name__ == '__main__':
diff --git a/src/context/service/database/ConfigModel.py b/src/context/service/database/ConfigModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fe3484896a39545c4dd49042707dc1ee09fc868
--- /dev/null
+++ b/src/context/service/database/ConfigModel.py
@@ -0,0 +1,84 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.proto.context_pb2 import ConfigActionEnum
+from context.service.database.Tools import fast_hasher, grpc_to_enum, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ConfigActionEnum(Enum):
+    UNDEFINED = ConfigActionEnum.CONFIGACTION_UNDEFINED
+    SET       = ConfigActionEnum.CONFIGACTION_SET
+    DELETE    = ConfigActionEnum.CONFIGACTION_DELETE
+
+grpc_to_enum__config_action = functools.partial(
+    grpc_to_enum, ConfigActionEnum, ORM_ConfigActionEnum)
+
+class ConfigModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_config_rule_pks = self.references(ConfigRuleModel)
+        config_rules = [ConfigRuleModel(self.database, pk).dump(include_position=True) for pk,_ in db_config_rule_pks]
+        config_rules = sorted(config_rules, key=operator.itemgetter('position'))
+        return [remove_dict_key(config_rule, 'position') for config_rule in config_rules]
+
+class ConfigRuleModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    config_fk = ForeignKeyField(ConfigModel)
+    position = IntegerField(min_value=0, required=True)
+    action = EnumeratedField(ORM_ConfigActionEnum, required=True)
+    key = StringField(required=True, allow_empty=False)
+    value = StringField(required=True, allow_empty=False)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'action': self.action.value,
+            'resource_key': self.key,
+            'resource_value': self.value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def set_config_rule(
+    database : Database, db_config : ConfigModel, grpc_config_rule, position : int
+    ) -> Tuple[ConfigRuleModel, bool]:
+
+    str_rule_key_hash = fast_hasher(grpc_config_rule.resource_key)
+    str_config_rule_key = key_to_str([db_config.pk, str_rule_key_hash], separator=':')
+
+    result : Tuple[ConfigRuleModel, bool] = update_or_create_object(database, ConfigRuleModel, str_config_rule_key, {
+        'config_fk': db_config,
+        'position' : position,
+        'action'   : grpc_to_enum__config_action(grpc_config_rule.action),
+        'key'      : grpc_config_rule.resource_key,
+        'value'    : grpc_config_rule.resource_value,
+    })
+    db_config_rule, updated = result
+    return db_config_rule, updated
+
+def set_config(
+    database : Database, db_parent_pk : str, config_name : str, grpc_config_rules
+    ) -> List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]]:
+
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    result : Tuple[ConfigModel, bool] = get_or_create_object(database, ConfigModel, str_config_key)
+    db_config, created = result
+
+    db_objects = [(db_config, created)]
+
+    for position,grpc_config_rule in enumerate(grpc_config_rules):
+        result : Tuple[ConfigRuleModel, bool] = set_config_rule(database, db_config, grpc_config_rule, position)
+        db_config_rule, updated = result
+        db_objects.append((db_config_rule, updated))
+
+    return db_objects
diff --git a/src/context/service/database/ConstraintModel.py b/src/context/service/database/ConstraintModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d11b123193e9cb4ce2c03cbfac6a3604947fbe2
--- /dev/null
+++ b/src/context/service/database/ConstraintModel.py
@@ -0,0 +1,71 @@
+import logging, operator
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.proto.context_pb2 import Constraint
+from context.service.database.Tools import fast_hasher, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ConstraintsModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_constraint_pks = self.references(ConstraintModel)
+        constraints = [ConstraintModel(self.database, pk).dump(include_position=True) for pk,_ in db_constraint_pks]
+        constraints = sorted(constraints, key=operator.itemgetter('position'))
+        return [remove_dict_key(constraint, 'position') for constraint in constraints]
+
+class ConstraintModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    constraints_fk = ForeignKeyField(ConstraintsModel)
+    position = IntegerField(min_value=0, required=True)
+    constraint_type = StringField(required=True, allow_empty=False)
+    constraint_value = StringField(required=True, allow_empty=False)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'constraint_type': self.constraint_type,
+            'constraint_value': self.constraint_value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def set_constraint(
+    database : Database, db_constraints : ConstraintsModel, grpc_constraint, position : int
+    ) -> Tuple[ConstraintModel, bool]:
+
+    str_constraint_key_hash = fast_hasher(grpc_constraint.constraint_type)
+    str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
+
+    result : Tuple[ConstraintModel, bool] = update_or_create_object(database, ConstraintModel, str_constraint_key, {
+        'constraints_fk'  : db_constraints,
+        'position'        : position,
+        'constraint_type' : grpc_constraint.constraint_type,
+        'constraint_value': grpc_constraint.constraint_value,
+    })
+    db_constraint, updated = result
+    return db_constraint, updated
+
+def set_constraints(
+    database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
+    ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
+
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
+    db_constraints, created = result
+
+    db_objects = [(db_constraints, created)]
+
+    for position,grpc_constraint in enumerate(grpc_constraints):
+        result : Tuple[ConstraintModel, bool] = set_constraint(database, db_constraints, grpc_constraint, position)
+        db_constraint, updated = result
+        db_objects.append((db_constraint, updated))
+
+    return db_objects
diff --git a/src/context/service/database/ContextModel.py b/src/context/service/database/ContextModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..77243c29f5306b0137c75f71b435a3fc8036353e
--- /dev/null
+++ b/src/context/service/database/ContextModel.py
@@ -0,0 +1,30 @@
+import logging
+from typing import Dict, List
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+
+LOGGER = logging.getLogger(__name__)
+
+class ContextModel(Model):
+    pk = PrimaryKeyField()
+    context_uuid = StringField(required=True, allow_empty=False)
+
+    def dump_id(self) -> Dict:
+        return {'context_uuid': {'uuid': self.context_uuid}}
+
+    def dump_service_ids(self) -> List[Dict]:
+        from .ServiceModel import ServiceModel # pylint: disable=import-outside-toplevel
+        db_service_pks = self.references(ServiceModel)
+        return [ServiceModel(self.database, pk).dump_id() for pk,_ in db_service_pks]
+
+    def dump_topology_ids(self) -> List[Dict]:
+        from .TopologyModel import TopologyModel # pylint: disable=import-outside-toplevel
+        db_topology_pks = self.references(TopologyModel)
+        return [TopologyModel(self.database, pk).dump_id() for pk,_ in db_topology_pks]
+
+    def dump(self, include_services=True, include_topologies=True) -> Dict: # pylint: disable=arguments-differ
+        result = {'context_id': self.dump_id()}
+        if include_services: result['service_ids'] = self.dump_service_ids()
+        if include_topologies: result['topology_ids'] = self.dump_topology_ids()
+        return result
diff --git a/src/context/service/database/DeviceModel.py b/src/context/service/database/DeviceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..d005292acfd4cf20619548dbfa5b48a08780ec2f
--- /dev/null
+++ b/src/context/service/database/DeviceModel.py
@@ -0,0 +1,87 @@
+import functools, logging
+from enum import Enum
+from typing import Dict, List
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from .ConfigModel import ConfigModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_DeviceDriverEnum(Enum):
+    UNDEFINED             = DeviceDriverEnum.DEVICEDRIVER_UNDEFINED
+    OPENCONFIG            = DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG
+    TRANSPORT_API         = DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API
+    P4                    = DeviceDriverEnum.DEVICEDRIVER_P4
+    IETF_NETWORK_TOPOLOGY = DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY
+    ONF_TR_352            = DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352
+
+grpc_to_enum__device_driver = functools.partial(
+    grpc_to_enum, DeviceDriverEnum, ORM_DeviceDriverEnum)
+
+class ORM_DeviceOperationalStatusEnum(Enum):
+    UNDEFINED = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_UNDEFINED
+    DISABLED  = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED
+    ENABLED   = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
+grpc_to_enum__device_operational_status = functools.partial(
+    grpc_to_enum, DeviceOperationalStatusEnum, ORM_DeviceOperationalStatusEnum)
+
+class DeviceModel(Model):
+    pk = PrimaryKeyField()
+    device_uuid = StringField(required=True, allow_empty=False)
+    device_type = StringField()
+    device_config_fk = ForeignKeyField(ConfigModel)
+    device_operational_status = EnumeratedField(ORM_DeviceOperationalStatusEnum, required=True)
+
+    def dump_id(self) -> Dict:
+        return {'device_uuid': {'uuid': self.device_uuid}}
+
+    def dump_config(self) -> Dict:
+        return ConfigModel(self.database, self.device_config_fk).dump()
+
+    def dump_drivers(self) -> List[int]:
+        db_driver_pks = self.references(DriverModel)
+        return [DriverModel(self.database, pk).dump() for pk,_ in db_driver_pks]
+
+    def dump_endpoints(self) -> List[Dict]:
+        from .EndPointModel import EndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints_pks = self.references(EndPointModel)
+        return [EndPointModel(self.database, pk).dump() for pk,_ in db_endpoints_pks]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_config_rules=True, include_drivers=True, include_endpoints=True
+        ) -> Dict:
+        result = {
+            'device_id': self.dump_id(),
+            'device_type': self.device_type,
+            'device_operational_status': self.device_operational_status.value,
+        }
+        if include_config_rules: result.setdefault('device_config', {})['config_rules'] = self.dump_config()
+        if include_drivers: result['device_drivers'] = self.dump_drivers()
+        if include_endpoints: result['device_endpoints'] = self.dump_endpoints()
+        return result
+
+class DriverModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    device_fk = ForeignKeyField(DeviceModel)
+    driver = EnumeratedField(ORM_DeviceDriverEnum, required=True)
+
+    def dump(self) -> Dict:
+        return self.driver.value
+
+def set_drivers(database : Database, db_device : DeviceModel, grpc_device_drivers):
+    db_device_pk = db_device.pk
+    for driver in grpc_device_drivers:
+        orm_driver = grpc_to_enum__device_driver(driver)
+        str_device_driver_key = key_to_str([db_device_pk, orm_driver.name])
+        db_device_driver = DriverModel(database, str_device_driver_key)
+        db_device_driver.device_fk = db_device
+        db_device_driver.driver = orm_driver
+        db_device_driver.save()
diff --git a/src/context/service/database/EndPointModel.py b/src/context/service/database/EndPointModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7c220a00b2a85b2c3f4c11a2eceb3aa66aadc5a
--- /dev/null
+++ b/src/context/service/database/EndPointModel.py
@@ -0,0 +1,33 @@
+import logging
+from typing import Dict
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.service.database.DeviceModel import DeviceModel
+from context.service.database.TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+class EndPointModel(Model):  # ORM model of a device endpoint; may optionally be attached to a topology
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel, required=False)  # optional: not every endpoint belongs to a topology
+    device_fk = ForeignKeyField(DeviceModel)
+    endpoint_uuid = StringField(required=True, allow_empty=False)
+    endpoint_type = StringField()
+
+    def dump_id(self) -> Dict:  # build an EndPointId-like dict; 'topology_id' is included only when topology_fk is set
+        device_id = DeviceModel(self.database, self.device_fk).dump_id()
+        result = {
+            'device_id': device_id,
+            'endpoint_uuid': {'uuid': self.endpoint_uuid},
+        }
+        if self.topology_fk is not None:
+            result['topology_id'] = TopologyModel(self.database, self.topology_fk).dump_id()
+        return result
+
+    def dump(self) -> Dict:  # serialize the endpoint as an EndPoint-like dict
+        return {
+            'endpoint_id': self.dump_id(),
+            'endpoint_type': self.endpoint_type,
+        }
diff --git a/src/context/service/database/Events.py b/src/context/service/database/Events.py
new file mode 100644
index 0000000000000000000000000000000000000000..2737b9fb7d400d54e2aad9e943ee1c29461e82ff
--- /dev/null
+++ b/src/context/service/database/Events.py
@@ -0,0 +1,13 @@
+import json, time
+from typing import Dict
+from common.message_broker.Message import Message
+from common.message_broker.MessageBroker import MessageBroker
+from context.proto.context_pb2 import EventTypeEnum
+
+def notify_event(
+    messagebroker : MessageBroker, topic_name : str, event_type : EventTypeEnum, fields : Dict[str, str]) -> None:
+
+    event = {'event': {'timestamp': time.time(), 'event_type': event_type}}  # common envelope: emission time + event type
+    for field_name, field_value in fields.items():  # caller-provided fields (e.g. entity ids) go at the top level
+        event[field_name] = field_value
+    messagebroker.publish(Message(topic_name, json.dumps(event)))  # publish the JSON-serialized event on the topic
diff --git a/src/context/service/database/LinkModel.py b/src/context/service/database/LinkModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..48d67bfa6134818d3ba24fd304507b7d14a37e0f
--- /dev/null
+++ b/src/context/service/database/LinkModel.py
@@ -0,0 +1,26 @@
+import logging, operator
+from typing import Dict, List
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_related_objects
+
+LOGGER = logging.getLogger(__name__)
+
+class LinkModel(Model):  # ORM model of a link; endpoints are attached through LinkEndPointModel relations
+    pk = PrimaryKeyField()
+    link_uuid = StringField(required=True, allow_empty=False)
+
+    def dump_id(self) -> Dict:  # build a LinkId-like dict
+        return {'link_uuid': {'uuid': self.link_uuid}}
+
+    def dump_endpoint_ids(self) -> List[Dict]:  # endpoint ids of this link, sorted by relation pk for stable output
+        from .RelationModels import LinkEndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints = get_related_objects(self, LinkEndPointModel, 'endpoint_fk')
+        return [db_endpoint.dump_id() for db_endpoint in sorted(db_endpoints, key=operator.attrgetter('pk'))]
+
+    def dump(self) -> Dict:  # serialize the link as a Link-like dict
+        return {
+            'link_id': self.dump_id(),
+            'link_endpoint_ids': self.dump_endpoint_ids(),
+        }
diff --git a/src/context/service/database/RelationModels.py b/src/context/service/database/RelationModels.py
new file mode 100644
index 0000000000000000000000000000000000000000..4531e0594f3f213f4a00b1fe70dfb2d8dc0a0f5e
--- /dev/null
+++ b/src/context/service/database/RelationModels.py
@@ -0,0 +1,31 @@
+import logging
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.model.Model import Model
+from .DeviceModel import DeviceModel
+from .EndPointModel import EndPointModel
+from .LinkModel import LinkModel
+from .ServiceModel import ServiceModel
+from .TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+class LinkEndPointModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    link_fk = ForeignKeyField(LinkModel)  # association row: link <-> endpoint (many-to-many)
+    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class ServiceEndPointModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    service_fk = ForeignKeyField(ServiceModel)  # association row: service <-> endpoint (many-to-many)
+    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class TopologyDeviceModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel)  # association row: topology <-> device (many-to-many)
+    device_fk = ForeignKeyField(DeviceModel)
+
+class TopologyLinkModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel)  # association row: topology <-> link (many-to-many)
+    link_fk = ForeignKeyField(LinkModel)
diff --git a/src/context/service/database/ServiceModel.py b/src/context/service/database/ServiceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6bd2e9c47f77538fac6d65aa0e02ca720126abb
--- /dev/null
+++ b/src/context/service/database/ServiceModel.py
@@ -0,0 +1,74 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_related_objects
+from context.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
+from .ConfigModel import ConfigModel
+from .ConstraintModel import ConstraintsModel
+from .ContextModel import ContextModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ServiceTypeEnum(Enum):  # mirrors grpc ServiceTypeEnum so values can be stored in EnumeratedFields
+    UNKNOWN                   = ServiceTypeEnum.SERVICETYPE_UNKNOWN
+    L3NM                      = ServiceTypeEnum.SERVICETYPE_L3NM
+    L2NM                      = ServiceTypeEnum.SERVICETYPE_L2NM
+    TAPI_CONNECTIVITY_SERVICE = ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE
+
+grpc_to_enum__service_type = functools.partial(
+    grpc_to_enum, ServiceTypeEnum, ORM_ServiceTypeEnum)  # maps a grpc service-type value to ORM_ServiceTypeEnum
+
+class ORM_ServiceStatusEnum(Enum):  # mirrors grpc ServiceStatusEnum, same purpose as above
+    UNDEFINED       = ServiceStatusEnum.SERVICESTATUS_UNDEFINED
+    PLANNED         = ServiceStatusEnum.SERVICESTATUS_PLANNED
+    ACTIVE          = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+    PENDING_REMOVAL = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL
+
+grpc_to_enum__service_status = functools.partial(
+    grpc_to_enum, ServiceStatusEnum, ORM_ServiceStatusEnum)  # maps a grpc service-status value to ORM_ServiceStatusEnum
+
+class ServiceModel(Model):  # ORM model of a service; endpoints are attached through ServiceEndPointModel relations
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    service_uuid = StringField(required=True, allow_empty=False)
+    service_type = EnumeratedField(ORM_ServiceTypeEnum, required=True)
+    service_constraints_fk = ForeignKeyField(ConstraintsModel)
+    service_status = EnumeratedField(ORM_ServiceStatusEnum, required=True)
+    service_config_fk = ForeignKeyField(ConfigModel)
+
+    def dump_id(self) -> Dict:  # build a ServiceId-like dict (context id + service uuid)
+        context_id = ContextModel(self.database, self.context_fk).dump_id()
+        return {
+            'context_id': context_id,
+            'service_uuid': {'uuid': self.service_uuid},
+        }
+
+    def dump_endpoint_ids(self) -> List[Dict]:  # endpoint ids of this service, sorted by relation pk for stable output
+        from .RelationModels import ServiceEndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints = get_related_objects(self, ServiceEndPointModel, 'endpoint_fk')
+        return [db_endpoint.dump_id() for db_endpoint in sorted(db_endpoints, key=operator.attrgetter('pk'))]
+
+    def dump_constraints(self) -> List[Dict]:  # delegate to the referenced ConstraintsModel
+        return ConstraintsModel(self.database, self.service_constraints_fk).dump()
+
+    def dump_config(self) -> Dict:  # delegate to the referenced ConfigModel
+        return ConfigModel(self.database, self.service_config_fk).dump()
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_endpoint_ids=True, include_constraints=True, include_config_rules=True
+        ) -> Dict:
+        result = {  # base Service-like dict; optional sections are toggled by the include_* flags
+            'service_id': self.dump_id(),
+            'service_type': self.service_type.value,
+            'service_status': {'service_status': self.service_status.value},
+        }
+        if include_endpoint_ids: result['service_endpoint_ids'] = self.dump_endpoint_ids()
+        if include_constraints: result['service_constraints'] = self.dump_constraints()
+        if include_config_rules: result.setdefault('service_config', {})['config_rules'] = self.dump_config()
+        return result
diff --git a/src/context/service/database/Tools.py b/src/context/service/database/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..36ffbcd46fcf686371b0799445ce4f9ce5b75838
--- /dev/null
+++ b/src/context/service/database/Tools.py
@@ -0,0 +1,58 @@
+import hashlib, re
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+
+# Convenient helper function to remove dictionary items in dict/list/set comprehensions.
+
+def remove_dict_key(dictionary : Dict, key : str):  # pop the key (if present) and return the same dictionary
+    dictionary.pop(key, None)
+    return dictionary
+
+# Enumeration classes are redundant with gRPC classes, but gRPC does not provide a programmatic method to retrieve
+# the values it expects from strings containing the desired value symbol or its integer value, so a kind of mapping is
+# required. Besides, ORM Models expect Enum classes in EnumeratedFields; we create specific and conveniently defined
+# Enum classes to serve both purposes.
+
+def grpc_to_enum(grpc_enum_class, orm_enum_class : Enum, grpc_enum_value):
+    grpc_enum_name = grpc_enum_class.Name(grpc_enum_value)  # symbolic name, e.g. 'SERVICETYPE_L3NM'
+    grpc_enum_prefix = orm_enum_class.__name__.upper()
+    grpc_enum_prefix = re.sub(r'^ORM_(.+)$', r'\1', grpc_enum_prefix)  # drop leading 'ORM_'
+    grpc_enum_prefix = re.sub(r'^(.+)ENUM$', r'\1', grpc_enum_prefix)  # drop trailing 'ENUM'
+    grpc_enum_prefix = grpc_enum_prefix + '_'
+    orm_enum_name = grpc_enum_name.replace(grpc_enum_prefix, '')
+    orm_enum_value = orm_enum_class._member_map_.get(orm_enum_name) # pylint: disable=protected-access
+    return orm_enum_value  # None when the grpc symbol has no ORM counterpart
+
+# For some models, it is convenient to produce a string hash for fast comparisons of existence or modification. Method
+# fast_hasher computes configurable length (between 1 and 64 bytes) hashes and retrieves them in hex representation.
+
+FASTHASHER_ITEM_ACCEPTED_FORMAT = 'Union[bytes, str]'
+FASTHASHER_DATA_ACCEPTED_FORMAT = 'Union[{fmt:s}, List[{fmt:s}], Tuple[{fmt:s}]]'.format(
+    fmt=FASTHASHER_ITEM_ACCEPTED_FORMAT)
+
+def fast_hasher(data : Union[bytes, str, List[Union[bytes, str]], Tuple[Union[bytes, str]]], digest_size : int = 8):
+    hasher = hashlib.blake2b(digest_size=digest_size)
+    # Do not accept sets, dicts, or other unordered data structures since their order is arbitrary thus producing
+    # different hashes depending on the order. Consider adding support for sets or dicts with previous sorting of
+    # items by their key.
+
+    if isinstance(data, bytes):
+        data = [data]
+    elif isinstance(data, str):
+        data = [data.encode('UTF-8')]
+    elif isinstance(data, (list, tuple)):
+        pass
+    else:
+        msg = 'data({:s}) must be {:s}, found {:s}'
+        raise TypeError(msg.format(str(data), FASTHASHER_DATA_ACCEPTED_FORMAT, str(type(data))))
+
+    for i,item in enumerate(data):  # hash items in the caller-given order; str items are encoded as UTF-8 first
+        if isinstance(item, str):
+            item = item.encode('UTF-8')
+        elif isinstance(item, bytes):
+            pass
+        else:
+            msg = 'data[{:d}]({:s}) must be {:s}, found {:s}'
+            raise TypeError(msg.format(i, str(item), FASTHASHER_ITEM_ACCEPTED_FORMAT, str(type(item))))
+        hasher.update(item)
+    return hasher.hexdigest()
diff --git a/src/context/service/database/TopologyModel.py b/src/context/service/database/TopologyModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc1f2d241e71a4e14511756928ed0b97a0499209
--- /dev/null
+++ b/src/context/service/database/TopologyModel.py
@@ -0,0 +1,40 @@
+import logging, operator
+from typing import Dict, List
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_related_objects
+from .ContextModel import ContextModel
+
+LOGGER = logging.getLogger(__name__)
+
+class TopologyModel(Model):  # ORM model of a topology; devices/links are attached through relation models
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    topology_uuid = StringField(required=True, allow_empty=False)
+
+    def dump_id(self) -> Dict:  # build a TopologyId-like dict (context id + topology uuid)
+        context_id = ContextModel(self.database, self.context_fk).dump_id()
+        return {
+            'context_id': context_id,
+            'topology_uuid': {'uuid': self.topology_uuid},
+        }
+
+    def dump_device_ids(self) -> List[Dict]:  # device ids in this topology, sorted by relation pk for stable output
+        from .RelationModels import TopologyDeviceModel # pylint: disable=import-outside-toplevel
+        db_devices = get_related_objects(self, TopologyDeviceModel, 'device_fk')
+        return [db_device.dump_id() for db_device in sorted(db_devices, key=operator.attrgetter('pk'))]
+
+    def dump_link_ids(self) -> List[Dict]:  # link ids in this topology, sorted by relation pk for stable output
+        from .RelationModels import TopologyLinkModel # pylint: disable=import-outside-toplevel
+        db_links = get_related_objects(self, TopologyLinkModel, 'link_fk')
+        return [db_link.dump_id() for db_link in sorted(db_links, key=operator.attrgetter('pk'))]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_devices=True, include_links=True
+        ) -> Dict:
+        result = {'topology_id': self.dump_id()}  # optional sections are toggled by the include_* flags
+        if include_devices: result['device_ids'] = self.dump_device_ids()
+        if include_links: result['link_ids'] = self.dump_link_ids()
+        return result
diff --git a/src/common/tools/service/__init__.py b/src/context/service/database/__init__.py
similarity index 100%
rename from src/common/tools/service/__init__.py
rename to src/context/service/database/__init__.py
diff --git a/src/context/service/grpc_server/Constants.py b/src/context/service/grpc_server/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..80ff198de86644e53d4d8cc6a693efae237f450f
--- /dev/null
+++ b/src/context/service/grpc_server/Constants.py
@@ -0,0 +1,9 @@
+TOPIC_CONTEXT  = 'context'
+TOPIC_TOPOLOGY = 'topology'
+TOPIC_DEVICE   = 'device'
+TOPIC_LINK     = 'link'
+TOPIC_SERVICE  = 'service'
+
+TOPICS = {TOPIC_CONTEXT, TOPIC_TOPOLOGY, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE}  # all topics the servicer handles
+
+CONSUME_TIMEOUT = 0.5 # seconds; polling timeout used when consuming events from the message broker
diff --git a/src/context/service/ContextService.py b/src/context/service/grpc_server/ContextService.py
similarity index 69%
rename from src/context/service/ContextService.py
rename to src/context/service/grpc_server/ContextService.py
index 54f90a64116a4385908fb025735998b9fd4dad55..ab7653e37d318d0bfeea4a60213206d391c0dfda 100644
--- a/src/context/service/ContextService.py
+++ b/src/context/service/grpc_server/ContextService.py
@@ -4,17 +4,20 @@ from concurrent import futures
 from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
 from grpc_health.v1.health_pb2 import HealthCheckResponse
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
-from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
-from context.service.ContextServiceServicerImpl import ContextServiceServicerImpl
 from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
+from .ContextServiceServicerImpl import ContextServiceServicerImpl
 
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
 class ContextService:
-    def __init__(self, database, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
-                 grace_period=GRPC_GRACE_PERIOD):
+    def __init__(
+        self, database, messagebroker, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        grace_period=GRPC_GRACE_PERIOD):
+
         self.database = database
+        self.messagebroker = messagebroker
         self.address = address
         self.port = port
         self.endpoint = None
@@ -26,14 +29,14 @@ class ContextService:
         self.server = None
 
     def start(self):
-        self.endpoint = '{}:{}'.format(self.address, self.port)
-        LOGGER.debug('Starting Service (tentative endpoint: {}, max_workers: {})...'.format(
-            self.endpoint, self.max_workers))
+        self.endpoint = '{:s}:{:s}'.format(self.address, str(self.port))
+        LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
+            str(self.endpoint), str(self.max_workers)))
 
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
 
-        self.context_servicer = ContextServiceServicerImpl(self.database)
+        self.context_servicer = ContextServiceServicerImpl(self.database, self.messagebroker)
         add_ContextServiceServicer_to_server(self.context_servicer, self.server)
 
         self.health_servicer = HealthServicer(
@@ -41,15 +44,15 @@ class ContextService:
         add_HealthServicer_to_server(self.health_servicer, self.server)
 
         port = self.server.add_insecure_port(self.endpoint)
-        self.endpoint = '{}:{}'.format(self.address, port)
-        LOGGER.info('Listening on {}...'.format(self.endpoint))
+        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
+        LOGGER.info('Listening on {:s}...'.format(str(self.endpoint)))
         self.server.start()
         self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
 
         LOGGER.debug('Service started')
 
     def stop(self):
-        LOGGER.debug('Stopping service (grace period {} seconds)...'.format(self.grace_period))
+        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
         self.health_servicer.enter_graceful_shutdown()
         self.server.stop(self.grace_period)
         LOGGER.debug('Service stopped')
diff --git a/src/context/service/grpc_server/ContextServiceServicerImpl.py b/src/context/service/grpc_server/ContextServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8f7b648b4b919cc61330f236195c444f550ede1
--- /dev/null
+++ b/src/context/service/grpc_server/ContextServiceServicerImpl.py
@@ -0,0 +1,525 @@
+import grpc, json, logging, operator
+from typing import Iterator, List, Set, Tuple
+from common.message_broker.MessageBroker import MessageBroker
+from common.orm.Database import Database
+from common.orm.HighLevel import (
+    get_all_objects, get_object, get_or_create_object, get_related_objects, update_or_create_object)
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
+from context.proto.context_pb2 import (
+    Context, ContextEvent, ContextId, ContextIdList, ContextList, Device, DeviceEvent, DeviceId, DeviceIdList,
+    DeviceList, Empty, EventTypeEnum, Link, LinkEvent, LinkId, LinkIdList, LinkList, Service, ServiceEvent, ServiceId,
+    ServiceIdList, ServiceList, Topology, TopologyEvent, TopologyId, TopologyIdList, TopologyList)
+from context.proto.context_pb2_grpc import ContextServiceServicer
+from context.service.database.ConfigModel import ConfigModel, ConfigRuleModel, set_config
+from context.service.database.ConstraintModel import ConstraintModel, ConstraintsModel, set_constraints
+from context.service.database.ContextModel import ContextModel
+from context.service.database.DeviceModel import (
+    DeviceModel, DriverModel, grpc_to_enum__device_operational_status, set_drivers)
+from context.service.database.EndPointModel import EndPointModel
+from context.service.database.Events import notify_event
+from context.service.database.LinkModel import LinkModel
+from context.service.database.RelationModels import (
+    LinkEndPointModel, ServiceEndPointModel, TopologyDeviceModel, TopologyLinkModel)
+from context.service.database.ServiceModel import (
+    ServiceModel, grpc_to_enum__service_status, grpc_to_enum__service_type)
+from context.service.database.TopologyModel import TopologyModel
+from context.service.grpc_server.Constants import (
+    CONSUME_TIMEOUT, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_TOPOLOGY)
+
+LOGGER = logging.getLogger(__name__)
+
+SERVICE_NAME = 'Context'
+METHOD_NAMES = [
+    'ListContextIds',  'ListContexts',   'GetContext',  'SetContext',  'RemoveContext',  'GetContextEvents',
+    'ListTopologyIds', 'ListTopologies', 'GetTopology', 'SetTopology', 'RemoveTopology', 'GetTopologyEvents',
+    'ListDeviceIds',   'ListDevices',    'GetDevice',   'SetDevice',   'RemoveDevice',   'GetDeviceEvents',
+    'ListLinkIds',     'ListLinks',      'GetLink',     'SetLink',     'RemoveLink',     'GetLinkEvents',
+    'ListServiceIds',  'ListServices',   'GetService',  'SetService',  'RemoveService',  'GetServiceEvents',
+]
+METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
+
+class ContextServiceServicerImpl(ContextServiceServicer):
+    def __init__(self, database : Database, messagebroker : MessageBroker):
+        LOGGER.debug('Creating Servicer...')
+        self.database = database
+        self.messagebroker = messagebroker
+        LOGGER.debug('Servicer Created')
+
+
+    # ----- Context ----------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListContextIds(self, request: Empty, context : grpc.ServicerContext) -> ContextIdList:
+        db_contexts : List[ContextModel] = get_all_objects(self.database, ContextModel)
+        db_contexts = sorted(db_contexts, key=operator.attrgetter('pk'))
+        return ContextIdList(context_ids=[db_context.dump_id() for db_context in db_contexts])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListContexts(self, request: Empty, context : grpc.ServicerContext) -> ContextList:
+        db_contexts : List[ContextModel] = get_all_objects(self.database, ContextModel)
+        db_contexts = sorted(db_contexts, key=operator.attrgetter('pk'))
+        return ContextList(contexts=[db_context.dump() for db_context in db_contexts])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetContext(self, request: ContextId, context : grpc.ServicerContext) -> Context:
+        context_uuid = request.context_uuid.uuid
+        db_context : ContextModel = get_object(self.database, ContextModel, context_uuid)
+        return Context(**db_context.dump(include_services=True, include_topologies=True))
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetContext(self, request: Context, context : grpc.ServicerContext) -> ContextId:
+        context_uuid = request.context_id.context_uuid.uuid
+
+        for i,topology_id in enumerate(request.topology_ids):
+            topology_context_uuid = topology_id.context_id.context_uuid.uuid
+            if topology_context_uuid != context_uuid:
+                raise InvalidArgumentException(
+                    'request.topology_ids[{:d}].context_id.context_uuid.uuid'.format(i), topology_context_uuid,
+                    ['should be == {:s}({:s})'.format('request.context_id.context_uuid.uuid', context_uuid)])
+
+        for i,service_id in enumerate(request.service_ids):
+            service_context_uuid = service_id.context_id.context_uuid.uuid
+            if service_context_uuid != context_uuid:
+                raise InvalidArgumentException(
+                    'request.service_ids[{:d}].context_id.context_uuid.uuid'.format(i), service_context_uuid,
+                    ['should be == {:s}({:s})'.format('request.context_id.context_uuid.uuid', context_uuid)])
+
+        result : Tuple[ContextModel, bool] = update_or_create_object(
+            self.database, ContextModel, context_uuid, {'context_uuid': context_uuid})
+        db_context, updated = result
+
+        for i,topology_id in enumerate(request.topology_ids):
+            topology_context_uuid = topology_id.context_id.context_uuid.uuid
+            topology_uuid = topology_id.topology_uuid.uuid
+            get_object(self.database, TopologyModel, [context_uuid, topology_uuid]) # just to confirm it exists
+
+        for i,service_id in enumerate(request.service_ids):
+            service_context_uuid = service_id.context_id.context_uuid.uuid
+            service_uuid = service_id.service_uuid.uuid
+            get_object(self.database, ServiceModel, [context_uuid, service_uuid]) # just to confirm it exists
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_context_id = db_context.dump_id()
+        notify_event(self.messagebroker, TOPIC_CONTEXT, event_type, {'context_id': dict_context_id})
+        return ContextId(**dict_context_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveContext(self, request: ContextId, context : grpc.ServicerContext) -> Empty:
+        context_uuid = request.context_uuid.uuid
+        db_context = ContextModel(self.database, context_uuid, auto_load=False)
+        found = db_context.load()
+        if not found: return Empty()
+
+        dict_context_id = db_context.dump_id()
+        db_context.delete()
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_CONTEXT, event_type, {'context_id': dict_context_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetContextEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ContextEvent]:
+        for message in self.messagebroker.consume({TOPIC_CONTEXT}, consume_timeout=CONSUME_TIMEOUT):
+            yield ContextEvent(**json.loads(message.content))
+
+
+    # ----- Topology ---------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListTopologyIds(self, request: ContextId, context : grpc.ServicerContext) -> TopologyIdList:
+        context_uuid = request.context_uuid.uuid
+        db_context : ContextModel = get_object(self.database, ContextModel, context_uuid)
+        db_topologies : Set[TopologyModel] = get_related_objects(db_context, TopologyModel)
+        db_topologies = sorted(db_topologies, key=operator.attrgetter('pk'))
+        return TopologyIdList(topology_ids=[db_topology.dump_id() for db_topology in db_topologies])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListTopologies(self, request: ContextId, context : grpc.ServicerContext) -> TopologyList:
+        context_uuid = request.context_uuid.uuid
+        db_context : ContextModel = get_object(self.database, ContextModel, context_uuid)
+        db_topologies : Set[TopologyModel] = get_related_objects(db_context, TopologyModel)
+        db_topologies = sorted(db_topologies, key=operator.attrgetter('pk'))
+        return TopologyList(topologies=[db_topology.dump() for db_topology in db_topologies])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Topology:
+        str_key = key_to_str([request.context_id.context_uuid.uuid, request.topology_uuid.uuid])
+        db_topology : TopologyModel = get_object(self.database, TopologyModel, str_key)
+        return Topology(**db_topology.dump(include_devices=True, include_links=True))
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetTopology(self, request: Topology, context : grpc.ServicerContext) -> TopologyId:
+        context_uuid = request.topology_id.context_id.context_uuid.uuid
+        db_context : ContextModel = get_object(self.database, ContextModel, context_uuid)
+
+        topology_uuid = request.topology_id.topology_uuid.uuid
+        str_topology_key = key_to_str([context_uuid, topology_uuid])
+        result : Tuple[TopologyModel, bool] = update_or_create_object(
+            self.database, TopologyModel, str_topology_key, {'context_fk': db_context, 'topology_uuid': topology_uuid})
+        db_topology,updated = result
+
+        for device_id in request.device_ids:
+            device_uuid = device_id.device_uuid.uuid
+            db_device = get_object(self.database, DeviceModel, device_uuid)
+            str_topology_device_key = key_to_str([str_topology_key, device_uuid], separator='--')
+            result : Tuple[TopologyDeviceModel, bool] = update_or_create_object(
+                self.database, TopologyDeviceModel, str_topology_device_key,
+                {'topology_fk': db_topology, 'device_fk': db_device})
+            #db_topology_device,topology_device_updated = result
+
+        for link_id in request.link_ids:
+            link_uuid = link_id.link_uuid.uuid
+            db_link = get_object(self.database, LinkModel, link_uuid)
+
+            str_topology_link_key = key_to_str([str_topology_key, link_uuid], separator='--')
+            result : Tuple[TopologyLinkModel, bool] = update_or_create_object(
+                self.database, TopologyLinkModel, str_topology_link_key,
+                {'topology_fk': db_topology, 'link_fk': db_link})
+            #db_topology_link,topology_link_updated = result
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_topology_id = db_topology.dump_id()
+        notify_event(self.messagebroker, TOPIC_TOPOLOGY, event_type, {'topology_id': dict_topology_id})
+        return TopologyId(**dict_topology_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveTopology(self, request: TopologyId, context : grpc.ServicerContext) -> Empty:
+        context_uuid = request.context_id.context_uuid.uuid
+        topology_uuid = request.topology_uuid.uuid
+        db_topology = TopologyModel(self.database, key_to_str([context_uuid, topology_uuid]), auto_load=False)
+        found = db_topology.load()
+        if not found: return Empty()
+
+        dict_topology_id = db_topology.dump_id()
+        db_topology.delete()
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_TOPOLOGY, event_type, {'topology_id': dict_topology_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetTopologyEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[TopologyEvent]:
+        for message in self.messagebroker.consume({TOPIC_TOPOLOGY}, consume_timeout=CONSUME_TIMEOUT):
+            yield TopologyEvent(**json.loads(message.content))
+
+
+    # ----- Device -----------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListDeviceIds(self, request: Empty, context : grpc.ServicerContext) -> DeviceIdList:
+        db_devices : List[DeviceModel] = get_all_objects(self.database, DeviceModel)
+        db_devices = sorted(db_devices, key=operator.attrgetter('pk'))
+        return DeviceIdList(device_ids=[db_device.dump_id() for db_device in db_devices])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListDevices(self, request: Empty, context : grpc.ServicerContext) -> DeviceList:
+        db_devices : List[DeviceModel] = get_all_objects(self.database, DeviceModel)
+        db_devices = sorted(db_devices, key=operator.attrgetter('pk'))
+        return DeviceList(devices=[db_device.dump() for db_device in db_devices])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Device:
+        device_uuid = request.device_uuid.uuid
+        db_device : DeviceModel = get_object(self.database, DeviceModel, device_uuid)
+        return Device(**db_device.dump(
+            include_config_rules=True, include_drivers=True, include_endpoints=True))
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetDevice(self, request: Device, context : grpc.ServicerContext) -> DeviceId:
+        device_uuid = request.device_id.device_uuid.uuid
+
+        for i,endpoint in enumerate(request.device_endpoints):
+            endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+            if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+            if device_uuid != endpoint_device_uuid:
+                raise InvalidArgumentException(
+                    'request.device_endpoints[{:d}].device_id.device_uuid.uuid'.format(i), endpoint_device_uuid,
+                    ['should be == {:s}({:s})'.format('request.device_id.device_uuid.uuid', device_uuid)])
+
+        running_config_result = set_config(self.database, device_uuid, 'running', request.device_config.config_rules)
+        db_running_config = running_config_result[0][0]
+
+        result : Tuple[DeviceModel, bool] = update_or_create_object(self.database, DeviceModel, device_uuid, {
+            'device_uuid'              : device_uuid,
+            'device_type'              : request.device_type,
+            'device_operational_status': grpc_to_enum__device_operational_status(request.device_operational_status),
+            'device_config_fk'         : db_running_config,
+        })
+        db_device, updated = result
+
+        set_drivers(self.database, db_device, request.device_drivers)
+
+        for i,endpoint in enumerate(request.device_endpoints):
+            endpoint_uuid = endpoint.endpoint_id.endpoint_uuid.uuid
+            endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+            if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+
+            str_endpoint_key = key_to_str([device_uuid, endpoint_uuid])
+            endpoint_attributes = {
+                'device_fk'    : db_device,
+                'endpoint_uuid': endpoint_uuid,
+                'endpoint_type': endpoint.endpoint_type,
+            }
+
+            endpoint_topology_context_uuid = endpoint.endpoint_id.topology_id.context_id.context_uuid.uuid
+            endpoint_topology_uuid = endpoint.endpoint_id.topology_id.topology_uuid.uuid
+            if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+                str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+                db_topology : TopologyModel = get_object(self.database, TopologyModel, str_topology_key)
+
+                str_topology_device_key = key_to_str([str_topology_key, device_uuid], separator='--')
+                result : Tuple[TopologyDeviceModel, bool] = get_or_create_object(
+                    self.database, TopologyDeviceModel, str_topology_device_key, {
+                        'topology_fk': db_topology, 'device_fk': db_device})
+                #db_topology_device, topology_device_created = result
+
+                str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+                endpoint_attributes['topology_fk'] = db_topology
+
+            result : Tuple[EndPointModel, bool] = update_or_create_object(
+                self.database, EndPointModel, str_endpoint_key, endpoint_attributes)
+            #db_endpoint, updated = result
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_device_id = db_device.dump_id()
+        notify_event(self.messagebroker, TOPIC_DEVICE, event_type, {'device_id': dict_device_id})
+        return DeviceId(**dict_device_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveDevice(self, request: DeviceId, context : grpc.ServicerContext) -> Empty:
+        device_uuid = request.device_uuid.uuid
+        db_device = DeviceModel(self.database, device_uuid, auto_load=False)
+        found = db_device.load()
+        if not found: return Empty()
+
+        dict_device_id = db_device.dump_id()
+
+        for db_endpoint_pk,_ in db_device.references(EndPointModel):
+            EndPointModel(self.database, db_endpoint_pk).delete()
+
+        for db_topology_device_pk,_ in db_device.references(TopologyDeviceModel):
+            TopologyDeviceModel(self.database, db_topology_device_pk).delete()
+
+        for db_driver_pk,_ in db_device.references(DriverModel):
+            DriverModel(self.database, db_driver_pk).delete()
+
+        db_config = ConfigModel(self.database, db_device.device_config_fk)
+        for db_config_rule_pk,_ in db_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_device.delete()
+        db_config.delete()
+
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_DEVICE, event_type, {'device_id': dict_device_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetDeviceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[DeviceEvent]:
+        for message in self.messagebroker.consume({TOPIC_DEVICE}, consume_timeout=CONSUME_TIMEOUT):
+            yield DeviceEvent(**json.loads(message.content))
+
+
+    # ----- Link -------------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListLinkIds(self, request: Empty, context : grpc.ServicerContext) -> LinkIdList:
+        db_links : List[LinkModel] = get_all_objects(self.database, LinkModel)
+        db_links = sorted(db_links, key=operator.attrgetter('pk'))
+        return LinkIdList(link_ids=[db_link.dump_id() for db_link in db_links])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListLinks(self, request: Empty, context : grpc.ServicerContext) -> LinkList:
+        db_links : List[LinkModel] = get_all_objects(self.database, LinkModel)
+        db_links = sorted(db_links, key=operator.attrgetter('pk'))
+        return LinkList(links=[db_link.dump() for db_link in db_links])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetLink(self, request: LinkId, context : grpc.ServicerContext) -> Link:
+        link_uuid = request.link_uuid.uuid
+        db_link : LinkModel = get_object(self.database, LinkModel, link_uuid)
+        return Link(**db_link.dump())
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetLink(self, request: Link, context : grpc.ServicerContext) -> LinkId:
+        link_uuid = request.link_id.link_uuid.uuid
+        result : Tuple[LinkModel, bool] = update_or_create_object(
+            self.database, LinkModel, link_uuid, {'link_uuid': link_uuid})
+        db_link, updated = result
+
+        for endpoint_id in request.link_endpoint_ids:
+            endpoint_uuid                  = endpoint_id.endpoint_uuid.uuid
+            endpoint_device_uuid           = endpoint_id.device_id.device_uuid.uuid
+            endpoint_topology_uuid         = endpoint_id.topology_id.topology_uuid.uuid
+            endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+            str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+
+            db_topology = None
+            if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+                str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+                db_topology : TopologyModel = get_object(self.database, TopologyModel, str_topology_key)
+                str_topology_device_key = key_to_str([str_topology_key, endpoint_device_uuid], separator='--')
+                get_object(self.database, TopologyDeviceModel, str_topology_device_key) # check device is in topology
+                str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+
+            db_endpoint : EndPointModel = get_object(self.database, EndPointModel, str_endpoint_key)
+
+            str_link_endpoint_key = key_to_str([link_uuid, endpoint_device_uuid], separator='--')
+            result : Tuple[LinkEndPointModel, bool] = get_or_create_object(
+                self.database, LinkEndPointModel, str_link_endpoint_key, {
+                    'link_fk': db_link, 'endpoint_fk': db_endpoint})
+            #db_link_endpoint, link_endpoint_created = result
+
+            if db_topology is not None:
+                str_topology_link_key = key_to_str([str_topology_key, link_uuid], separator='--')
+                result : Tuple[TopologyLinkModel, bool] = get_or_create_object(
+                    self.database, TopologyLinkModel, str_topology_link_key, {
+                        'topology_fk': db_topology, 'link_fk': db_link})
+                #db_topology_link, topology_link_created = result
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_link_id = db_link.dump_id()
+        notify_event(self.messagebroker, TOPIC_LINK, event_type, {'link_id': dict_link_id})
+        return LinkId(**dict_link_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveLink(self, request: LinkId, context : grpc.ServicerContext) -> Empty:
+        link_uuid = request.link_uuid.uuid
+        db_link = LinkModel(self.database, link_uuid, auto_load=False)
+        found = db_link.load()
+        if not found: return Empty()
+
+        dict_link_id = db_link.dump_id()
+
+        for db_link_endpoint_pk,_ in db_link.references(LinkEndPointModel):
+            LinkEndPointModel(self.database, db_link_endpoint_pk).delete()
+
+        for db_topology_link_pk,_ in db_link.references(TopologyLinkModel):
+            TopologyLinkModel(self.database, db_topology_link_pk).delete()
+
+        db_link.delete()
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_LINK, event_type, {'link_id': dict_link_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetLinkEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[LinkEvent]:
+        for message in self.messagebroker.consume({TOPIC_LINK}, consume_timeout=CONSUME_TIMEOUT):
+            yield LinkEvent(**json.loads(message.content))
+
+
+    # ----- Service ----------------------------------------------------------------------------------------------------
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListServiceIds(self, request: ContextId, context : grpc.ServicerContext) -> ServiceIdList:
+        db_context : ContextModel = get_object(self.database, ContextModel, request.context_uuid.uuid)
+        db_services : Set[ServiceModel] = get_related_objects(db_context, ServiceModel)
+        db_services = sorted(db_services, key=operator.attrgetter('pk'))
+        return ServiceIdList(service_ids=[db_service.dump_id() for db_service in db_services])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def ListServices(self, request: ContextId, context : grpc.ServicerContext) -> ServiceList:
+        db_context : ContextModel = get_object(self.database, ContextModel, request.context_uuid.uuid)
+        db_services : Set[ServiceModel] = get_related_objects(db_context, ServiceModel)
+        db_services = sorted(db_services, key=operator.attrgetter('pk'))
+        return ServiceList(services=[db_service.dump() for db_service in db_services])
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetService(self, request: ServiceId, context : grpc.ServicerContext) -> Service:
+        str_key = key_to_str([request.context_id.context_uuid.uuid, request.service_uuid.uuid])
+        db_service : ServiceModel = get_object(self.database, ServiceModel, str_key)
+        return Service(**db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def SetService(self, request: Service, context : grpc.ServicerContext) -> ServiceId:
+        context_uuid = request.service_id.context_id.context_uuid.uuid
+        db_context : ContextModel = get_object(self.database, ContextModel, context_uuid)
+
+        for i,endpoint_id in enumerate(request.service_endpoint_ids):
+            endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+            if len(endpoint_topology_context_uuid) > 0 and context_uuid != endpoint_topology_context_uuid:
+                raise InvalidArgumentException(
+                    'request.service_endpoint_ids[{:d}].topology_id.context_id.context_uuid.uuid'.format(i),
+                    endpoint_topology_context_uuid,
+                    ['should be == {:s}({:s})'.format('request.service_id.context_id.context_uuid.uuid', context_uuid)])
+
+        service_uuid = request.service_id.service_uuid.uuid
+        str_service_key = key_to_str([context_uuid, service_uuid])
+
+        constraints_result = set_constraints(
+            self.database, str_service_key, 'constraints', request.service_constraints)
+        db_constraints = constraints_result[0][0]
+
+        running_config_result = set_config(
+            self.database, str_service_key, 'running', request.service_config.config_rules)
+        db_running_config = running_config_result[0][0]
+
+        result : Tuple[ServiceModel, bool] = update_or_create_object(self.database, ServiceModel, str_service_key, {
+            'context_fk'            : db_context,
+            'service_uuid'          : service_uuid,
+            'service_type'          : grpc_to_enum__service_type(request.service_type),
+            'service_constraints_fk': db_constraints,
+            'service_status'        : grpc_to_enum__service_status(request.service_status.service_status),
+            'service_config_fk'     : db_running_config,
+        })
+        db_service, updated = result
+
+        for i,endpoint_id in enumerate(request.service_endpoint_ids):
+            endpoint_uuid                  = endpoint_id.endpoint_uuid.uuid
+            endpoint_device_uuid           = endpoint_id.device_id.device_uuid.uuid
+            endpoint_topology_uuid         = endpoint_id.topology_id.topology_uuid.uuid
+            endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+            str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+            if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+                str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+                str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+
+            db_endpoint : EndPointModel = get_object(self.database, EndPointModel, str_endpoint_key)
+
+            str_service_endpoint_key = key_to_str([service_uuid, endpoint_device_uuid], separator='--')
+            result : Tuple[ServiceEndPointModel, bool] = get_or_create_object(
+                self.database, ServiceEndPointModel, str_service_endpoint_key, {
+                    'service_fk': db_service, 'endpoint_fk': db_endpoint})
+            #db_service_endpoint, service_endpoint_created = result
+
+        event_type = EventTypeEnum.EVENTTYPE_UPDATE if updated else EventTypeEnum.EVENTTYPE_CREATE
+        dict_service_id = db_service.dump_id()
+        notify_event(self.messagebroker, TOPIC_SERVICE, event_type, {'service_id': dict_service_id})
+        return ServiceId(**dict_service_id)
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def RemoveService(self, request: ServiceId, context : grpc.ServicerContext) -> Empty:
+        context_uuid = request.context_id.context_uuid.uuid
+        service_uuid = request.service_uuid.uuid
+        db_service = ServiceModel(self.database, key_to_str([context_uuid, service_uuid]), auto_load=False)
+        found = db_service.load()
+        if not found: return Empty()
+
+        dict_service_id = db_service.dump_id()
+
+        for db_service_endpoint_pk,_ in db_service.references(ServiceEndPointModel):
+            ServiceEndPointModel(self.database, db_service_endpoint_pk).delete()
+
+        db_config = ConfigModel(self.database, db_service.service_config_fk)
+        for db_config_rule_pk,_ in db_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_constraints = ConstraintsModel(self.database, db_service.service_constraints_fk)
+        for db_constraint_pk,_ in db_constraints.references(ConstraintModel):
+            ConstraintModel(self.database, db_constraint_pk).delete()
+
+        db_service.delete()
+        db_config.delete()
+        db_constraints.delete()
+
+        event_type = EventTypeEnum.EVENTTYPE_REMOVE
+        notify_event(self.messagebroker, TOPIC_SERVICE, event_type, {'service_id': dict_service_id})
+        return Empty()
+
+    @safe_and_metered_rpc_method(METRICS, LOGGER)
+    def GetServiceEvents(self, request: Empty, context : grpc.ServicerContext) -> Iterator[ServiceEvent]:
+        for message in self.messagebroker.consume({TOPIC_SERVICE}, consume_timeout=CONSUME_TIMEOUT):
+            yield ServiceEvent(**json.loads(message.content))
diff --git a/src/context/service/rest_server/resources/__init__.py b/src/context/service/grpc_server/__init__.py
similarity index 100%
rename from src/context/service/rest_server/resources/__init__.py
rename to src/context/service/grpc_server/__init__.py
diff --git a/src/context/service/rest_server/Resources.py b/src/context/service/rest_server/Resources.py
new file mode 100644
index 0000000000000000000000000000000000000000..54a21ed3984ced399d82f72b7ca7f77e447f0459
--- /dev/null
+++ b/src/context/service/rest_server/Resources.py
@@ -0,0 +1,130 @@
+from flask.json import jsonify
+from flask_restful import Resource
+from google.protobuf.json_format import MessageToDict
+from common.message_broker.Factory import LOGGER
+from common.orm.Database import Database
+from context.proto.context_pb2 import ContextId, DeviceId, Empty, LinkId, ServiceId, TopologyId
+from context.service.grpc_server.ContextServiceServicerImpl import ContextServiceServicerImpl
+
+def grpc_context_id(context_uuid):
+    return ContextId(**{
+        'context_uuid': {'uuid': context_uuid}
+    })
+
+def grpc_topology_id(context_uuid, topology_uuid):
+    return TopologyId(**{
+        'context_id': {'context_uuid': {'uuid': context_uuid}},
+        'topology_uuid': {'uuid': topology_uuid}
+    })
+
+def grpc_service_id(context_uuid, service_uuid):
+    return ServiceId(**{
+        'context_id': {'context_uuid': {'uuid': context_uuid}},
+        'service_uuid': {'uuid': service_uuid}
+    })
+
+def grpc_device_id(device_uuid):
+    return DeviceId(**{
+        'device_uuid': {'uuid': device_uuid}
+    })
+
+def grpc_link_id(link_uuid):
+    return LinkId(**{
+        'link_uuid': {'uuid': link_uuid}
+    })
+
+def format_grpc_to_json(grpc_reply):
+    return jsonify(MessageToDict(
+        grpc_reply, including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+class _Resource(Resource):
+    def __init__(self, database : Database) -> None:
+        super().__init__()
+        self.database = database
+        self.servicer = ContextServiceServicerImpl(self.database, None)
+
+class ContextIds(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListContextIds(Empty(), None))
+
+class Contexts(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListContexts(Empty(), None))
+
+class Context(_Resource):
+    def get(self, context_uuid : str):
+        return format_grpc_to_json(self.servicer.GetContext(grpc_context_id(context_uuid), None))
+
+class TopologyIds(_Resource):
+    def get(self, context_uuid : str):
+        return format_grpc_to_json(self.servicer.ListTopologyIds(grpc_context_id(context_uuid), None))
+
+class Topologies(_Resource):
+    def get(self, context_uuid : str):
+        return format_grpc_to_json(self.servicer.ListTopologies(grpc_context_id(context_uuid), None))
+
+class Topology(_Resource):
+    def get(self, context_uuid : str, topology_uuid : str):
+        return format_grpc_to_json(self.servicer.GetTopology(grpc_topology_id(context_uuid, topology_uuid), None))
+
+class ServiceIds(_Resource):
+    def get(self, context_uuid : str):
+        return format_grpc_to_json(self.servicer.ListServiceIds(grpc_context_id(context_uuid), None))
+
+class Services(_Resource):
+    def get(self, context_uuid : str):
+        return format_grpc_to_json(self.servicer.ListServices(grpc_context_id(context_uuid), None))
+
+class Service(_Resource):
+    def get(self, context_uuid : str, service_uuid : str):
+        return format_grpc_to_json(self.servicer.GetService(grpc_service_id(context_uuid, service_uuid), None))
+
+class DeviceIds(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListDeviceIds(Empty(), None))
+
+class Devices(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListDevices(Empty(), None))
+
+class Device(_Resource):
+    def get(self, device_uuid : str):
+        return format_grpc_to_json(self.servicer.GetDevice(grpc_device_id(device_uuid), None))
+
+class LinkIds(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListLinkIds(Empty(), None))
+
+class Links(_Resource):
+    def get(self):
+        return format_grpc_to_json(self.servicer.ListLinks(Empty(), None))
+
+class Link(_Resource):
+    def get(self, link_uuid : str):
+        return format_grpc_to_json(self.servicer.GetLink(grpc_link_id(link_uuid), None))
+
+# Use 'path' type in Service and Link because service_uuid and link_uuid might contain char '/' and Flask is unable to
+# recognize them in 'string' type.
+RESOURCES = [
+    # (endpoint_name, resource_class, resource_url)
+    ('api.context_ids',  ContextIds,  '/context_ids'),
+    ('api.contexts',     Contexts,    '/contexts'),
+    ('api.context',      Context,     '/context/<string:context_uuid>'),
+
+    ('api.topology_ids', TopologyIds, '/context/<string:context_uuid>/topology_ids'),
+    ('api.topologies',   Topologies,  '/context/<string:context_uuid>/topologies'),
+    ('api.topology',     Topology,    '/context/<string:context_uuid>/topology/<string:topology_uuid>'),
+
+    ('api.service_ids',  ServiceIds,  '/context/<string:context_uuid>/service_ids'),
+    ('api.services',     Services,    '/context/<string:context_uuid>/services'),
+    ('api.service',      Service,     '/context/<string:context_uuid>/service/<path:service_uuid>'),
+
+    ('api.device_ids',   DeviceIds,   '/device_ids'),
+    ('api.devices',      Devices,     '/devices'),
+    ('api.device',       Device,      '/device/<string:device_uuid>'),
+
+    ('api.link_ids',     LinkIds,     '/link_ids'),
+    ('api.links',        Links,       '/links'),
+    ('api.link',         Link,        '/link/<path:link_uuid>'),
+]
diff --git a/src/context/service/rest_server/Server.py b/src/context/service/rest_server/Server.py
index 16badfce8c84f058aeaeac79993ada726a17f06a..3095d77c48e756dd0c5d655b06a2b0625bcc89d5 100644
--- a/src/context/service/rest_server/Server.py
+++ b/src/context/service/rest_server/Server.py
@@ -15,6 +15,8 @@ class Server(threading.Thread):
         self.host = host
         self.port = port
         self.base_url = base_url
+        self.srv = None
+        self.ctx = None
         self.app = Flask(__name__)
         self.api = Api(self.app, prefix=self.base_url)
 
diff --git a/src/context/service/rest_server/resources/Context.py b/src/context/service/rest_server/resources/Context.py
deleted file mode 100644
index 293ff24edebab8a1ded55e6ff5120409a534a332..0000000000000000000000000000000000000000
--- a/src/context/service/rest_server/resources/Context.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from flask.json import jsonify
-from flask_restful import Resource
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID
-
-class Context(Resource):
-    def __init__(self, database : Database) -> None:
-        super().__init__()
-        self.database = database
-
-    def get(self):
-        with self.database:
-            return jsonify(self.database.context(DEFAULT_CONTEXT_ID).dump())
diff --git a/src/context/tests/example_objects.py b/src/context/tests/example_objects.py
new file mode 100644
index 0000000000000000000000000000000000000000..81339c04e1fe77667bd41179f3fa0813c5fc69df
--- /dev/null
+++ b/src/context/tests/example_objects.py
@@ -0,0 +1,194 @@
+from copy import deepcopy
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from context.proto.context_pb2 import (
+    ConfigActionEnum, DeviceDriverEnum, DeviceOperationalStatusEnum, ServiceStatusEnum, ServiceTypeEnum)
+
+# Some example objects to be used by the tests
+
+# Helper methods
+def config_rule(action, resource_key, resource_value):
+    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
+
+def endpoint_id(topology_id, device_id, endpoint_uuid):
+    return {'topology_id': deepcopy(topology_id), 'device_id': deepcopy(device_id),
+            'endpoint_uuid': {'uuid': endpoint_uuid}}
+
+def endpoint(topology_id, device_id, endpoint_uuid, endpoint_type):
+    return {'endpoint_id': endpoint_id(topology_id, device_id, endpoint_uuid), 'endpoint_type': endpoint_type}
+
+## use "deepcopy" to prevent propagating forced changes during tests
+CONTEXT_ID = {'context_uuid': {'uuid': DEFAULT_CONTEXT_UUID}}
+CONTEXT = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_ids': [],
+    'service_ids': [],
+}
+
+TOPOLOGY_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_uuid': {'uuid': DEFAULT_TOPOLOGY_UUID},
+}
+TOPOLOGY = {
+    'topology_id': deepcopy(TOPOLOGY_ID),
+    'device_ids': [],
+    'link_ids': [],
+}
+
+DEVICE1_UUID = 'DEV1'
+DEVICE1_ID = {'device_uuid': {'uuid': DEVICE1_UUID}}
+DEVICE1 = {
+    'device_id': deepcopy(DEVICE1_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value1'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value2'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value3'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP2', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP3', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE1_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+DEVICE2_UUID = 'DEV2'
+DEVICE2_ID = {'device_uuid': {'uuid': DEVICE2_UUID}}
+DEVICE2 = {
+    'device_id': deepcopy(DEVICE2_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP1', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP3', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE2_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+DEVICE3_UUID = 'DEV3'
+DEVICE3_ID = {'device_uuid': {'uuid': DEVICE3_UUID}}
+DEVICE3 = {
+    'device_id': deepcopy(DEVICE3_ID),
+    'device_type': 'packet-router',
+    'device_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc1/value', 'value4'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc2/value', 'value5'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'dev/rsrc3/value', 'value6'),
+    ]},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, DeviceDriverEnum.DEVICEDRIVER_P4],
+    'device_endpoints': [
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP1', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP2', 'port-packet-100G'),
+        endpoint(TOPOLOGY_ID, DEVICE3_ID, 'EP100', 'port-packet-10G'),
+    ],
+}
+
+LINK_DEV1_DEV2_UUID = 'DEV1/EP2 ==> DEV2/EP1'
+LINK_DEV1_DEV2_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV2_UUID}}
+LINK_DEV1_DEV2 = {
+    'link_id': deepcopy(LINK_DEV1_DEV2_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP2'),
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP1'),
+    ]
+}
+
+LINK_DEV2_DEV3_UUID = 'DEV2/EP3 ==> DEV3/EP2'
+LINK_DEV2_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV2_DEV3_UUID}}
+LINK_DEV2_DEV3 = {
+    'link_id': deepcopy(LINK_DEV2_DEV3_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP3'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP2'),
+    ]
+}
+
+LINK_DEV1_DEV3_UUID = 'DEV1/EP3 ==> DEV3/EP1'
+LINK_DEV1_DEV3_ID = {'link_uuid': {'uuid': LINK_DEV1_DEV3_UUID}}
+LINK_DEV1_DEV3 = {
+    'link_id': deepcopy(LINK_DEV1_DEV3_ID),
+    'link_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP3'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP1'),
+    ]
+}
+
+SERVICE_DEV1_DEV2_UUID = 'SVC:DEV1/EP100-DEV2/EP100'
+SERVICE_DEV1_DEV2_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV1_DEV2_UUID},
+}
+SERVICE_DEV1_DEV2 = {
+    'service_id': deepcopy(SERVICE_DEV1_DEV2_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '15.2'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '1.2'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
+
+SERVICE_DEV1_DEV3_UUID = 'SVC:DEV1/EP100-DEV3/EP100'
+SERVICE_DEV1_DEV3_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV1_DEV3_UUID},
+}
+SERVICE_DEV1_DEV3 = {
+    'service_id': deepcopy(SERVICE_DEV1_DEV3_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE1_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '5.8'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '0.1'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
+
+SERVICE_DEV2_DEV3_UUID = 'SVC:DEV2/EP100-DEV3/EP100'
+SERVICE_DEV2_DEV3_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_DEV2_DEV3_UUID},
+}
+SERVICE_DEV2_DEV3 = {
+    'service_id': deepcopy(SERVICE_DEV2_DEV3_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [
+        endpoint_id(TOPOLOGY_ID, DEVICE2_ID, 'EP100'),
+        endpoint_id(TOPOLOGY_ID, DEVICE3_ID, 'EP100'),
+    ],
+    'service_constraints': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '23.1'},
+        {'constraint_type': 'jitter_us', 'constraint_value': '3.4'},
+    ],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_ACTIVE},
+    'service_config': {'config_rules': [
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc1/value', 'value7'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc2/value', 'value8'),
+        config_rule(ConfigActionEnum.CONFIGACTION_SET, 'svc/rsrc3/value', 'value9'),
+    ]},
+}
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
index bba2a346adc2554e81b9048394fb964c4144a982..02343f458b06808e33955e43a93bf1a7f9308b34 100644
--- a/src/context/tests/test_unitary.py
+++ b/src/context/tests/test_unitary.py
@@ -1,55 +1,79 @@
-import copy, grpc, logging, pytest, requests, time
-from google.protobuf.json_format import MessageToDict
-from common.database.Factory import get_database, DatabaseEngineEnum
-from common.database.api.Database import Database
-from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from common.database.tests.script import populate_example
-from common.tests.Assertions import validate_empty, validate_link_id, validate_topology, validate_topology_has_devices,\
-    validate_topology_has_links, validate_topology_is_empty
+import copy, grpc, logging, os, pytest, requests, threading, time, urllib
+from queue import Queue
+from typing import Tuple
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
+from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
+from common.message_broker.MessageBroker import MessageBroker
+from common.type_checkers.Assertions import (
+    validate_context, validate_context_ids, validate_contexts, validate_device, validate_device_ids, validate_devices,
+    validate_link, validate_link_ids, validate_links, validate_service, validate_service_ids, validate_services,
+    validate_topologies, validate_topology, validate_topology_ids)
+from context.Config import (
+    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL)
 from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import Empty, Link, LinkId, Topology
-from context.service.ContextService import ContextService
-from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, RESTAPI_SERVICE_PORT, \
-    RESTAPI_BASE_URL
-from context.service.rest_server.Server import Server
-from context.service.rest_server.resources.Context import Context
-
-grpc_port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
-restapi_port = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports
+from context.proto.context_pb2 import (
+    Context, ContextEvent, ContextId, Device, DeviceEvent, DeviceId, DeviceOperationalStatusEnum, Empty,
+    EventTypeEnum, Link, LinkEvent, LinkId, Service, ServiceEvent, ServiceId, ServiceStatusEnum, ServiceTypeEnum,
+    Topology, TopologyEvent, TopologyId)
+from context.service.database.Tools import (
+    FASTHASHER_DATA_ACCEPTED_FORMAT, FASTHASHER_ITEM_ACCEPTED_FORMAT, fast_hasher)
+from context.service.grpc_server.ContextService import ContextService
+from context.service.Populate import populate
+from context.service.rest_server.Server import Server as RestServer
+from context.service.rest_server.Resources import RESOURCES
+from .example_objects import (
+    CONTEXT, CONTEXT_ID, DEVICE1, DEVICE1_ID, DEVICE1_UUID, DEVICE2, DEVICE2_ID, DEVICE2_UUID, LINK_DEV1_DEV2,
+    LINK_DEV1_DEV2_ID, LINK_DEV1_DEV2_UUID, SERVICE_DEV1_DEV2, SERVICE_DEV1_DEV2_ID, SERVICE_DEV1_DEV2_UUID, TOPOLOGY,
+    TOPOLOGY_ID)
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-# use "copy.deepcopy" to prevent propagating forced changes during tests
-CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
-TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
-LINK_ID = {'link_id': {'uuid': 'DEV1/EP2 ==> DEV2/EP1'}}
-LINK = {
-    'link_id': {'link_id': {'uuid': 'DEV1/EP2 ==> DEV2/EP1'}},
-    'endpointList' : [
-        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP2'}},
-        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP1'}},
-    ]
+GRPC_PORT    = 10000 + GRPC_SERVICE_PORT    # avoid privileged ports
+RESTAPI_PORT = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports
+
+DEFAULT_REDIS_SERVICE_HOST = '127.0.0.1'
+DEFAULT_REDIS_SERVICE_PORT = 6379
+DEFAULT_REDIS_DATABASE_ID  = 0
+
+REDIS_CONFIG = {
+    'REDIS_SERVICE_HOST': os.environ.get('REDIS_SERVICE_HOST', DEFAULT_REDIS_SERVICE_HOST),
+    'REDIS_SERVICE_PORT': os.environ.get('REDIS_SERVICE_PORT', DEFAULT_REDIS_SERVICE_PORT),
+    'REDIS_DATABASE_ID' : os.environ.get('REDIS_DATABASE_ID',  DEFAULT_REDIS_DATABASE_ID ),
 }
 
-@pytest.fixture(scope='session')
-def context_database():
-    _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
-    return _database
+SCENARIOS = [
+    ('all_inmemory', DatabaseBackendEnum.INMEMORY, {},           MessageBrokerBackendEnum.INMEMORY, {}          ),
+    ('all_redis',    DatabaseBackendEnum.REDIS,    REDIS_CONFIG, MessageBrokerBackendEnum.REDIS,    REDIS_CONFIG),
+]
+
+@pytest.fixture(scope='session', ids=[str(scenario[0]) for scenario in SCENARIOS], params=SCENARIOS)
+def context_db_mb(request) -> Tuple[Database, MessageBroker]:
+    name,db_backend,db_settings,mb_backend,mb_settings = request.param
+    msg = 'Running scenario {:s} db_backend={:s}, db_settings={:s}, mb_backend={:s}, mb_settings={:s}...'
+    LOGGER.info(msg.format(str(name), str(db_backend.value), str(db_settings), str(mb_backend.value), str(mb_settings)))
+    _database = Database(get_database_backend(backend=db_backend, **db_settings))
+    _message_broker = MessageBroker(get_messagebroker_backend(backend=mb_backend, **mb_settings))
+    yield _database, _message_broker
+    _message_broker.terminate()
 
 @pytest.fixture(scope='session')
-def context_service(context_database : Database):
+def context_service_grpc(context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name
     _service = ContextService(
-        context_database, port=grpc_port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+        context_db_mb[0], context_db_mb[1], port=GRPC_PORT, max_workers=GRPC_MAX_WORKERS,
+        grace_period=GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
-def context_service_rest(context_database : Database):
-    _rest_server = Server(port=restapi_port, base_url=RESTAPI_BASE_URL)
-    _rest_server.add_resource(
-        Context, '/restconf/config/context', endpoint='api.context', resource_class_args=(context_database,))
+def context_service_rest(context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name
+    database = context_db_mb[0]
+    _rest_server = RestServer(port=RESTAPI_PORT, base_url=RESTAPI_BASE_URL)
+    for endpoint_name, resource_class, resource_url in RESOURCES:
+        _rest_server.add_resource(resource_class, resource_url, endpoint=endpoint_name, resource_class_args=(database,))
     _rest_server.start()
     time.sleep(1) # bring time for the server to start
     yield _rest_server
@@ -57,191 +81,983 @@ def context_service_rest(context_database : Database):
     _rest_server.join()
 
 @pytest.fixture(scope='session')
-def context_client(context_service):
-    _client = ContextClient(address='127.0.0.1', port=grpc_port)
+def context_client_grpc(context_service_grpc : ContextService): # pylint: disable=redefined-outer-name
+    _client = ContextClient(address='127.0.0.1', port=GRPC_PORT)
     yield _client
     _client.close()
 
-def test_get_topology_empty(context_client : ContextClient, context_database : Database):
-    # should work
+def do_rest_request(url : str):
+    request_url = 'http://127.0.0.1:{:s}{:s}{:s}'.format(str(RESTAPI_PORT), str(RESTAPI_BASE_URL), url)
+    LOGGER.warning('Request: GET {:s}'.format(str(request_url)))
+    reply = requests.get(request_url)
+    LOGGER.warning('Reply: {:s}'.format(str(reply.text)))
+    assert reply.status_code == 200, 'Reply failed with code {}'.format(reply.status_code)
+    return reply.json()
+
+class EventsCollector:
+    def __init__(self, context_client_grpc : ContextClient) -> None: # pylint: disable=redefined-outer-name
+        self._events_queue = Queue()
+
+        self._context_stream  = context_client_grpc.GetContextEvents(Empty())
+        self._topology_stream = context_client_grpc.GetTopologyEvents(Empty())
+        self._device_stream   = context_client_grpc.GetDeviceEvents(Empty())
+        self._link_stream     = context_client_grpc.GetLinkEvents(Empty())
+        self._service_stream  = context_client_grpc.GetServiceEvents(Empty())
+
+        self._context_thread  = threading.Thread(target=self._collect, args=(self._context_stream ,), daemon=False)
+        self._topology_thread = threading.Thread(target=self._collect, args=(self._topology_stream,), daemon=False)
+        self._device_thread   = threading.Thread(target=self._collect, args=(self._device_stream  ,), daemon=False)
+        self._link_thread     = threading.Thread(target=self._collect, args=(self._link_stream    ,), daemon=False)
+        self._service_thread  = threading.Thread(target=self._collect, args=(self._service_stream ,), daemon=False)
+
+    def _collect(self, events_stream) -> None:
+        try:
+            for event in events_stream:
+                self._events_queue.put_nowait(event)
+        except grpc.RpcError as e:
+            if e.code() != grpc.StatusCode.CANCELLED: # pylint: disable=no-member
+                raise # pragma: no cover
+
+    def start(self):
+        self._context_thread.start()
+        self._topology_thread.start()
+        self._device_thread.start()
+        self._link_thread.start()
+        self._service_thread.start()
+
+    def get_event(self, block : bool = True, timeout : float = 0.1):
+        return self._events_queue.get(block=block, timeout=timeout)
+
+    def stop(self):
+        self._context_stream.cancel()
+        self._topology_stream.cancel()
+        self._device_stream.cancel()
+        self._link_stream.cancel()
+        self._service_stream.cancel()
+
+        self._context_thread.join()
+        self._topology_thread.join()
+        self._device_thread.join()
+        self._link_thread.join()
+        self._service_thread.join()
+
+
+# ----- Test gRPC methods ----------------------------------------------------------------------------------------------
+
+def test_grpc_context(
+    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
     context_database.clear_all()
-    validate_topology_is_empty(MessageToDict(
-        context_client.GetTopology(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_get_topology_completed(context_client : ContextClient, context_database : Database):
-    # should work
-    populate_example(context_database, add_services=False)
-    topology = MessageToDict(
-        context_client.GetTopology(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False)
-    validate_topology(topology)
-    validate_topology_has_devices(topology)
-    validate_topology_has_links(topology)
-
-def test_delete_link_empty_uuid(context_client : ContextClient):
-    # should fail with link not found
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link_id = copy.deepcopy(LINK_ID)
-        copy_link_id['link_id']['uuid'] = ''
-        context_client.DeleteLink(LinkId(**copy_link_id))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'link_id.link_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
 
-def test_add_link_already_exists(context_client : ContextClient):
-    # should fail with link already exists
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        context_client.AddLink(Link(**LINK))
-    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    msg = 'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) already exists in the database.'
-    assert e.value.details() == msg
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
 
-def test_delete_link(context_client : ContextClient):
-    # should work
-    validate_empty(MessageToDict(
-        context_client.DeleteLink(LinkId(**LINK_ID)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_delete_link_not_existing(context_client : ContextClient):
-    # should fail with link not found
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        context_client.DeleteLink(LinkId(**LINK_ID))
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetContext(ContextId(**CONTEXT_ID))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.'
-    assert e.value.details() == msg
+    assert e.value.details() == 'Context({:s}) not found'.format(DEFAULT_CONTEXT_UUID)
 
-def test_add_link_uuid_empty(context_client : ContextClient):
-    # should fail with link uuid empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['link_id']['link_id']['uuid'] = ''
-        context_client.AddLink(Link(**copy_link))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'link.link_id.link_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-    assert e.value.details() == msg
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListContextIds(Empty())
+    assert len(response.context_ids) == 0
 
-def test_add_link_wrong_endpoint(context_client : ContextClient):
-    # should fail with wrong endpoint context
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-        context_client.AddLink(Link(**copy_link))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Context(wrong-context) in Endpoint(#0) of '\
-          'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) mismatches acceptable Contexts({\'admin\'}). '\
-          'Optionally, leave field empty to use predefined Context(admin).'
-    assert e.value.details() == msg
+    response = context_client_grpc.ListContexts(Empty())
+    assert len(response.contexts) == 0
+
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
 
-    # should fail with wrong endpoint topology
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
-        context_client.AddLink(Link(**copy_link))
+    with pytest.raises(grpc.RpcError) as e:
+        WRONG_TOPOLOGY_ID = copy.deepcopy(TOPOLOGY_ID)
+        WRONG_TOPOLOGY_ID['context_id']['context_uuid']['uuid'] = 'wrong-context-uuid'
+        WRONG_CONTEXT = copy.deepcopy(CONTEXT)
+        WRONG_CONTEXT['topology_ids'].append(WRONG_TOPOLOGY_ID)
+        context_client_grpc.SetContext(Context(**WRONG_CONTEXT))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Context(admin)/Topology(wrong-topo) in Endpoint(#0) of '\
-          'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) mismatches acceptable Topologies({\'admin\'}). '\
-          'Optionally, leave field empty to use predefined Topology(admin).'
+    msg = 'request.topology_ids[0].context_id.context_uuid.uuid(wrong-context-uuid) is invalid; '\
+          'should be == request.context_id.context_uuid.uuid(admin)'
     assert e.value.details() == msg
 
-    # should fail with device uuid is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
-        context_client.AddLink(Link(**copy_link))
+    with pytest.raises(grpc.RpcError) as e:
+        WRONG_SERVICE_ID = copy.deepcopy(SERVICE_DEV1_DEV2_ID)
+        WRONG_SERVICE_ID['context_id']['context_uuid']['uuid'] = 'wrong-context-uuid'
+        WRONG_CONTEXT = copy.deepcopy(CONTEXT)
+        WRONG_CONTEXT['service_ids'].append(WRONG_SERVICE_ID)
+        context_client_grpc.SetContext(Context(**WRONG_CONTEXT))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'endpoint_id[#0].dev_id.device_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
+    msg = 'request.service_ids[0].context_id.context_uuid.uuid(wrong-context-uuid) is invalid; '\
+          'should be == request.context_id.context_uuid.uuid(admin)'
     assert e.value.details() == msg
 
-    # should fail with wrong endpoint device
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
-        context_client.AddLink(Link(**copy_link))
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 2
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetContext(ContextId(**CONTEXT_ID))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert len(response.topology_ids) == 0
+    assert len(response.service_ids) == 0
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListContextIds(Empty())
+    assert len(response.context_ids) == 1
+    assert response.context_ids[0].context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.ListContexts(Empty())
+    assert len(response.contexts) == 1
+    assert response.contexts[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert len(response.contexts[0].topology_ids) == 0
+    assert len(response.contexts[0].service_ids) == 0
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+def test_grpc_topology(
+    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
+
+    # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetTopology(TopologyId(**TOPOLOGY_ID))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0) of '\
-          'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.'
-    assert e.value.details() == msg
+    assert e.value.details() == 'Topology({:s}/{:s}) not found'.format(DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID)
+
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListTopologyIds(ContextId(**CONTEXT_ID))
+    assert len(response.topology_ids) == 0
+
+    response = context_client_grpc.ListTopologies(ContextId(**CONTEXT_ID))
+    assert len(response.topologies) == 0
 
-    # should fail with endpoint uuid is empty
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['port_id']['uuid'] = ''
-        context_client.AddLink(Link(**copy_link))
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 2
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    CONTEXT_WITH_TOPOLOGY = copy.deepcopy(CONTEXT)
+    CONTEXT_WITH_TOPOLOGY['topology_ids'].append(TOPOLOGY_ID)
+    response = context_client_grpc.SetContext(Context(**CONTEXT_WITH_TOPOLOGY))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 5
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetTopology(TopologyId(**TOPOLOGY_ID))
+    assert response.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert len(response.device_ids) == 0
+    assert len(response.link_ids) == 0
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListTopologyIds(ContextId(**CONTEXT_ID))
+    assert len(response.topology_ids) == 1
+    assert response.topology_ids[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_ids[0].topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    response = context_client_grpc.ListTopologies(ContextId(**CONTEXT_ID))
+    assert len(response.topologies) == 1
+    assert response.topologies[0].topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topologies[0].topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert len(response.topologies[0].device_ids) == 0
+    assert len(response.topologies[0].link_ids) == 0
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveTopology(TopologyId(**TOPOLOGY_ID))
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+def test_grpc_device(
+    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
+
+    # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetDevice(DeviceId(**DEVICE1_ID))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    assert e.value.details() == 'Device({:s}) not found'.format(DEVICE1_UUID)
+
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListDeviceIds(Empty())
+    assert len(response.device_ids) == 0
+
+    response = context_client_grpc.ListDevices(Empty())
+    assert len(response.devices) == 0
+
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 5
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        WRONG_DEVICE = copy.deepcopy(DEVICE1)
+        WRONG_DEVICE['device_endpoints'][0]['endpoint_id']['device_id']['device_uuid']['uuid'] = 'wrong-device-uuid'
+        context_client_grpc.SetDevice(Device(**WRONG_DEVICE))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'endpoint_id[#0].port_id.uuid() is out of range: '\
-          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
+    msg = 'request.device_endpoints[0].device_id.device_uuid.uuid(wrong-device-uuid) is invalid; '\
+          'should be == request.device_id.device_uuid.uuid(DEV1)'
     assert e.value.details() == msg
 
-    # should fail with wrong endpoint port
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
-        context_client.AddLink(Link(**copy_link))
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 25
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetDevice(DeviceId(**DEVICE1_ID))
+    assert response.device_id.device_uuid.uuid == DEVICE1_UUID
+    assert response.device_type == 'packet-router'
+    assert len(response.device_config.config_rules) == 3
+    assert response.device_operational_status == DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+    assert len(response.device_drivers) == 2
+    assert len(response.device_endpoints) == 3
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListDeviceIds(Empty())
+    assert len(response.device_ids) == 1
+    assert response.device_ids[0].device_uuid.uuid == DEVICE1_UUID
+
+    response = context_client_grpc.ListDevices(Empty())
+    assert len(response.devices) == 1
+    assert response.devices[0].device_id.device_uuid.uuid == DEVICE1_UUID
+    assert response.devices[0].device_type == 'packet-router'
+    assert len(response.devices[0].device_config.config_rules) == 3
+    assert response.devices[0].device_operational_status == DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+    assert len(response.devices[0].device_drivers) == 2
+    assert len(response.devices[0].device_endpoints) == 3
+
+    # ----- Create object relation -------------------------------------------------------------------------------------
+    TOPOLOGY_WITH_DEVICE = copy.deepcopy(TOPOLOGY)
+    TOPOLOGY_WITH_DEVICE['device_ids'].append(DEVICE1_ID)
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY_WITH_DEVICE))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Check relation was created ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetTopology(TopologyId(**TOPOLOGY_ID))
+    assert response.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert len(response.device_ids) == 1
+    assert response.device_ids[0].device_uuid.uuid == DEVICE1_UUID
+    assert len(response.link_ids) == 0
+
+    # ----- Dump state of database after creating the object relation --------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 25
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE1_ID))
+    context_client_grpc.RemoveTopology(TopologyId(**TOPOLOGY_ID))
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+def test_grpc_link(
+    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
+
+    # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE2))
+    assert response.device_uuid.uuid == DEVICE2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetLink(LinkId(**LINK_DEV1_DEV2_ID))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0) of '\
-          'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1) does not exist in the database.'
-    assert e.value.details() == msg
+    assert e.value.details() == 'Link({:s}) not found'.format(LINK_DEV1_DEV2_UUID)
+
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListLinkIds(Empty())
+    assert len(response.link_ids) == 0
+
+    response = context_client_grpc.ListLinks(Empty())
+    assert len(response.links) == 0
+
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 38
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetLink(Link(**LINK_DEV1_DEV2))
+    assert response.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, LinkEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetLink(Link(**LINK_DEV1_DEV2))
+    assert response.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, LinkEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 48
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetLink(LinkId(**LINK_DEV1_DEV2_ID))
+    assert response.link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+    assert len(response.link_endpoint_ids) == 2
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListLinkIds(Empty())
+    assert len(response.link_ids) == 1
+    assert response.link_ids[0].link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    response = context_client_grpc.ListLinks(Empty())
+    assert len(response.links) == 1
+    assert response.links[0].link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+    assert len(response.links[0].link_endpoint_ids) == 2
+
+    # ----- Create object relation -------------------------------------------------------------------------------------
+    TOPOLOGY_WITH_LINK = copy.deepcopy(TOPOLOGY)
+    TOPOLOGY_WITH_LINK['link_ids'].append(LINK_DEV1_DEV2_ID)
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY_WITH_LINK))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    # ----- Check relation was created ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetTopology(TopologyId(**TOPOLOGY_ID))
+    assert response.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+    assert len(response.device_ids) == 2
+    assert response.device_ids[0].device_uuid.uuid == DEVICE1_UUID
+    assert response.device_ids[1].device_uuid.uuid == DEVICE2_UUID
+    assert len(response.link_ids) == 1
+    assert response.link_ids[0].link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 48
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveLink(LinkId(**LINK_DEV1_DEV2_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE1_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE2_ID))
+    context_client_grpc.RemoveTopology(TopologyId(**TOPOLOGY_ID))
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, LinkEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.link_id.link_uuid.uuid == LINK_DEV1_DEV2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+def test_grpc_service(
+    context_client_grpc : ContextClient,                     # pylint: disable=redefined-outer-name
+    context_db_mb : Tuple[Database, MessageBroker]):    # pylint: disable=redefined-outer-name
+    context_database = context_db_mb[0]
+
+    # ----- Clean the database -----------------------------------------------------------------------------------------
+    context_database.clear_all()
+
+    # ----- Initialize the EventsCollector -----------------------------------------------------------------------------
+    events_collector = EventsCollector(context_client_grpc)
+    events_collector.start()
+
+    # ----- Prepare dependencies for the test and capture related events -----------------------------------------------
+    response = context_client_grpc.SetContext(Context(**CONTEXT))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    response = context_client_grpc.SetTopology(Topology(**TOPOLOGY))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE1))
+    assert response.device_uuid.uuid == DEVICE1_UUID
+
+    response = context_client_grpc.SetDevice(Device(**DEVICE2))
+    assert response.device_uuid.uuid == DEVICE2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
 
-    # should fail with endpoint device duplicated
-    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][1]['dev_id']['device_id']['uuid'] = 'DEV1'
-        context_client.AddLink(Link(**copy_link))
+    # ----- Get when the object does not exist -------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        context_client_grpc.GetService(ServiceId(**SERVICE_DEV1_DEV2_ID))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    assert e.value.details() == 'Service({:s}/{:s}) not found'.format(DEFAULT_CONTEXT_UUID, SERVICE_DEV1_DEV2_UUID)
+
+    # ----- List when the object does not exist ------------------------------------------------------------------------
+    response = context_client_grpc.ListServiceIds(ContextId(**CONTEXT_ID))
+    assert len(response.service_ids) == 0
+
+    response = context_client_grpc.ListServices(ContextId(**CONTEXT_ID))
+    assert len(response.services) == 0
+
+    # ----- Dump state of database before create the object ------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 38
+
+    # ----- Create the object ------------------------------------------------------------------------------------------
+    with pytest.raises(grpc.RpcError) as e:
+        WRONG_SERVICE = copy.deepcopy(SERVICE_DEV1_DEV2)
+        WRONG_SERVICE['service_endpoint_ids'][0]\
+            ['topology_id']['context_id']['context_uuid']['uuid'] = 'wrong-context-uuid'
+        context_client_grpc.SetService(Service(**WRONG_SERVICE))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Duplicated Context(admin)/Topology(admin)/Device(DEV1) in Endpoint(#1) of '\
-          'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1).'
+    msg = 'request.service_endpoint_ids[0].topology_id.context_id.context_uuid.uuid(wrong-context-uuid) is invalid; '\
+          'should be == request.service_id.context_id.context_uuid.uuid(admin)'
     assert e.value.details() == msg
 
-def test_add_link(context_client : ContextClient):
-    # should work
-    validate_link_id(MessageToDict(
-        context_client.AddLink(Link(**LINK)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_delete_link_2(context_client : ContextClient):
-    # should work
-    validate_empty(MessageToDict(
-        context_client.DeleteLink(LinkId(**LINK_ID)),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
-
-def test_add_link_default_endpoint_context_topology(context_client : ContextClient):
-    # should work
-    copy_link = copy.deepcopy(LINK)
-    copy_link['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
-    copy_link['endpointList'][0]['topoId']['topoId']['uuid'] = ''
-    validate_link_id(MessageToDict(
-            context_client.AddLink(Link(**copy_link)),
-            including_default_value_fields=True, preserving_proto_field_name=True,
-            use_integers_for_enums=False))
-
-def test_get_topology_completed_2(context_client : ContextClient):
-    # should work
-    topology = MessageToDict(
-        context_client.GetTopology(Empty()),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False)
-    validate_topology(topology)
-    validate_topology_has_devices(topology)
-    validate_topology_has_links(topology)
-
-def test_get_topology_completed_rest_api(context_service_rest : Server):
-    # should work
-    request_url = 'http://127.0.0.1:{}{}/restconf/config/context'.format(restapi_port, RESTAPI_BASE_URL)
-    reply = requests.get(request_url)
-    json_reply = reply.json()
-    topology = MessageToDict(
-        Topology(**json_reply['topologies'][0]),
-        including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False)
-    validate_topology(topology)
-    validate_topology_has_devices(topology)
-    validate_topology_has_links(topology)
+    response = context_client_grpc.SetService(Service(**SERVICE_DEV1_DEV2))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    CONTEXT_WITH_SERVICE = copy.deepcopy(CONTEXT)
+    CONTEXT_WITH_SERVICE['service_ids'].append(SERVICE_DEV1_DEV2_ID)
+    response = context_client_grpc.SetContext(Context(**CONTEXT_WITH_SERVICE))
+    assert response.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Check create event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ServiceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE
+    assert event.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Update the object ------------------------------------------------------------------------------------------
+    response = context_client_grpc.SetService(Service(**SERVICE_DEV1_DEV2))
+    assert response.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    # ----- Check update event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ServiceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_UPDATE
+    assert event.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    # ----- Dump state of database after create/update the object ------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 57
+
+    # ----- Get when the object exists ---------------------------------------------------------------------------------
+    response = context_client_grpc.GetService(ServiceId(**SERVICE_DEV1_DEV2_ID))
+    assert response.service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+    assert response.service_type == ServiceTypeEnum.SERVICETYPE_L3NM
+    assert len(response.service_endpoint_ids) == 2
+    assert len(response.service_constraints) == 2
+    assert response.service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
+    assert len(response.service_config.config_rules) == 3
+
+    # ----- List when the object exists --------------------------------------------------------------------------------
+    response = context_client_grpc.ListServiceIds(ContextId(**CONTEXT_ID))
+    assert len(response.service_ids) == 1
+    assert response.service_ids[0].context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.service_ids[0].service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    response = context_client_grpc.ListServices(ContextId(**CONTEXT_ID))
+    assert len(response.services) == 1
+    assert response.services[0].service_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert response.services[0].service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+    assert response.services[0].service_type == ServiceTypeEnum.SERVICETYPE_L3NM
+    assert len(response.services[0].service_endpoint_ids) == 2
+    assert len(response.services[0].service_constraints) == 2
+    assert response.services[0].service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
+    assert len(response.services[0].service_config.config_rules) == 3
+
+    # ----- Remove the object ------------------------------------------------------------------------------------------
+    context_client_grpc.RemoveService(ServiceId(**SERVICE_DEV1_DEV2_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE1_ID))
+    context_client_grpc.RemoveDevice(DeviceId(**DEVICE2_ID))
+    context_client_grpc.RemoveTopology(TopologyId(**TOPOLOGY_ID))
+    context_client_grpc.RemoveContext(ContextId(**CONTEXT_ID))
+
+    # ----- Check remove event -----------------------------------------------------------------------------------------
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ServiceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.service_id.service_uuid.uuid == SERVICE_DEV1_DEV2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.device_id.device_uuid.uuid == DEVICE1_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, DeviceEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.device_id.device_uuid.uuid == DEVICE2_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, TopologyEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.topology_id.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+    assert event.topology_id.topology_uuid.uuid == DEFAULT_TOPOLOGY_UUID
+
+    event = events_collector.get_event(block=True)
+    assert isinstance(event, ContextEvent)
+    assert event.event.event_type == EventTypeEnum.EVENTTYPE_REMOVE
+    assert event.context_id.context_uuid.uuid == DEFAULT_CONTEXT_UUID
+
+    # ----- Stop the EventsCollector -----------------------------------------------------------------------------------
+    events_collector.stop()
+
+    # ----- Dump state of database after remove the object -------------------------------------------------------------
+    db_entries = context_database.dump()
+    LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+    for db_entry in db_entries:
+        LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+    LOGGER.info('-----------------------------------------------------------')
+    assert len(db_entries) == 0
+
+
+# ----- Test REST API methods ------------------------------------------------------------------------------------------
+
+def test_rest_populate_database(
+    context_db_mb : Tuple[Database, MessageBroker], # pylint: disable=redefined-outer-name
+    context_service_grpc : ContextService           # pylint: disable=redefined-outer-name
+    ):
+    # Wipe the shared test database, then seed it through the gRPC API on the
+    # local loopback. Subsequent REST tests read this populated state.
+    database = context_db_mb[0]
+    database.clear_all()
+    populate('127.0.0.1', GRPC_PORT)
+
+
+def test_rest_get_context_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context_ids returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/context_ids')
+    validate_context_ids(reply)
+
+def test_rest_get_contexts(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /contexts returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/contexts')
+    validate_contexts(reply)
+
+def test_rest_get_context(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid} for the 'admin' context returns a valid context."""
+    context_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}'.format(context_uuid))
+    validate_context(reply)
+
+def test_rest_get_topology_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/topology_ids lists topology IDs of the 'admin' context."""
+    context_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}/topology_ids'.format(context_uuid))
+    validate_topology_ids(reply)
+
+def test_rest_get_topologies(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/topologies lists topologies of the 'admin' context."""
+    context_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}/topologies'.format(context_uuid))
+    validate_topologies(reply)
+
+def test_rest_get_topology(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/topology/{uuid} returns the 'admin' topology."""
+    context_uuid = urllib.parse.quote('admin')
+    topology_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}/topology/{:s}'.format(context_uuid, topology_uuid))
+    # 3 devices / 3 links — presumably matches what populate() seeded; confirm
+    # against the populate fixture if this count changes.
+    validate_topology(reply, num_devices=3, num_links=3)
+
+def test_rest_get_service_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/service_ids lists service IDs of the 'admin' context."""
+    context_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}/service_ids'.format(context_uuid))
+    validate_service_ids(reply)
+
+def test_rest_get_services(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/services lists services of the 'admin' context."""
+    context_uuid = urllib.parse.quote('admin')
+    reply = do_rest_request('/context/{:s}/services'.format(context_uuid))
+    validate_services(reply)
+
+def test_rest_get_service(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /context/{uuid}/service/{uuid} returns one service by its UUID."""
+    context_uuid = urllib.parse.quote('admin')
+    # safe='' forces '/' inside the service UUID to be percent-encoded, so it
+    # is not interpreted as a URL path separator.
+    service_uuid = urllib.parse.quote('SVC:DEV1/EP100-DEV2/EP100', safe='')
+    reply = do_rest_request('/context/{:s}/service/{:s}'.format(context_uuid, service_uuid))
+    validate_service(reply)
+
+def test_rest_get_device_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /device_ids returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/device_ids')
+    validate_device_ids(reply)
+
+def test_rest_get_devices(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /devices returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/devices')
+    validate_devices(reply)
+
+def test_rest_get_device(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /device/{uuid} returns device 'DEV1'."""
+    device_uuid = urllib.parse.quote('DEV1', safe='')
+    reply = do_rest_request('/device/{:s}'.format(device_uuid))
+    validate_device(reply)
+
+def test_rest_get_link_ids(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /link_ids returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/link_ids')
+    validate_link_ids(reply)
+
+def test_rest_get_links(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /links returns a reply accepted by the shared validator."""
+    reply = do_rest_request('/links')
+    validate_links(reply)
+
+def test_rest_get_link(context_service_rest : RestServer): # pylint: disable=redefined-outer-name
+    """GET /link/{uuid} returns one link by its UUID."""
+    # safe='' percent-encodes '/' and spaces inside the link UUID so the whole
+    # string travels as a single URL path segment.
+    link_uuid = urllib.parse.quote('DEV1/EP2 ==> DEV2/EP1', safe='')
+    reply = do_rest_request('/link/{:s}'.format(link_uuid))
+    validate_link(reply)
+
+
+# ----- Test misc. Context internal tools ------------------------------------------------------------------------------
+
+def test_tools_fast_string_hasher():
+    """fast_hasher rejects unsupported input types with a precise TypeError
+    message, and accepts str/bytes scalars and lists/tuples of str/bytes."""
+    # Bare int: rejected at the top level.
+    with pytest.raises(TypeError) as e:
+        fast_hasher(27)
+    assert str(e.value) == "data(27) must be " + FASTHASHER_DATA_ACCEPTED_FORMAT + ", found <class 'int'>"
+
+    # Sets are rejected regardless of element type.
+    with pytest.raises(TypeError) as e:
+        fast_hasher({27})
+    assert str(e.value) == "data({27}) must be " + FASTHASHER_DATA_ACCEPTED_FORMAT + ", found <class 'set'>"
+
+    with pytest.raises(TypeError) as e:
+        fast_hasher({'27'})
+    assert str(e.value) == "data({'27'}) must be " + FASTHASHER_DATA_ACCEPTED_FORMAT + ", found <class 'set'>"
+
+    # A list is an accepted container, but its items must be str/bytes; note
+    # the item-level error message includes the offending index.
+    with pytest.raises(TypeError) as e:
+        fast_hasher([27])
+    assert str(e.value) == "data[0](27) must be " + FASTHASHER_ITEM_ACCEPTED_FORMAT + ", found <class 'int'>"
+
+    # Accepted inputs: these calls must not raise.
+    fast_hasher('hello-world')
+    fast_hasher('hello-world'.encode('UTF-8'))
+    fast_hasher(['hello', 'world'])
+    fast_hasher(('hello', 'world'))
+    fast_hasher(['hello'.encode('UTF-8'), 'world'.encode('UTF-8')])
+    fast_hasher(('hello'.encode('UTF-8'), 'world'.encode('UTF-8')))