diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 11c3b25da72abb7259acf56e1ed67c20df50fb36..7938f7ec1aa82e5d883b7af4bd8c4f6884ebe779 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,10 +2,14 @@
 stages:
   - build
   - test
+  - dependencies
   - deploy
+  - integration_test
 
 # include the individual .gitlab-ci.yml of each micro-service
 include: 
-  - local: '/src/monitoring/.gitlab-ci.yml'
+  - local: '/manifests/.gitlab-ci.yml'
+  #- local: '/src/monitoring/.gitlab-ci.yml'
   - local: '/src/context/.gitlab-ci.yml'
   - local: '/src/device/.gitlab-ci.yml'
+  - local: '/src/integration_tester/.gitlab-ci.yml'
diff --git a/manifests/.gitlab-ci.yml b/manifests/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0c0b1d4dc0783e1bf3158d0629558b4fedd355d6
--- /dev/null
+++ b/manifests/.gitlab-ci.yml
@@ -0,0 +1,10 @@
+# Deployment of the dependency services in Kubernetes Cluster
+
+dependencies all:
+  stage: dependencies
+  script:
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/prometheus.yaml"
+    - kubectl apply -f "manifests/redis.yaml"
+    - kubectl get all
diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml
index 6675c00aa5ea5de249caed3d78c04355b3463003..75406da088820da3e8360e7c37fc0d05aa8f40db 100644
--- a/manifests/contextservice.yaml
+++ b/manifests/contextservice.yaml
@@ -14,14 +14,14 @@ spec:
       terminationGracePeriodSeconds: 5
       containers:
       - name: server
-        image: context_service:develop
-        imagePullPolicy: Never
+        image: registry.gitlab.com/teraflow-h2020/controller/context:latest
+        imagePullPolicy: Always
         ports:
         - containerPort: 1010
         env:
         - name: DB_ENGINE
           value: "redis"
-        - name: REDISDB_DATABASE_ID
+        - name: REDIS_DATABASE_ID
           value: "0"
         - name: LOG_LEVEL
           value: "DEBUG"
diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml
index 631ff0cb4115806b2064f434755d728e7e821cc7..7aa02e815034ff05633b2febaaf38ceb72bf06a3 100644
--- a/manifests/deviceservice.yaml
+++ b/manifests/deviceservice.yaml
@@ -14,14 +14,14 @@ spec:
       terminationGracePeriodSeconds: 5
       containers:
       - name: server
-        image: device:dockerfile
-        imagePullPolicy: Never
+        image: registry.gitlab.com/teraflow-h2020/controller/device:latest
+        imagePullPolicy: Always
         ports:
         - containerPort: 2020
         env:
         - name: DB_ENGINE
           value: "redis"
-        - name: REDISDB_DATABASE_ID
+        - name: REDIS_DATABASE_ID
           value: "0"
         - name: LOG_LEVEL
           value: "DEBUG"
diff --git a/manifests/prometheus.yaml b/manifests/prometheus.yaml
index 311cb28b007f6ec5e7699b14d87198c7cfb5b4d4..72e2e65c8c8b266ebdfef974c6795d12f1c682c4 100644
--- a/manifests/prometheus.yaml
+++ b/manifests/prometheus.yaml
@@ -15,19 +15,19 @@ data:
       - role: pod
         namespaces:
           names:
-          - teraflow-development
+          - gitlab-ci
     - job_name: 'teraflow-metrics-service'
       kubernetes_sd_configs:
       - role: service
         namespaces:
           names:
-          - teraflow-development
+          - gitlab-ci
     - job_name: 'teraflow-metrics-endpoints'
       kubernetes_sd_configs:
       - role: endpoints
         namespaces:
           names:
-          - teraflow-development
+          - gitlab-ci
 ---
 apiVersion: apps/v1
 kind: Deployment
@@ -71,7 +71,7 @@ spec:
   selector:
     app: prometheus
   ports:
-  - name: prometheus-http
+  - name: http
     protocol: TCP
     port: 9090
     targetPort: 9090
diff --git a/manifests/redisdb.yaml b/manifests/redis.yaml
similarity index 77%
rename from manifests/redisdb.yaml
rename to manifests/redis.yaml
index 0655295cdb9b00649dab38758f1bacabcda1bd28..4d6d6cbf2e5d71806ebeddbb3d6b67cf19a5d3f6 100644
--- a/manifests/redisdb.yaml
+++ b/manifests/redis.yaml
@@ -2,20 +2,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: redisdb
+  name: redis
 spec:
   selector:
     matchLabels:
-      app: redisdb
+      app: redis
   replicas: 1
   template:
     metadata:
       labels:
-        app: redisdb
+        app: redis
         version: v1
     spec:
       containers:
-      - name: redisdb
+      - name: redis
         image: redis:6.2
         ports:
         - containerPort: 6379
@@ -23,13 +23,13 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: redisdb
+  name: redis
   labels:
-    app: redisdb
+    app: redis
 spec:
   type: ClusterIP
   selector:
-    app: redisdb
+    app: redis
   ports:
   - name: redis
     protocol: TCP
diff --git a/report_coverage.sh b/report_coverage_all.sh
similarity index 100%
rename from report_coverage.sh
rename to report_coverage_all.sh
diff --git a/report_coverage_context.sh b/report_coverage_context.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f2f71fa744b5d8209589b283c7a375b4f25be0c8
--- /dev/null
+++ b/report_coverage_context.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+./report_coverage_all.sh | grep --color -E -i "^.*context.*$|$"
diff --git a/report_coverage_device.sh b/report_coverage_device.sh
index f884fb1c7806069412e29fcb11ba278974520c35..b4215cd30141bb524a7d99717841de127d7cda15 100755
--- a/report_coverage_device.sh
+++ b/report_coverage_device.sh
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-./report_coverage.sh | grep --color -E -i "^.*device.*$|$"
+./report_coverage_all.sh | grep --color -E -i "^.*device.*$|$"
diff --git a/run_integration_tests.sh b/run_integration_tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7cbbda122078ae1443fe9db8c51d7f47cd92c9d6
--- /dev/null
+++ b/run_integration_tests.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+IMAGE_NAME='integration_tester'
+IMAGE_TAG='latest'
+CI_REGISTRY_IMAGE='registry.gitlab.com/teraflow-h2020/controller'
+
+kubectl delete pod "$(echo "$IMAGE_NAME" | sed -r 's/[^a-zA-Z0-9\.\-]/-/g')" --wait=true --ignore-not-found=true
+kubectl get all
+kubectl run "$(echo "$IMAGE_NAME" | sed -r 's/[^a-zA-Z0-9\.\-]/-/g')" --image "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" --restart=Never --rm -i
+kubectl get all
diff --git a/run_unitary_tests.sh b/run_unitary_tests.sh
index 1305084800620e0e97f9f85849917540178b5368..84b8010341b9fa70275cd2c1039c8c0cca3d2fdc 100755
--- a/run_unitary_tests.sh
+++ b/run_unitary_tests.sh
@@ -5,7 +5,9 @@ RCFILE=~/teraflow/controller/coverage/.coveragerc
 
 # Run unitary tests and analyze coverage of code at same time
 coverage run --rcfile=$RCFILE -m pytest --log-level=DEBUG --verbose \
-    common/database/tests/test_unitary_inmemory.py \
+    common/database/tests/test_unitary.py \
+    common/database/tests/test_engine_inmemory.py \
+    context/tests/test_unitary.py \
     device/tests/test_unitary.py
 
 ## Run integration tests and analyze coverage of code at same time
diff --git a/src/common/database/Factory.py b/src/common/database/Factory.py
index 66d4d6b4d8ade4ccf17aa0dc3d88a669885261df..4b46ed0b145878c703fececd9b68fef54cbf7001 100644
--- a/src/common/database/Factory.py
+++ b/src/common/database/Factory.py
@@ -23,9 +23,13 @@ ENGINES = {
 
 DEFAULT_DB_ENGINE = DatabaseEngineEnum.INMEMORY
 
-def get_database(**settings) -> Database:
-    engine = os.environ.get('DB_ENGINE', DEFAULT_DB_ENGINE)
-    engine = settings.pop('DB_ENGINE', engine)
+def get_database(engine=None, **settings) -> Database:
+    # Return an instance of Database initialized with the selected engine.
+    # The engine is selected using the following criteria (the first non-None value wins):
+    # 1. engine explicitly selected through the 'engine' parameter
+    # 2. engine named by the DB_ENGINE environment variable
+    # 3. default engine: INMEMORY
+    if engine is None: engine = os.environ.get('DB_ENGINE', DEFAULT_DB_ENGINE)
     if engine is None: raise Exception('Database Engine not specified')
     if isinstance(engine, DatabaseEngineEnum): engine = engine.value
     engine_class = ENGINES.get(engine)
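For clarity, a minimal usage sketch of the new engine-selection precedence in get_database(); the import path is an assumption based on the src/ layout (DatabaseEngineEnum is reachable through Factory, which already references it):

    # Sketch only: the three ways get_database() can resolve its engine after this change.
    from common.database.Factory import get_database, DatabaseEngineEnum  # assumed import path

    db = get_database(engine=DatabaseEngineEnum.INMEMORY)  # 1. explicit 'engine' parameter
    # 2. otherwise the DB_ENGINE environment variable is consulted (as set in the manifests)
    # 3. with neither present, DEFAULT_DB_ENGINE (INMEMORY) is used
    db = get_database()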
diff --git a/src/common/database/api/Constants.py b/src/common/database/api/Constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..120b094a30e06a9476c1dce8bf91f42b383d0fa1
--- /dev/null
+++ b/src/common/database/api/Constants.py
@@ -0,0 +1,2 @@
+DEFAULT_CONTEXT_ID = 'admin'
+DEFAULT_TOPOLOGY_ID = 'admin'
diff --git a/src/common/database/api/Database.py b/src/common/database/api/Database.py
index 3ce5d8dfbaf7a46e77ce14a5e2165e68023d067e..c3aeaf628339f8ba58e3c616b7eb6a501cad9278 100644
--- a/src/common/database/api/Database.py
+++ b/src/common/database/api/Database.py
@@ -2,20 +2,21 @@ import logging
 from typing import List
 from ..engines._DatabaseEngine import _DatabaseEngine
 from .context.Context import Context
+from .Exceptions import WrongDatabaseEngine, MutexException
 
 LOGGER = logging.getLogger(__name__)
 
 class Database:
     def __init__(self, database_engine : _DatabaseEngine):
-        if not isinstance(database_engine, _DatabaseEngine):
-            raise Exception('database_engine must inherit from _DatabaseEngine')
+        if not isinstance(database_engine, _DatabaseEngine):
+            raise WrongDatabaseEngine('database_engine must inherit from _DatabaseEngine')
         self._database_engine = database_engine
         self._acquired = False
         self._owner_key = None
 
     def __enter__(self) -> '_DatabaseEngine':
         self._acquired, self._owner_key = self._database_engine.lock()
-        if not self._acquired: raise Exception('Unable to acquire database lock')
+        if not self._acquired: raise MutexException('Unable to acquire database lock')
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
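A short sketch of how the new exceptions surface when Database is used as a context manager (engine class and import paths taken from the tests added in this change; the context uuid is the default from Constants.py):

    from common.database.api.Database import Database
    from common.database.api.Exceptions import MutexException, WrongDatabaseEngine
    from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine

    database = Database(InMemoryDatabaseEngine())   # Database(None) would raise WrongDatabaseEngine
    try:
        with database:                              # __enter__ acquires the database lock
            database.context('admin').create()
    except MutexException:
        pass                                        # lock could not be acquired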
diff --git a/src/common/database/api/Exceptions.py b/src/common/database/api/Exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef60ac5f5ab9c3ec3de6cbf4c1af54dcc4389470
--- /dev/null
+++ b/src/common/database/api/Exceptions.py
@@ -0,0 +1,5 @@
+class WrongDatabaseEngine(Exception):
+    pass
+
+class MutexException(Exception):
+    pass
diff --git a/src/common/database/api/context/Context.py b/src/common/database/api/context/Context.py
index dfd3a5957b105824d75ae3d087304b96e8b3ae34..32991cc5ad2b29fe8492d42539edb7cccad7c5f1 100644
--- a/src/common/database/api/context/Context.py
+++ b/src/common/database/api/context/Context.py
@@ -1,24 +1,18 @@
 from typing import Dict
 from ...engines._DatabaseEngine import _DatabaseEngine
-from ..entity._Entity import _Entity
-from ..entity.EntityAttributes import EntityAttributes
+from ..entity._RootEntity import _RootEntity
 from ..entity.EntityCollection import EntityCollection
 from .Keys import KEY_CONTEXT, KEY_TOPOLOGIES
 from .Topology import Topology
 
-VALIDATORS = {}
+VALIDATORS = {}  # no attributes accepted
+TRANSCODERS = {} # no transcoding applied to attributes
 
-class Context(_Entity):
+class Context(_RootEntity):
     def __init__(self, context_uuid : str, database_engine : _DatabaseEngine):
-        self._database_engine = database_engine
-        super().__init__(parent=self)
-        self._context_uuid = context_uuid
-        self._attributes = EntityAttributes(self, KEY_CONTEXT, validators=VALIDATORS)
+        super().__init__(database_engine, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS)
         self._topologies = EntityCollection(self, KEY_TOPOLOGIES)
 
-    @property
-    def database_engine(self) -> _DatabaseEngine: return self._database_engine
-
     @property
     def parent(self) -> 'Context': return self
 
@@ -26,10 +20,7 @@ class Context(_Entity):
     def context(self) -> 'Context': return self
 
     @property
-    def context_uuid(self) -> str: return self._context_uuid
-
-    @property
-    def attributes(self) -> EntityAttributes: return self._attributes
+    def context_uuid(self) -> str: return self._entity_uuid
 
     @property
     def topologies(self) -> EntityCollection: return self._topologies
@@ -42,5 +33,10 @@ class Context(_Entity):
         for topology_uuid in self.topologies.get(): self.topology(topology_uuid).delete()
         self.attributes.delete()
 
+    def dump_id(self) -> Dict:
+        return {
+            'contextUuid': {'uuid': self.context_uuid},
+        }
+
     def dump(self) -> Dict:
         return {topology_uuid : self.topology(topology_uuid).dump() for topology_uuid in self.topologies.get()}
diff --git a/src/common/database/api/context/Device.py b/src/common/database/api/context/Device.py
index 5771c7fe937cd0d0cf60959d4c0e4f42e9c299af..fb4b5becb6de1f158447b3e7630ff6f87fbbdf7d 100644
--- a/src/common/database/api/context/Device.py
+++ b/src/common/database/api/context/Device.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
 from ..entity._Entity import _Entity
-from ..entity.EntityAttributes import EntityAttributes
 from ..entity.EntityCollection import EntityCollection
 from .Endpoint import Endpoint
 from .Keys import KEY_DEVICE, KEY_DEVICE_ENDPOINTS
@@ -12,9 +11,9 @@ if TYPE_CHECKING:
     from .Topology import Topology
 
 VALIDATORS = {
-    'device_type': lambda v: v is None or isinstance(v, str),
-    'device_config': lambda v: v is None or isinstance(v, str),
-    'device_operational_status': lambda v: v is None or isinstance(v, OperationalStatus),
+    'device_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'device_config': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'device_operational_status': lambda v: v is not None and isinstance(v, OperationalStatus),
 }
 
 TRANSCODERS = {
@@ -27,30 +26,26 @@ TRANSCODERS = {
 
 class Device(_Entity):
     def __init__(self, device_uuid : str, parent : 'Topology'):
-        super().__init__(parent=parent)
-        self._device_uuid = device_uuid
-        self._topology_uuid = self._parent.topology_uuid
-        self._context_uuid = self._parent.context_uuid
-        self._attributes = EntityAttributes(self, KEY_DEVICE, VALIDATORS, transcoders=TRANSCODERS)
+        super().__init__(parent, device_uuid, KEY_DEVICE, VALIDATORS, TRANSCODERS)
         self._endpoints = EntityCollection(self, KEY_DEVICE_ENDPOINTS)
 
     @property
     def parent(self) -> 'Topology': return self._parent
 
     @property
-    def context(self) -> 'Context': return self._parent.context
+    def context(self) -> 'Context': return self.parent.context
 
     @property
-    def context_uuid(self) -> str: return self.context.context_uuid
+    def context_uuid(self) -> str: return self.parent.context_uuid
 
     @property
-    def topology_uuid(self) -> str: return self.parent.topology_uuid
+    def topology(self) -> 'Topology': return self.parent
 
     @property
-    def device_uuid(self) -> str: return self._device_uuid
+    def topology_uuid(self) -> str: return self.parent.topology_uuid
 
     @property
-    def attributes(self) -> EntityAttributes: return self._attributes
+    def device_uuid(self) -> str: return self._entity_uuid
 
     @property
     def endpoints(self) -> EntityCollection: return self._endpoints
@@ -75,13 +70,18 @@ class Device(_Entity):
         self.attributes.delete()
         self.parent.devices.delete(self.device_uuid)
 
+    def dump_id(self) -> Dict:
+        return {
+            'device_id': {'uuid': self.device_uuid},
+        }
+
     def dump(self) -> Dict:
         attributes = self.attributes.get()
         dev_op_status = attributes.get('device_operational_status', None)
         if isinstance(dev_op_status, OperationalStatus): dev_op_status = dev_op_status.value
         endpoints = [self.endpoint(endpoint_uuid).dump() for endpoint_uuid in self.endpoints.get()]
         return {
-            'device_id': {'device_id': {'uuid': self.device_uuid}},
+            'device_id': self.dump_id(),
             'device_type': attributes.get('device_type', None),
             'device_config': {'device_config': attributes.get('device_config', None)},
             'devOperationalStatus': dev_op_status,
diff --git a/src/common/database/api/context/Endpoint.py b/src/common/database/api/context/Endpoint.py
index 8f165848d93c20c19ab49bd43ad14c4dd3395220..413a680a8e8cdb13e8df120a2514c46497d7a071 100644
--- a/src/common/database/api/context/Endpoint.py
+++ b/src/common/database/api/context/Endpoint.py
@@ -1,52 +1,52 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
 from ..entity._Entity import _Entity
-from ..entity.EntityAttributes import EntityAttributes
 from .Keys import KEY_ENDPOINT
 
 if TYPE_CHECKING:
     from .Context import Context
+    from .Topology import Topology
     from .Device import Device
 
 VALIDATORS = {
-    'port_type': lambda v: v is None or isinstance(v, str),
+    'port_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
 }
 
+TRANSCODERS = {} # no transcoding applied to attributes
+
 class Endpoint(_Entity):
     def __init__(self, endpoint_uuid : str, parent : 'Device'):
-        super().__init__(parent=parent)
-        self._endpoint_uuid = endpoint_uuid
-        self._device_uuid = self._parent.device_uuid
-        self._topology_uuid = self._parent.topology_uuid
-        self._context_uuid = self._parent.context_uuid
-        self._attributes = EntityAttributes(self, KEY_ENDPOINT, VALIDATORS)
+        super().__init__(parent, endpoint_uuid, KEY_ENDPOINT, VALIDATORS, TRANSCODERS)
 
     @property
     def parent(self) -> 'Device': return self._parent
 
     @property
-    def context(self) -> 'Context': return self._parent.context
+    def context(self) -> 'Context': return self.parent.context
+
+    @property
+    def context_uuid(self) -> str: return self.parent.context_uuid
 
     @property
-    def context_uuid(self) -> str: return self.context.context_uuid
+    def topology(self) -> 'Topology': return self.parent.topology
 
     @property
     def topology_uuid(self) -> str: return self.parent.topology_uuid
 
     @property
-    def device_uuid(self) -> str: return self.parent.device_uuid
+    def device(self) -> 'Device': return self.parent
 
     @property
-    def endpoint_uuid(self) -> str: return self._endpoint_uuid
+    def device_uuid(self) -> str: return self.parent.device_uuid
 
     @property
-    def attributes(self) -> EntityAttributes: return self._attributes
+    def endpoint_uuid(self) -> str: return self._entity_uuid
 
     def create(self, port_type : str) -> 'Endpoint':
         self.update(update_attributes={
             'port_type': port_type
         })
-        self.parent.endpoints.add(self._endpoint_uuid)
+        self.parent.endpoints.add(self.endpoint_uuid)
         return self
 
     def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
@@ -55,18 +55,18 @@ class Endpoint(_Entity):
 
     def delete(self) -> None:
         self.attributes.delete()
-        self.parent.endpoints.delete(self._endpoint_uuid)
+        self.parent.endpoints.delete(self.endpoint_uuid)
 
-    def dump_uuid(self) -> Dict:
+    def dump_id(self) -> Dict:
         return {
-            'topoId': {'uuid': self.topology_uuid},
-            'dev_id': {'uuid': self.device_uuid},
+            'topoId': self.topology.dump_id(),
+            'dev_id': self.device.dump_id(),
             'port_id': {'uuid': self.endpoint_uuid},
         }
 
     def dump(self) -> Dict:
         attributes = self.attributes.get()
         return {
-            'port_id': self.dump_uuid(),
+            'port_id': self.dump_id(),
             'port_type': attributes.get('port_type', None),
         }
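Illustrative only: with dump_id() now composing the parent identifiers, an endpoint id dumps roughly as follows (the uuids are example values, not taken from this diff):

    # assuming context 'admin', topology 'admin', device 'dev1', endpoint 'to-dev2'
    {
        'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
        'dev_id': {'device_id': {'uuid': 'dev1'}},
        'port_id': {'uuid': 'to-dev2'},
    }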
diff --git a/src/common/database/api/context/Link.py b/src/common/database/api/context/Link.py
index ed11103cea97c1f5f78f39f79e6116f13d6dcce6..bf661dbb2897822a45071c157619b97c1ebca1d9 100644
--- a/src/common/database/api/context/Link.py
+++ b/src/common/database/api/context/Link.py
@@ -3,34 +3,37 @@ from typing import TYPE_CHECKING, Dict
 from ..entity._Entity import _Entity
 from ..entity.EntityCollection import EntityCollection
 from .LinkEndpoint import LinkEndpoint
-from .Keys import KEY_LINK_ENDPOINTS
+from .Keys import KEY_LINK, KEY_LINK_ENDPOINTS
 
 if TYPE_CHECKING:
     from .Context import Context
     from .Topology import Topology
 
+VALIDATORS = {}  # no attributes accepted
+TRANSCODERS = {} # no transcoding applied to attributes
+
 class Link(_Entity):
     def __init__(self, link_uuid : str, parent : 'Topology'):
-        super().__init__(parent=parent)
-        self._link_uuid = link_uuid
-        self._topology_uuid = self._parent.topology_uuid
-        self._context_uuid = self._parent.context_uuid
+        super().__init__(parent, link_uuid, KEY_LINK, VALIDATORS, TRANSCODERS)
         self._endpoints = EntityCollection(self, KEY_LINK_ENDPOINTS)
 
     @property
-    def context(self) -> 'Context': return self._parent.context
+    def parent(self) -> 'Topology': return self._parent
 
     @property
-    def context_uuid(self) -> str: return self.context.context_uuid
+    def context(self) -> 'Context': return self.parent.context
 
     @property
-    def parent(self) -> 'Topology': return self._parent
+    def context_uuid(self) -> str: return self.parent.context_uuid
+
+    @property
+    def topology(self) -> 'Topology': return self.parent
 
     @property
     def topology_uuid(self) -> str: return self.parent.topology_uuid
 
     @property
-    def link_uuid(self) -> str: return self._link_uuid
+    def link_uuid(self) -> str: return self._entity_uuid
 
     @property
     def endpoints(self) -> EntityCollection: return self._endpoints
@@ -43,11 +46,17 @@ class Link(_Entity):
 
     def delete(self) -> None:
         for endpoint_uuid in self.endpoints.get(): self.endpoint(endpoint_uuid).delete()
+        self.attributes.delete()
         self.parent.links.delete(self.link_uuid)
 
+    def dump_id(self) -> Dict:
+        return {
+            'link_id': {'uuid': self.link_uuid},
+        }
+
     def dump(self) -> Dict:
         endpoints = [self.endpoint(link_endpoint_uuid).dump() for link_endpoint_uuid in self.endpoints.get()]
         return {
-            'link_id': {'link_id': {'uuid': self.link_uuid}},
+            'link_id': self.dump_id(),
             'endpointList': endpoints
         }
diff --git a/src/common/database/api/context/LinkEndpoint.py b/src/common/database/api/context/LinkEndpoint.py
index 3607ba5edc262afeb028f2c153cafc400b05a14f..4acb62fdb36fe78f65710d361370f41962312859 100644
--- a/src/common/database/api/context/LinkEndpoint.py
+++ b/src/common/database/api/context/LinkEndpoint.py
@@ -1,48 +1,48 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
 from ..entity._Entity import _Entity
-from ..entity.EntityAttributes import EntityAttributes
 from .Endpoint import Endpoint
 from .Keys import KEY_LINK_ENDPOINT
 
 if TYPE_CHECKING:
     from .Context import Context
+    from .Topology import Topology
     from .Link import Link
 
 VALIDATORS = {
-    'device_uuid': lambda v: v is None or isinstance(v, str),
-    'endpoint_uuid': lambda v: v is None or isinstance(v, str),
+    'device_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'endpoint_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
 }
 
+TRANSCODERS = {} # no transcoding applied to attributes
+
 class LinkEndpoint(_Entity):
     def __init__(self, link_endpoint_uuid : str, parent : 'Link'):
-        super().__init__(parent=parent)
-        self._link_endpoint_uuid = link_endpoint_uuid
-        self._link_uuid = self._parent.link_uuid
-        self._topology_uuid = self._parent.topology_uuid
-        self._context_uuid = self._parent.context_uuid
-        self._attributes = EntityAttributes(self, KEY_LINK_ENDPOINT, VALIDATORS)
+        super().__init__(parent, link_endpoint_uuid, KEY_LINK_ENDPOINT, VALIDATORS, TRANSCODERS)
 
     @property
     def parent(self) -> 'Link': return self._parent
 
     @property
-    def context(self) -> 'Context': return self._parent.context
+    def context(self) -> 'Context': return self.parent.context
+
+    @property
+    def context_uuid(self) -> str: return self.parent.context_uuid
 
     @property
-    def context_uuid(self) -> str: return self.context.context_uuid
+    def topology(self) -> 'Topology': return self.parent.topology
 
     @property
     def topology_uuid(self) -> str: return self.parent.topology_uuid
 
     @property
-    def link_uuid(self) -> str: return self.parent.link_uuid
+    def link(self) -> 'Link': return self.parent
 
     @property
-    def link_endpoint_uuid(self) -> str: return self._link_endpoint_uuid
+    def link_uuid(self) -> str: return self.parent.link_uuid
 
     @property
-    def attributes(self) -> EntityAttributes: return self._attributes
+    def link_endpoint_uuid(self) -> str: return self._entity_uuid
 
     def create(self, endpoint : Endpoint) -> 'LinkEndpoint':
         self.update(update_attributes={
@@ -60,10 +60,12 @@ class LinkEndpoint(_Entity):
         self.attributes.delete()
         self.parent.endpoints.delete(self.link_endpoint_uuid)
 
-    def dump(self) -> Dict:
+    def dump_id(self) -> Dict:
         attributes = self.attributes.get()
-        return {
-            'topoId': {'uuid': self.topology_uuid},
-            'dev_id': {'uuid': attributes.get('device_uuid', None)},
-            'port_id': {'uuid': attributes.get('endpoint_uuid', None)},
-        }
+        device_uuid = attributes.get('device_uuid', None)
+        endpoint_uuid = attributes.get('endpoint_uuid', None)
+        endpoint = self.topology.device(device_uuid).endpoint(endpoint_uuid)
+        return endpoint.dump_id()
+
+    def dump(self) -> Dict:
+        return self.dump_id()
diff --git a/src/common/database/api/context/Topology.py b/src/common/database/api/context/Topology.py
index c95d300781f1b6b2633adae9c3512e3fe878dd5d..2fc36ed3c46a64dfb92337c2f62608529d2e65d9 100644
--- a/src/common/database/api/context/Topology.py
+++ b/src/common/database/api/context/Topology.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
 from ..entity._Entity import _Entity
-from ..entity.EntityAttributes import EntityAttributes
 from ..entity.EntityCollection import EntityCollection
 from .Keys import KEY_TOPOLOGY, KEY_DEVICES, KEY_LINKS
 from .Device import Device
@@ -10,14 +9,12 @@ from .Link import Link
 if TYPE_CHECKING:
     from .Context import Context
 
-VALIDATORS = {}
+VALIDATORS = {}  # no attributes accepted
+TRANSCODERS = {} # no transcoding applied to attributes
 
 class Topology(_Entity):
     def __init__(self, topology_uuid : str, parent : 'Context'):
-        super().__init__(parent=parent)
-        self._topology_uuid = topology_uuid
-        self._context_uuid = self._parent.context_uuid
-        self._attributes = EntityAttributes(self, KEY_TOPOLOGY, validators=VALIDATORS)
+        super().__init__(parent, topology_uuid, KEY_TOPOLOGY, VALIDATORS, TRANSCODERS)
         self._devices = EntityCollection(self, KEY_DEVICES)
         self._links = EntityCollection(self, KEY_LINKS)
 
@@ -31,19 +28,16 @@ class Topology(_Entity):
     def context_uuid(self) -> str: return self.context.context_uuid
 
     @property
-    def topology_uuid(self) -> str: return self._topology_uuid
-
-    @property
-    def attributes(self) -> EntityAttributes: return self._attributes
+    def topology_uuid(self) -> str: return self._entity_uuid
 
     @property
     def devices(self) -> EntityCollection: return self._devices
 
-    def device(self, device_uuid : str) -> Device: return Device(device_uuid, self)
-
     @property
     def links(self) -> EntityCollection: return self._links
 
+    def device(self, device_uuid : str) -> Device: return Device(device_uuid, self)
+
     def link(self, link_uuid : str) -> Link: return Link(link_uuid, self)
 
     def create(self) -> 'Topology':
@@ -56,14 +50,17 @@ class Topology(_Entity):
         self.attributes.delete()
         self.parent.topologies.delete(self.topology_uuid)
 
+    def dump_id(self) -> Dict:
+        return {
+            'contextId': self.context.dump_id(),
+            'topoId': {'uuid': self.topology_uuid},
+        }
+
     def dump(self) -> Dict:
         devices = [self.device(device_uuid).dump() for device_uuid in self.devices.get()]
         links   = [self.link  (link_uuid  ).dump() for link_uuid   in self.links.get()]
         return {
-            'topoId': {
-                'contextId': {'contextUuid': {'uuid': self.context_uuid}},
-                'topoId': {'uuid': self.topology_uuid},
-            },
+            'topoId': self.dump_id(),
             'device': devices,
             'link': links,
         }
diff --git a/src/common/database/api/entity/EntityAttributes.py b/src/common/database/api/entity/EntityAttributes.py
index 244b43bc711cfa67067de47866cee302cfc9138e..47642b823de3164db18667aa474a95aed84e730c 100644
--- a/src/common/database/api/entity/EntityAttributes.py
+++ b/src/common/database/api/entity/EntityAttributes.py
@@ -8,7 +8,7 @@ if TYPE_CHECKING:
     from ._Entity import _Entity
 
 class EntityAttributes:
-    def __init__(self, parent : '_Entity', entity_key : str, validators : Dict, transcoders : Dict={}):
+    def __init__(self, parent : '_Entity', entity_key : str, validators : Dict, transcoders : Dict = {}):
         self._parent = parent
         self._database_engine : _DatabaseEngine = self._parent.database_engine
         self._entity_key = format_key(entity_key, self._parent)
@@ -18,6 +18,7 @@ class EntityAttributes:
     def validate(self, update_attributes, remove_attributes, attribute_name):
         remove_attributes.discard(attribute_name)
         value = update_attributes.pop(attribute_name, None)
+        if value is None: return
         validator = self._validators.get(attribute_name)
         if validator is None: return
         if not validator(value): raise AttributeError('{} is invalid'.format(attribute_name))
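A small sketch of the validator contract after this change: values must now be non-None (and, for strings, non-empty), while a None value is popped and skipped by validate() instead of being rejected:

    # Validator style used across this change (taken from Device.py):
    VALIDATORS = {
        'device_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
    }
    assert VALIDATORS['device_type']('ROADM')        # accepted
    assert not VALIDATORS['device_type']('')         # rejected: empty string
    # Passing device_type=None in update_attributes is popped early by validate(),
    # so the attribute is neither validated nor written.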
diff --git a/src/common/database/api/entity/_Entity.py b/src/common/database/api/entity/_Entity.py
index 48637a8eebdf78afeb8721028d59b9fa4b5aa8e1..9d0b3dfb05f58e9af1a595e7527f3e48b5bb7a5d 100644
--- a/src/common/database/api/entity/_Entity.py
+++ b/src/common/database/api/entity/_Entity.py
@@ -1,16 +1,35 @@
-from typing import Dict
+from typing import Any, Callable, Dict
+from ...engines._DatabaseEngine import _DatabaseEngine
+from .EntityAttributes import EntityAttributes
 
 class _Entity:
-    def __init__(self, parent=None):
-        if (parent is None) or (not isinstance(parent, _Entity)):
+    def __init__(self, parent, entity_uuid : str, attributes_key : str,
+                 attribute_validators : Dict[str, Callable[[Any], bool]],
+                 attribute_transcoders : Dict[str, Dict[Any, Callable[[Any], Any]]]):
+        if not isinstance(parent, _Entity):
             raise AttributeError('parent must be an instance of _Entity')
+        if (not isinstance(entity_uuid, str)) or (len(entity_uuid) == 0):
+            raise AttributeError('entity_uuid must be a non-empty instance of str')
+        if (not isinstance(attributes_key, str)) or (len(attributes_key) == 0):
+            raise AttributeError('attributes_key must be a non-empty instance of str')
+        if not isinstance(attribute_validators, dict):
+            raise AttributeError('attribute_validators must be an instance of dict')
+        if not isinstance(attribute_transcoders, dict):
+            raise AttributeError('attribute_transcoders must be an instance of dict')
+
+        self._entity_uuid = entity_uuid
         self._parent = parent
+        self._attributes = EntityAttributes(self, attributes_key, attribute_validators,
+                                            transcoders=attribute_transcoders)
 
     @property
     def parent(self) -> '_Entity': return self._parent
 
     @property
-    def database_engine(self) -> object: return self._parent.database_engine
+    def database_engine(self) -> _DatabaseEngine: return self._parent.database_engine
+
+    @property
+    def attributes(self) -> EntityAttributes: return self._attributes
 
     def load(self):
         raise NotImplementedError()
@@ -21,5 +40,8 @@ class _Entity:
     def delete(self):
         raise NotImplementedError()
 
+    def dump_id(self) -> Dict:
+        raise NotImplementedError()
+
     def dump(self) -> Dict:
         raise NotImplementedError()
diff --git a/src/common/database/api/entity/_RootEntity.py b/src/common/database/api/entity/_RootEntity.py
new file mode 100644
index 0000000000000000000000000000000000000000..6047bec8b268d4e412161d4515b2c3c7c56d800a
--- /dev/null
+++ b/src/common/database/api/entity/_RootEntity.py
@@ -0,0 +1,16 @@
+from typing import Any, Callable, Dict
+from ._Entity import _Entity
+from ...engines._DatabaseEngine import _DatabaseEngine
+
+class _RootEntity(_Entity):
+    def __init__(self, database_engine : _DatabaseEngine, entity_uuid: str, attributes_key: str,
+                 attributes_validators: Dict[str, Callable[[Any], bool]],
+                 attribute_transcoders: Dict[str, Dict[Any, Callable[[Any], Any]]]):
+        self._database_engine = database_engine
+        super().__init__(self, entity_uuid, attributes_key, attributes_validators, attribute_transcoders)
+
+    @property
+    def parent(self) -> '_RootEntity': return self
+
+    @property
+    def database_engine(self) -> _DatabaseEngine: return self._database_engine
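For reference, a minimal sketch of the new entity hierarchy, mirroring the RootMockEntity used in test_unitary.py: the root entity is its own parent and owns the engine, while ordinary entities reach the engine through their parent chain:

    from common.database.api.entity._Entity import _Entity
    from common.database.api.entity._RootEntity import _RootEntity
    from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine

    class RootMockEntity(_RootEntity):
        def __init__(self, database_engine):
            super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {})

    root = RootMockEntity(InMemoryDatabaseEngine())
    child = _Entity(root, 'child-uuid', 'child-key', {}, {})   # empty validators/transcoders
    assert child.database_engine is root.database_engine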
diff --git a/src/common/database/engines/redis/RedisDatabaseEngine.py b/src/common/database/engines/redis/RedisDatabaseEngine.py
index b8bad839830ce8a9309100c08a6a29640e0b516c..e8777796823424fc9fd37a691c0616f5915b3dfa 100644
--- a/src/common/database/engines/redis/RedisDatabaseEngine.py
+++ b/src/common/database/engines/redis/RedisDatabaseEngine.py
@@ -13,9 +13,9 @@ def get_setting(settings, name):
 
 class RedisDatabaseEngine(_DatabaseEngine):
     def __init__(self, **settings) -> None:
-        host = get_setting(settings, 'REDISDB_SERVICE_HOST')
-        port = get_setting(settings, 'REDISDB_SERVICE_PORT')
-        dbid = get_setting(settings, 'REDISDB_DATABASE_ID')
+        host = get_setting(settings, 'REDIS_SERVICE_HOST')
+        port = get_setting(settings, 'REDIS_SERVICE_PORT')
+        dbid = get_setting(settings, 'REDIS_DATABASE_ID')
         self._client = Redis.from_url('redis://{host}:{port}/{dbid}'.format(host=host, port=port, dbid=dbid))
         self._mutex = Mutex(self._client)
 
diff --git a/src/common/database/tests/script.py b/src/common/database/tests/script.py
index ca133220176a096de04ffeeb1331a98cab47ca4f..2bd1a40296f943004d4c90db519406be512a449b 100644
--- a/src/common/database/tests/script.py
+++ b/src/common/database/tests/script.py
@@ -1,15 +1,16 @@
 import json, logging, time
+from ..api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
 from ..api.context.OperationalStatus import OperationalStatus
 from ..api.Database import Database
 
 LOGGER = logging.getLogger(__name__)
 
-def sequence(database : Database):
+def populate_example(database : Database, context_uuid=DEFAULT_CONTEXT_ID, topology_uuid=DEFAULT_TOPOLOGY_ID):
     database.clear_all()
 
     with database:
-        context = database.context('ctx-test').create()
-        topology = context.topology('base-topo').create()
+        context = database.context(context_uuid).create()
+        topology = context.topology(topology_uuid).create()
 
         device_1 = topology.device('dev1').create(
             device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
@@ -83,20 +84,24 @@ def sequence(database : Database):
         link_dev4_to_dev3.endpoint('dev4/to-dev3').create(endpoint_dev4_to_dev3)
         link_dev4_to_dev3.endpoint('dev3/to-dev4').create(endpoint_dev3_to_dev4)
 
+def sequence(database : Database):
+    populate_example(database)
+
+    with database:
         LOGGER.info('Dump:')
         for entry in database.dump():
             LOGGER.info('  {}'.format(entry))
 
     with database:
         t0 = time.time()
-        context = database.context('ctx-test').create()
+        context = database.context(DEFAULT_CONTEXT_ID).create()
         json_context = context.dump()
         t1 = time.time()
         LOGGER.info(json.dumps(json_context))
         LOGGER.info('Dump elapsed: {}'.format(1000.0 * (t1-t0)))
 
     with database:
-        database.context('ctx-test').delete()
+        database.context(DEFAULT_CONTEXT_ID).delete()
         LOGGER.info('Dump:')
         for entry in database.dump():
             LOGGER.info('  {}'.format(entry))
diff --git a/src/common/database/tests/test_unitary_inmemory.py b/src/common/database/tests/test_engine_inmemory.py
similarity index 81%
rename from src/common/database/tests/test_unitary_inmemory.py
rename to src/common/database/tests/test_engine_inmemory.py
index 71c47854248a22b52ae09ef7746950b22fb276b8..403f428200a6580a0fc32bfe2b90ce20b855d2a9 100644
--- a/src/common/database/tests/test_unitary_inmemory.py
+++ b/src/common/database/tests/test_engine_inmemory.py
@@ -6,5 +6,5 @@ logging.basicConfig(level=logging.INFO)
 
 def test_inmemory():
     database = get_database(
-        DB_ENGINE=DatabaseEngineEnum.INMEMORY)
+        engine=DatabaseEngineEnum.INMEMORY)
     sequence(database)
diff --git a/src/common/database/tests/test_integration_redis.py b/src/common/database/tests/test_engine_redis.py
similarity index 69%
rename from src/common/database/tests/test_integration_redis.py
rename to src/common/database/tests/test_engine_redis.py
index c56bc0024e569f240319b4b4adac5d7853dc14ac..39df1cf393d22fccfd1f14e0b47327e9cf79f73e 100644
--- a/src/common/database/tests/test_integration_redis.py
+++ b/src/common/database/tests/test_engine_redis.py
@@ -6,6 +6,6 @@ logging.basicConfig(level=logging.INFO)
 
 def test_redis():
     database = get_database(
-        DB_ENGINE=DatabaseEngineEnum.REDIS, REDISDB_SERVICE_HOST='127.0.0.1', REDISDB_SERVICE_PORT=31926,
-        REDISDB_DATABASE_ID=0)
+        engine=DatabaseEngineEnum.REDIS, REDIS_SERVICE_HOST='127.0.0.1', REDIS_SERVICE_PORT=31926,
+        REDIS_DATABASE_ID=0)
     sequence(database)
diff --git a/src/common/database/tests/test_unitary.py b/src/common/database/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..c00e2f7c9165ee84bccf4a34f06c0915bf4726c9
--- /dev/null
+++ b/src/common/database/tests/test_unitary.py
@@ -0,0 +1,84 @@
+import logging, pytest
+from ..api.Database import Database
+from ..api.entity._Entity import _Entity
+from ..api.entity._RootEntity import _RootEntity
+from ..api.entity.EntityAttributes import EntityAttributes
+from ..api.Exceptions import WrongDatabaseEngine
+from ..engines._DatabaseEngine import _DatabaseEngine
+from ..engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
+
+logging.basicConfig(level=logging.INFO)
+
+def test_database_gets_none_database_engine():
+    # should fail with invalid database engine
+    with pytest.raises(WrongDatabaseEngine) as e:
+        Database(None)
+    assert str(e.value) == 'database_engine must inherit from _DatabaseEngine'
+
+def test_database_gets_correct_database_engine():
+    # should work
+    assert Database(InMemoryDatabaseEngine()) is not None
+
+def test_entity_gets_invalid_parameters():
+
+    class RootMockEntity(_RootEntity):
+        def __init__(self, database_engine : _DatabaseEngine):
+            super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {})
+
+    # should fail with invalid parent
+    with pytest.raises(AttributeError) as e:
+        _Entity(None, 'valid-uuid', 'valid-attributes-key', {}, {})
+    assert str(e.value) == 'parent must be an instance of _Entity'
+
+    # should fail with invalid entity uuid
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {})
+    assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
+
+    # should fail with invalid entity uuid
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {})
+    assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
+
+    # should fail with invalid attribute key
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {})
+    assert str(e.value) == 'attributes_key must be a non-empty instance of str'
+
+    # should fail with invalid attribute key
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {})
+    assert str(e.value) == 'attributes_key must be a non-empty instance of str'
+
+    # should fail with invalid attribute validators
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {})
+    assert str(e.value) == 'attribute_validators must be an instance of dict'
+
+    # should fail with invalid attribute transcoders
+    with pytest.raises(AttributeError) as e:
+        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, [])
+    assert str(e.value) == 'attribute_transcoders must be an instance of dict'
+
+    # should work
+    assert _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None
+
+def test_entity_attributes_gets_invalid_parameters():
+
+    class RootMockEntity(_RootEntity):
+        def __init__(self, database_engine : _DatabaseEngine):
+            super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {})
+
+    # should work
+    root_entity = RootMockEntity(InMemoryDatabaseEngine())
+    validators = {'attr': lambda v: True}
+    entity_attrs = EntityAttributes(root_entity, 'valid-attributes-key', validators, {})
+    assert entity_attrs is not None
+
+    with pytest.raises(AttributeError) as e:
+        entity_attrs.update(update_attributes={'non-defined-attr': 'random-value'})
+    assert str(e.value) == "Unexpected update_attributes: {'non-defined-attr': 'random-value'}"
+
+    with pytest.raises(AttributeError) as e:
+        entity_attrs.update(remove_attributes=['non-defined-attr'])
+    assert str(e.value) == "Unexpected remove_attributes: {'non-defined-attr'}"
diff --git a/src/common/exceptions/ServiceException.py b/src/common/exceptions/ServiceException.py
new file mode 100644
index 0000000000000000000000000000000000000000..476650b039b90c9cef318015664cf72509df4b58
--- /dev/null
+++ b/src/common/exceptions/ServiceException.py
@@ -0,0 +1,7 @@
+import grpc
+
+class ServiceException(Exception):
+    def __init__(self, code : grpc.StatusCode, details : str) -> None:
+        self.code = code
+        self.details = details
+        super().__init__()
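A hedged sketch (not part of this change) of how a gRPC servicer could translate ServiceException into a gRPC status; the servicer class and method body are illustrative, only set_code()/set_details() are standard grpc.ServicerContext calls:

    from common.exceptions.ServiceException import ServiceException
    from context.proto.context_pb2 import Empty

    class ContextServiceServicerImpl:                 # hypothetical servicer
        def DeleteLink(self, request, context):
            try:
                ...                                   # perform the deletion
            except ServiceException as e:
                context.set_code(e.code)              # e.g. grpc.StatusCode.NOT_FOUND
                context.set_details(e.details)
            return Empty()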
diff --git a/src/context/tests/tools/__init__.py b/src/common/exceptions/__init__.py
similarity index 100%
rename from src/context/tests/tools/__init__.py
rename to src/common/exceptions/__init__.py
diff --git a/src/common/tests/Assertions.py b/src/common/tests/Assertions.py
index f4f88e4aa2aac645f8cd84a7ee3912071cd2606b..7e08621f107805dd89978ad380675ef7b547d582 100644
--- a/src/common/tests/Assertions.py
+++ b/src/common/tests/Assertions.py
@@ -16,9 +16,28 @@ def validate_device_id(message):
     assert 'device_id' in message
     validate_uuid(message['device_id'])
 
+def validate_link_id(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'link_id' in message
+    validate_uuid(message['link_id'])
+
 def validate_topology(message):
     assert type(message) is dict
     assert len(message.keys()) > 0
     assert 'topoId' in message
     assert 'device' in message
     assert 'link' in message
+
+def validate_topology_is_empty(message):
+    validate_topology(message)
+    assert len(message['device']) == 0
+    assert len(message['link']) == 0
+
+def validate_topology_has_devices(message):
+    validate_topology(message)
+    assert len(message['device']) > 0
+
+def validate_topology_has_links(message):
+    validate_topology(message)
+    assert len(message['link']) > 0
diff --git a/src/context/.gitlab-ci.yml b/src/context/.gitlab-ci.yml
index 9ef5ae21e6f16044f4c0a99d10a62d897f7be6c4..1d702f3c9d689fd40217f458fe2dfb627c85d478 100644
--- a/src/context/.gitlab-ci.yml
+++ b/src/context/.gitlab-ci.yml
@@ -17,7 +17,7 @@ build context:
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-test unitary context:
+test context:
   variables:
     IMAGE_NAME: 'context' # name of the microservice
     IMAGE_NAME_TEST: 'context-test' # name of the microservice
@@ -44,15 +44,16 @@ test unitary context:
       - src/$IMAGE_NAME/**
       - .gitlab-ci.yml
 
-# Deployment of the monitoring service in Kubernetes Cluster
+# Deployment of the service in Kubernetes Cluster
 deploy context:
   stage: deploy
   needs:
     - build context
-    - test unitary context
+    - test context
+    - dependencies all
   script:
     - kubectl version
     - kubectl get all
     - kubectl apply -f "manifests/contextservice.yaml"
+    - kubectl delete pods --selector app=contextservice
     - kubectl get all
-  when: manual
diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py
index 83d511cd70fedeb266be3c53516a4be19c6b677f..64bd0010b3d1d57a90023b5473d8d89b8a69823a 100644
--- a/src/context/client/ContextClient.py
+++ b/src/context/client/ContextClient.py
@@ -1,6 +1,6 @@
 import grpc, logging
-from google.protobuf.json_format import MessageToDict
 from common.tools.RetryDecorator import retry, delay_exponential
+from context.proto.context_pb2 import Link, LinkId, Empty, Topology
 from context.proto.context_pb2_grpc import ContextServiceStub
 
 LOGGER = logging.getLogger(__name__)
@@ -26,10 +26,22 @@ class ContextClient:
         self.stub = None
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def GetTopology(self, request):
+    def GetTopology(self, request : Empty) -> Topology:
         LOGGER.debug('GetTopology request: {}'.format(request))
         response = self.stub.GetTopology(request)
         LOGGER.debug('GetTopology result: {}'.format(response))
-        return MessageToDict(
-            response, including_default_value_fields=True, preserving_proto_field_name=True,
-            use_integers_for_enums=False)
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def AddLink(self, request : Link) -> LinkId:
+        LOGGER.debug('AddLink request: {}'.format(request))
+        response = self.stub.AddLink(request)
+        LOGGER.debug('AddLink result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def DeleteLink(self, request : LinkId) -> Empty:
+        LOGGER.debug('DeleteLink request: {}'.format(request))
+        response = self.stub.DeleteLink(request)
+        LOGGER.debug('DeleteLink result: {}'.format(response))
+        return response
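A brief usage sketch of the extended client; the ContextClient constructor arguments are not shown in this diff and are assumed, and the link uuid is an example value:

    from context.client.ContextClient import ContextClient
    from context.proto.context_pb2 import Empty, Link, LinkId, Uuid

    client = ContextClient('127.0.0.1', 1010)         # assumed constructor signature (host, port)
    topology = client.GetTopology(Empty())            # now returns the raw protobuf Topology
    link_id = client.AddLink(Link(link_id=LinkId(link_id=Uuid(uuid='dev1/to-dev2'))))
    client.DeleteLink(link_id)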
diff --git a/src/context/genproto.sh b/src/context/genproto.sh
index 55d596b50a7aef5243eb293658292130ae336776..d64a736122d5004519167ee0f2a26eddb97df24d 100755
--- a/src/context/genproto.sh
+++ b/src/context/genproto.sh
@@ -20,9 +20,10 @@
 cd $(dirname $0)
 
 rm -rf proto/*.py
+rm -rf proto/__pycache__
 touch proto/__init__.py
 
 python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
 
-sed -i -E 's/(import\ .*)_pb2/from context.proto \1_pb2/g' proto/context_pb2.py
-sed -i -E 's/(import\ .*)_pb2/from context.proto \1_pb2/g' proto/context_pb2_grpc.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2_grpc.py
diff --git a/src/context/proto/context_pb2.py b/src/context/proto/context_pb2.py
index e4acb11a579694017d1ee5572f1f94848731802a..a41b1de47f4df97a6e90b42a02fab7556feafd34 100644
--- a/src/context/proto/context_pb2.py
+++ b/src/context/proto/context_pb2.py
@@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   syntax='proto3',
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"{\n\x07\x43ontext\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1f\n\x04topo\x18\x02 \x01(\x0b\x32\x11.context.Topology\x12(\n\x03\x63tl\x18\x03 \x01(\x0b\x32\x1b.context.TeraFlowController\"/\n\tContextId\x12\"\n\x0b\x63ontextUuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"m\n\x08Topology\x12#\n\x06topoId\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\x12\x1f\n\x06\x64\x65vice\x18\x03 \x03(\x0b\x32\x0f.context.Device\x12\x1b\n\x04link\x18\x04 \x03(\x0b\x32\r.context.Link\"1\n\x04Link\x12)\n\x0c\x65ndpointList\x18\x01 \x03(\x0b\x32\x13.context.EndPointId\"R\n\nTopologyId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1d\n\x06topoId\x18\x02 \x01(\x0b\x32\r.context.Uuid\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"\xda\x01\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12>\n\x14\x64\x65vOperationalStatus\x18\x04 \x01(\x0e\x32 .context.DeviceOperationalStatus\x12\'\n\x0c\x65ndpointList\x18\x05 \x03(\x0b\x32\x11.context.EndPoint\"%\n\x0c\x44\x65viceConfig\x12\x15\n\rdevice_config\x18\x01 \x01(\t\"C\n\x08\x45ndPoint\x12$\n\x07port_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x11\n\tport_type\x18\x02 \x01(\t\"t\n\nEndPointId\x12#\n\x06topoId\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12!\n\x06\x64\x65v_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x1e\n\x07port_id\x18\x03 \x01(\x0b\x32\r.context.Uuid\",\n\x08\x44\x65viceId\x12 \n\tdevice_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"K\n\x12TeraFlowController\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x11\n\tipaddress\x18\x02 \x01(\t\"Q\n\x14\x41uthenticationResult\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*4\n\x17\x44\x65viceOperationalStatus\x12\x0c\n\x08\x44ISABLED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x32\x44\n\x0e\x43ontextService\x12\x32\n\x0bGetTopology\x12\x0e.context.Empty\x1a\x11.context.Topology\"\x00\x62\x06proto3'
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"{\n\x07\x43ontext\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1f\n\x04topo\x18\x02 \x01(\x0b\x32\x11.context.Topology\x12(\n\x03\x63tl\x18\x03 \x01(\x0b\x32\x1b.context.TeraFlowController\"/\n\tContextId\x12\"\n\x0b\x63ontextUuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"m\n\x08Topology\x12#\n\x06topoId\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\x12\x1f\n\x06\x64\x65vice\x18\x03 \x03(\x0b\x32\x0f.context.Device\x12\x1b\n\x04link\x18\x04 \x03(\x0b\x32\r.context.Link\"S\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12)\n\x0c\x65ndpointList\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"R\n\nTopologyId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1d\n\x06topoId\x18\x02 \x01(\x0b\x32\r.context.Uuid\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"\xda\x01\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12>\n\x14\x64\x65vOperationalStatus\x18\x04 \x01(\x0e\x32 .context.DeviceOperationalStatus\x12\'\n\x0c\x65ndpointList\x18\x05 \x03(\x0b\x32\x11.context.EndPoint\"%\n\x0c\x44\x65viceConfig\x12\x15\n\rdevice_config\x18\x01 \x01(\t\"C\n\x08\x45ndPoint\x12$\n\x07port_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x11\n\tport_type\x18\x02 \x01(\t\"t\n\nEndPointId\x12#\n\x06topoId\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12!\n\x06\x64\x65v_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x1e\n\x07port_id\x18\x03 \x01(\x0b\x32\r.context.Uuid\",\n\x08\x44\x65viceId\x12 \n\tdevice_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"(\n\x06LinkId\x12\x1e\n\x07link_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"K\n\x12TeraFlowController\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x11\n\tipaddress\x18\x02 \x01(\t\"Q\n\x14\x41uthenticationResult\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*N\n\x17\x44\x65viceOperationalStatus\x12\x0f\n\x0bKEEP_STATUS\x10\x00\x12\x15\n\x08\x44ISABLED\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x0b\n\x07\x45NABLED\x10\x01\x32\xa2\x01\n\x0e\x43ontextService\x12\x32\n\x0bGetTopology\x12\x0e.context.Empty\x1a\x11.context.Topology\"\x00\x12+\n\x07\x41\x64\x64Link\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nDeleteLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
 )
 
 _DEVICEOPERATIONALSTATUS = _descriptor.EnumDescriptor(
@@ -31,25 +31,31 @@ _DEVICEOPERATIONALSTATUS = _descriptor.EnumDescriptor(
   create_key=_descriptor._internal_create_key,
   values=[
     _descriptor.EnumValueDescriptor(
-      name='DISABLED', index=0, number=0,
+      name='KEEP_STATUS', index=0, number=0,
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
     _descriptor.EnumValueDescriptor(
-      name='ENABLED', index=1, number=1,
+      name='DISABLED', index=1, number=-1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='ENABLED', index=2, number=1,
       serialized_options=None,
       type=None,
       create_key=_descriptor._internal_create_key),
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=1195,
-  serialized_end=1247,
+  serialized_start=1271,
+  serialized_end=1349,
 )
 _sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUS)
 
 DeviceOperationalStatus = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUS)
-DISABLED = 0
+KEEP_STATUS = 0
+DISABLED = -1
 ENABLED = 1
 
 
@@ -212,8 +218,15 @@ _LINK = _descriptor.Descriptor(
   create_key=_descriptor._internal_create_key,
   fields=[
     _descriptor.FieldDescriptor(
-      name='endpointList', full_name='context.Link.endpointList', index=0,
-      number=1, type=11, cpp_type=10, label=3,
+      name='link_id', full_name='context.Link.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='context.Link.endpointList', index=1,
+      number=2, type=11, cpp_type=10, label=3,
       has_default_value=False, default_value=[],
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
@@ -231,7 +244,7 @@ _LINK = _descriptor.Descriptor(
   oneofs=[
   ],
   serialized_start=320,
-  serialized_end=369,
+  serialized_end=403,
 )
 
 
@@ -269,8 +282,8 @@ _TOPOLOGYID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=371,
-  serialized_end=453,
+  serialized_start=405,
+  serialized_end=487,
 )
 
 
@@ -308,8 +321,8 @@ _CONSTRAINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=455,
-  serialized_end=518,
+  serialized_start=489,
+  serialized_end=552,
 )
 
 
@@ -368,8 +381,8 @@ _DEVICE = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=521,
-  serialized_end=739,
+  serialized_start=555,
+  serialized_end=773,
 )
 
 
@@ -400,8 +413,8 @@ _DEVICECONFIG = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=741,
-  serialized_end=778,
+  serialized_start=775,
+  serialized_end=812,
 )
 
 
@@ -439,8 +452,8 @@ _ENDPOINT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=780,
-  serialized_end=847,
+  serialized_start=814,
+  serialized_end=881,
 )
 
 
@@ -485,8 +498,8 @@ _ENDPOINTID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=849,
-  serialized_end=965,
+  serialized_start=883,
+  serialized_end=999,
 )
 
 
@@ -517,8 +530,40 @@ _DEVICEID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=967,
-  serialized_end=1011,
+  serialized_start=1001,
+  serialized_end=1045,
+)
+
+
+_LINKID = _descriptor.Descriptor(
+  name='LinkId',
+  full_name='context.LinkId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.LinkId.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1047,
+  serialized_end=1087,
 )
 
 
@@ -549,8 +594,8 @@ _UUID = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1013,
-  serialized_end=1033,
+  serialized_start=1089,
+  serialized_end=1109,
 )
 
 
@@ -588,8 +633,8 @@ _TERAFLOWCONTROLLER = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1035,
-  serialized_end=1110,
+  serialized_start=1111,
+  serialized_end=1186,
 )
 
 
@@ -627,8 +672,8 @@ _AUTHENTICATIONRESULT = _descriptor.Descriptor(
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=1112,
-  serialized_end=1193,
+  serialized_start=1188,
+  serialized_end=1269,
 )
 
 _CONTEXT.fields_by_name['contextId'].message_type = _CONTEXTID
@@ -638,6 +683,7 @@ _CONTEXTID.fields_by_name['contextUuid'].message_type = _UUID
 _TOPOLOGY.fields_by_name['topoId'].message_type = _TOPOLOGYID
 _TOPOLOGY.fields_by_name['device'].message_type = _DEVICE
 _TOPOLOGY.fields_by_name['link'].message_type = _LINK
+_LINK.fields_by_name['link_id'].message_type = _LINKID
 _LINK.fields_by_name['endpointList'].message_type = _ENDPOINTID
 _TOPOLOGYID.fields_by_name['contextId'].message_type = _CONTEXTID
 _TOPOLOGYID.fields_by_name['topoId'].message_type = _UUID
@@ -650,6 +696,7 @@ _ENDPOINTID.fields_by_name['topoId'].message_type = _TOPOLOGYID
 _ENDPOINTID.fields_by_name['dev_id'].message_type = _DEVICEID
 _ENDPOINTID.fields_by_name['port_id'].message_type = _UUID
 _DEVICEID.fields_by_name['device_id'].message_type = _UUID
+_LINKID.fields_by_name['link_id'].message_type = _UUID
 _TERAFLOWCONTROLLER.fields_by_name['ctl_id'].message_type = _CONTEXTID
 _AUTHENTICATIONRESULT.fields_by_name['ctl_id'].message_type = _CONTEXTID
 DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
@@ -664,6 +711,7 @@ DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
 DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
 DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
 DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
+DESCRIPTOR.message_types_by_name['LinkId'] = _LINKID
 DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
 DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
 DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
@@ -754,6 +802,13 @@ DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Messag
   })
 _sym_db.RegisterMessage(DeviceId)
 
+LinkId = _reflection.GeneratedProtocolMessageType('LinkId', (_message.Message,), {
+  'DESCRIPTOR' : _LINKID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkId)
+  })
+_sym_db.RegisterMessage(LinkId)
+
 Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
   'DESCRIPTOR' : _UUID,
   '__module__' : 'context_pb2'
@@ -784,8 +839,8 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
   index=0,
   serialized_options=None,
   create_key=_descriptor._internal_create_key,
-  serialized_start=1249,
-  serialized_end=1317,
+  serialized_start=1352,
+  serialized_end=1514,
   methods=[
   _descriptor.MethodDescriptor(
     name='GetTopology',
@@ -797,6 +852,26 @@ _CONTEXTSERVICE = _descriptor.ServiceDescriptor(
     serialized_options=None,
     create_key=_descriptor._internal_create_key,
   ),
+  _descriptor.MethodDescriptor(
+    name='AddLink',
+    full_name='context.ContextService.AddLink',
+    index=1,
+    containing_service=None,
+    input_type=_LINK,
+    output_type=_LINKID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='DeleteLink',
+    full_name='context.ContextService.DeleteLink',
+    index=2,
+    containing_service=None,
+    input_type=_LINKID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
 ])
 _sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
 
diff --git a/src/context/proto/context_pb2_grpc.py b/src/context/proto/context_pb2_grpc.py
index 51c61c053221a4bd98b322b96e3ea3ba95e7cbeb..bfae5108cc4bc9a2f5ad93d0cbf25dbcfc65fc47 100644
--- a/src/context/proto/context_pb2_grpc.py
+++ b/src/context/proto/context_pb2_grpc.py
@@ -2,7 +2,7 @@
 """Client and server classes corresponding to protobuf-defined services."""
 import grpc
 
-from context.proto import context_pb2 as context__pb2
+from . import context_pb2 as context__pb2
 
 
 class ContextServiceStub(object):
@@ -19,6 +19,16 @@ class ContextServiceStub(object):
                 request_serializer=context__pb2.Empty.SerializeToString,
                 response_deserializer=context__pb2.Topology.FromString,
                 )
+        self.AddLink = channel.unary_unary(
+                '/context.ContextService/AddLink',
+                request_serializer=context__pb2.Link.SerializeToString,
+                response_deserializer=context__pb2.LinkId.FromString,
+                )
+        self.DeleteLink = channel.unary_unary(
+                '/context.ContextService/DeleteLink',
+                request_serializer=context__pb2.LinkId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
 
 
 class ContextServiceServicer(object):
@@ -30,6 +40,18 @@ class ContextServiceServicer(object):
         context.set_details('Method not implemented!')
         raise NotImplementedError('Method not implemented!')
 
+    def AddLink(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def DeleteLink(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
 
 def add_ContextServiceServicer_to_server(servicer, server):
     rpc_method_handlers = {
@@ -38,6 +60,16 @@ def add_ContextServiceServicer_to_server(servicer, server):
                     request_deserializer=context__pb2.Empty.FromString,
                     response_serializer=context__pb2.Topology.SerializeToString,
             ),
+            'AddLink': grpc.unary_unary_rpc_method_handler(
+                    servicer.AddLink,
+                    request_deserializer=context__pb2.Link.FromString,
+                    response_serializer=context__pb2.LinkId.SerializeToString,
+            ),
+            'DeleteLink': grpc.unary_unary_rpc_method_handler(
+                    servicer.DeleteLink,
+                    request_deserializer=context__pb2.LinkId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
     }
     generic_handler = grpc.method_handlers_generic_handler(
             'context.ContextService', rpc_method_handlers)
@@ -64,3 +96,37 @@ class ContextService(object):
             context__pb2.Topology.FromString,
             options, channel_credentials,
             insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def AddLink(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/AddLink',
+            context__pb2.Link.SerializeToString,
+            context__pb2.LinkId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def DeleteLink(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/DeleteLink',
+            context__pb2.LinkId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/context/run_integration_tests.sh b/src/context/run_integration_tests.sh
deleted file mode 100755
index ab607bed4c644caed9722ae99f6bba7151e82a5c..0000000000000000000000000000000000000000
--- a/src/context/run_integration_tests.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Make folder containing the script the root folder for its execution
-cd $(dirname $0)
-
-ENDPOINT=($(kubectl --namespace teraflow-development get service contextservice -o 'jsonpath={.spec.clusterIP} {.spec.ports[?(@.name=="grpc")].port}'))
-docker run -it --env TEST_TARGET_ADDRESS=${ENDPOINT[0]} --env TEST_TARGET_PORT=${ENDPOINT[1]} context_service:test
diff --git a/src/context/run_unitary_tests.sh b/src/context/run_unitary_tests.sh
deleted file mode 100755
index 08e941f31502fe8dc32ffcfc1563c2223bb4d8d3..0000000000000000000000000000000000000000
--- a/src/context/run_unitary_tests.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Make folder containing the script the root folder for its execution
-cd $(dirname $0)
-
-mkdir -p data
-pytest -v --log-level=DEBUG tests/test_unitary.py
diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py
index 599c8b2c4985957426c56b922937fc529283e5fb..6a189d67835a3185c3d87f52bd76dd9582e261dc 100644
--- a/src/context/service/ContextServiceServicerImpl.py
+++ b/src/context/service/ContextServiceServicerImpl.py
@@ -1,6 +1,11 @@
+from typing import Dict, List, Set, Tuple
 import grpc, logging
 from prometheus_client import Counter, Histogram
-from context.proto.context_pb2 import Topology
+from common.Checkers import chk_string
+from common.database.api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.Database import Database
+from common.exceptions.ServiceException import ServiceException
+from context.proto.context_pb2 import Empty, Link, LinkId, Topology
 from context.proto.context_pb2_grpc import ContextServiceServicer
 
 LOGGER = logging.getLogger(__name__)
@@ -14,24 +19,199 @@ GETTOPOLOGY_COUNTER_FAILED     = Counter  ('context_gettopology_counter_failed',
 GETTOPOLOGY_HISTOGRAM_DURATION = Histogram('context_gettopology_histogram_duration',
                                           'Context:GetTopology histogram of request duration')
 
+ADDLINK_COUNTER_STARTED    = Counter  ('context_addlink_counter_started',
+                                       'Context:AddLink counter of requests started'  )
+ADDLINK_COUNTER_COMPLETED  = Counter  ('context_addlink_counter_completed',
+                                       'Context:AddLink counter of requests completed')
+ADDLINK_COUNTER_FAILED     = Counter  ('context_addlink_counter_failed',
+                                       'Context:AddLink counter of requests failed'   )
+ADDLINK_HISTOGRAM_DURATION = Histogram('context_addlink_histogram_duration',
+                                       'Context:AddLink histogram of request duration')
+
+DELETELINK_COUNTER_STARTED    = Counter  ('context_deletelink_counter_started',
+                                          'Context:DeleteLink counter of requests started'  )
+DELETELINK_COUNTER_COMPLETED  = Counter  ('context_deletelink_counter_completed',
+                                          'Context:DeleteLink counter of requests completed')
+DELETELINK_COUNTER_FAILED     = Counter  ('context_deletelink_counter_failed',
+                                          'Context:DeleteLink counter of requests failed'   )
+DELETELINK_HISTOGRAM_DURATION = Histogram('context_deletelink_histogram_duration',
+                                          'Context:DeleteLink histogram of request duration')
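+# The *_HISTOGRAM_DURATION metrics are applied below as decorators to time each RPC; the counters track started/completed/failed requests.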
+
 class ContextServiceServicerImpl(ContextServiceServicer):
-    def __init__(self, database):
+    def __init__(self, database : Database):
         LOGGER.debug('Creating Servicer...')
         self.database = database
         LOGGER.debug('Servicer Created')
 
     @GETTOPOLOGY_HISTOGRAM_DURATION.time()
-    def GetTopology(self, request, context):
-        # request=Empty(), returns=Topology()
+    def GetTopology(self, request : Empty, grpc_context : grpc.ServicerContext) -> Topology:
         GETTOPOLOGY_COUNTER_STARTED.inc()
         try:
             LOGGER.debug('GetTopology request: {}'.format(str(request)))
-            reply = Topology(**self.database.get_topology())
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
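+            # create() is assumed to return the existing entity or create an empty one, so this also works on a freshly cleared database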
+            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
+            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
+
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            json_topology = db_topology.dump()
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Topology(**json_topology)
             LOGGER.debug('GetTopology reply: {}'.format(str(reply)))
             GETTOPOLOGY_COUNTER_COMPLETED.inc()
             return reply
-        except:
-            LOGGER.exception('GetTopology exception')
-            GETTOPOLOGY_COUNTER_FAILED.inc()
-            context.set_code(grpc.StatusCode.INTERNAL)
-            return Topology()
+        except ServiceException as e:                               # pragma: no cover (ServiceException not thrown)
+            grpc_context.abort(e.code, e.details)                   # pragma: no cover (ServiceException not thrown)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('GetTopology exception')               # pragma: no cover
+            GETTOPOLOGY_COUNTER_FAILED.inc()                        # pragma: no cover
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+
+    @ADDLINK_HISTOGRAM_DURATION.time()
+    def AddLink(self, request : Link, grpc_context : grpc.ServicerContext) -> LinkId:
+        ADDLINK_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('AddLink request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            try:
+                link_id = chk_string('link.link_id.link_id.uuid',
+                                     request.link_id.link_id.uuid,
+                                     allow_empty=False)
+            except Exception as e:
+                LOGGER.exception('Invalid arguments:')
+                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
+            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
+
+            if db_topology.links.contains(link_id):
+                msg = 'Link({}) already exists in the database.'
+                msg = msg.format(link_id)
+                raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
+
+            added_devices_and_endpoints : Dict[str, Set[str]] = {}
+            device_endpoint_pairs : List[Tuple[str, str]] = []
+            for i,endpoint in enumerate(request.endpointList):
+                try:
+                    ep_context_id  = chk_string('endpoint[#{}].topoId.contextId.contextUuid.uuid'.format(i),
+                                                endpoint.topoId.contextId.contextUuid.uuid,
+                                                allow_empty=True)
+                    ep_topology_id = chk_string('endpoint[#{}].topoId.topoId.uuid'.format(i),
+                                                endpoint.topoId.topoId.uuid,
+                                                allow_empty=True)
+                    ep_device_id   = chk_string('endpoint[#{}].dev_id.device_id.uuid'.format(i),
+                                                endpoint.dev_id.device_id.uuid,
+                                                allow_empty=False)
+                    ep_port_id     = chk_string('endpoint[#{}].port_id.uuid'.format(i),
+                                                endpoint.port_id.uuid,
+                                                allow_empty=False)
+                except Exception as e:
+                    LOGGER.exception('Invalid arguments:')
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+                if (len(ep_context_id) > 0) and (ep_context_id != DEFAULT_CONTEXT_ID):
+                    msg = ' '.join([
+                        'Unsupported Context({}) in Endpoint(#{}) of Link({}).',
+                        'Only default Context({}) is currently supported.',
+                        'Optionally, leave field empty to use default Context.',
+                    ])
+                    msg = msg.format(ep_context_id, i, link_id, DEFAULT_CONTEXT_ID)
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+                elif len(ep_context_id) == 0:
+                    ep_context_id = DEFAULT_CONTEXT_ID
+
+                if (len(ep_topology_id) > 0) and (ep_topology_id != DEFAULT_TOPOLOGY_ID):
+                    msg = ' '.join([
+                        'Unsupported Topology({}) in Endpoint(#{}) of Link({}).',
+                        'Only default Topology({}) is currently supported.',
+                        'Optionally, leave field empty to use default Topology.',
+                    ])
+                    msg = msg.format(ep_topology_id, i, link_id, DEFAULT_TOPOLOGY_ID)
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+                elif len(ep_topology_id) == 0:
+                    ep_topology_id = DEFAULT_TOPOLOGY_ID
+
+                if ep_device_id in added_devices_and_endpoints:
+                    msg = 'Duplicated Device({}) in Endpoint(#{}) of Link({}).'
+                    msg = msg.format(ep_device_id, i, link_id)
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+                if not db_topology.devices.contains(ep_device_id):
+                    msg = 'Device({}) in Endpoint(#{}) of Link({}) does not exist in the database.'
+                    msg = msg.format(ep_device_id, i, link_id)
+                    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+                added_device_and_endpoints = added_devices_and_endpoints.setdefault(ep_device_id, set())
+
+                # should never happen since the same device cannot appear twice in the link
+                if ep_port_id in added_device_and_endpoints:                                # pragma: no cover
+                    msg = 'Duplicated Device({})/Port({}) in Endpoint(#{}) of Link({}).'    # pragma: no cover
+                    msg = msg.format(ep_device_id, ep_port_id, i, link_id)                  # pragma: no cover
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)           # pragma: no cover
+
+                if not db_topology.device(ep_device_id).endpoints.contains(ep_port_id):
+                    msg = 'Device({})/Port({}) in Endpoint(#{}) of Link({}) does not exist in the database.'
+                    msg = msg.format(ep_device_id, ep_port_id, i, link_id)
+                    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+                added_device_and_endpoints.add(ep_port_id)
+                device_endpoint_pairs.append((ep_device_id, ep_port_id))
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_link = db_topology.link(link_id).create()
+            for device_id,endpoint_id in device_endpoint_pairs:
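+                # link endpoints are stored under a composite '<device_uuid>/<port_uuid>' key so they remain unique within the link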
+                link_endpoint_id = '{}/{}'.format(device_id, endpoint_id)
+                db_endpoint = db_topology.device(device_id).endpoint(endpoint_id)
+                db_link.endpoint(link_endpoint_id).create(db_endpoint)
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = LinkId(**db_link.dump_id())
+            LOGGER.debug('AddLink reply: {}'.format(str(reply)))
+            ADDLINK_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('AddLink exception')                   # pragma: no cover
+            ADDLINK_COUNTER_FAILED.inc()                            # pragma: no cover
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+
+    @DELETELINK_HISTOGRAM_DURATION.time()
+    def DeleteLink(self, request : LinkId, grpc_context : grpc.ServicerContext) -> Empty:
+        DELETELINK_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('DeleteLink request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            try:
+                link_id = chk_string('link_id.link_id.uuid',
+                                     request.link_id.uuid,
+                                     allow_empty=False)
+            except Exception as e:
+                LOGGER.exception('Invalid arguments:')
+                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+            db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
+            db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
+
+            if not db_topology.links.contains(link_id):
+                msg = 'Link({}) does not exist in the database.'
+                msg = msg.format(link_id)
+                raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_topology.link(link_id).delete()
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Empty()
+            LOGGER.debug('DeleteLink reply: {}'.format(str(reply)))
+            DELETELINK_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('DeleteLink exception')                # pragma: no cover
+            DELETELINK_COUNTER_FAILED.inc()                         # pragma: no cover
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
diff --git a/src/context/tests/Dockerfile b/src/context/tests/Dockerfile
deleted file mode 100644
index ed70aa86e5c2db5eddb8bf7fc4106bdcc7f8323c..0000000000000000000000000000000000000000
--- a/src/context/tests/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM context:latest
-
-# Run integration tests
-ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", "context/tests/test_integration.py"]
diff --git a/src/context/tests/test_integration.py b/src/context/tests/test_integration.py
deleted file mode 100644
index eab068b493a06754ec335ea118fa60e671fddec7..0000000000000000000000000000000000000000
--- a/src/context/tests/test_integration.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import logging, os, pytest, sys
-
-from pathlib import Path
-sys.path.append(__file__.split('src')[0] + 'src')
-print(sys.path)
-
-from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import Empty
-from .tools.ValidateTopology import validate_topology_dict
-
-LOGGER = logging.getLogger(__name__)
-LOGGER.setLevel(logging.DEBUG)
-
-@pytest.fixture(scope='session')
-def remote_context_client():
-    address = os.environ.get('TEST_TARGET_ADDRESS')
-    if(address is None): raise Exception('EnvironmentVariable(TEST_TARGET_ADDRESS) not specified')
-    port = os.environ.get('TEST_TARGET_PORT')
-    if(port is None): raise Exception('EnvironmentVariable(TEST_TARGET_PORT) not specified')
-    return ContextClient(address=address, port=port)
-
-def test_remote_get_topology(remote_context_client):
-    response = remote_context_client.GetTopology(Empty())
-    validate_topology_dict(response)
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
index 641f6f6dfba459d723042f6ff5556b1963d26971..104736af195f0eb0643563f816bdaeefa5b0743b 100644
--- a/src/context/tests/test_unitary.py
+++ b/src/context/tests/test_unitary.py
@@ -1,36 +1,219 @@
-#import logging, pytest, sys
-
-#from pathlib import Path
-#sys.path.append(__file__.split('src')[0] + 'src')
-#print(sys.path)
-
-#from common.database.Factory import get_database, DatabaseEngineEnum
-#from context.client.ContextClient import ContextClient
-#from context.proto.context_pb2 import Empty
-#from context.service.ContextService import ContextService
-#from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
-#from context.tests.tools.ValidateTopology import validate_topology_dict
-
-#LOGGER = logging.getLogger(__name__)
-#LOGGER.setLevel(logging.DEBUG)
-
-#@pytest.fixture(scope='session')
-#def local_context_service():
-#    database = get_database(engine=DatabaseEngineEnum.INMEMORY, filepath='data/topo_nsfnet.json')
-#    _service = ContextService(
-#        database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
-#    _service.start()
-#    yield _service
-#    _service.stop()
-
-#@pytest.fixture(scope='session')
-#def local_context_client(local_context_service):
-#    return ContextClient(address='127.0.0.1', port=GRPC_SERVICE_PORT)
-
-#def test_local_get_topology(local_context_client):
-#    response = local_context_client.GetTopology(Empty())
-#    validate_topology_dict(response)
-
-def test_nothing():
-    pass
+import copy, grpc, logging, pytest
+from google.protobuf.json_format import MessageToDict
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.api.Database import Database
+from common.database.tests.script import populate_example
+from common.tests.Assertions import validate_empty, validate_link_id, validate_topology
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Empty, Link, LinkId
+from context.service.ContextService import ContextService
+from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 
+port = 10000 + GRPC_SERVICE_PORT # use a port above the privileged range (first 1024) so tests do not need elevated permissions
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+LINK_ID = {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}}
+LINK = {
+    'link_id': {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}},
+    'endpointList' : [
+        {
+            'topoId': {
+                'contextId': {'contextUuid': {'uuid': 'admin'}},
+                'topoId': {'uuid': 'admin'}
+            },
+            'dev_id': {'device_id': {'uuid': 'dev1'}},
+            'port_id': {'uuid' : 'to-dev2'}
+        },
+        {
+            'topoId': {
+                'contextId': {'contextUuid': {'uuid': 'admin'}},
+                'topoId': {'uuid': 'admin'}
+            },
+            'dev_id': {'device_id': {'uuid': 'dev2'}},
+            'port_id': {'uuid' : 'to-dev1'}
+        },
+    ]
+}
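+# NOTE: dev1/dev2 and their ports to-dev2/to-dev1 are created by populate_example(), which is also expected to pre-create this link (see test_add_link_already_exists).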
+
+@pytest.fixture(scope='session')
+def context_database():
+    _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
+    return _database
+
+@pytest.fixture(scope='session')
+def context_service(context_database : Database):
+    _service = ContextService(
+        context_database, port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def context_client(context_service):
+    _client = ContextClient(address='127.0.0.1', port=port)
+    yield _client
+    _client.close()
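+# NOTE: the fixtures are session-scoped, so a single in-memory database, gRPC server and client are shared and the tests below are order-dependent.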
+
+def test_get_topology_empty(context_client : ContextClient, context_database : Database):
+    # should work
+    context_database.clear_all()
+    validate_topology(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_get_topology_completed(context_client : ContextClient, context_database : Database):
+    # should work
+    populate_example(context_database)
+    validate_topology(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_delete_link_empty_uuid(context_client : ContextClient):
+    # should fail with invalid argument (empty link uuid)
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link_id = copy.deepcopy(LINK_ID)
+        copy_link_id['link_id']['uuid'] = ''
+        context_client.DeleteLink(LinkId(**copy_link_id))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    assert e.value.details() == 'link_id.link_id.uuid() string is empty.'
+
+def test_add_link_already_exists(context_client : ContextClient):
+    # should fail with link already exists
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        context_client.AddLink(Link(**LINK))
+    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
+    assert e.value.details() == 'Link(dev1/to-dev2 ==> dev2/to-dev1) already exists in the database.'
+
+def test_delete_link(context_client : ContextClient):
+    # should work
+    validate_empty(MessageToDict(
+        context_client.DeleteLink(LinkId(**LINK_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_delete_link_not_existing(context_client : ContextClient):
+    # should fail with link not found
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        context_client.DeleteLink(LinkId(**LINK_ID))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    assert e.value.details() == 'Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
+
+def test_add_link_uuid_empty(context_client : ContextClient):
+    # should fail with link uuid empty
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['link_id']['link_id']['uuid'] = ''
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    assert e.value.details() == 'link.link_id.link_id.uuid() string is empty.'
+
+def test_add_link_endpoint_wrong_context(context_client : ContextClient):
+    # should fail with unsupported context
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Unsupported Context(wrong-context) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1).',
+        'Only default Context(admin) is currently supported.',
+        'Optionally, leave field empty to use default Context.',
+    ])
+    assert e.value.details() == msg
+
+def test_add_link_endpoint_wrong_topology(context_client : ContextClient):
+    # should fail with unsupported topology
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Unsupported Topology(wrong-topo) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1).',
+        'Only default Topology(admin) is currently supported.',
+        'Optionally, leave field empty to use default Topology.',
+    ])
+    assert e.value.details() == msg
+
+def test_add_link_empty_device_uuid(context_client : ContextClient):
+    # should fail with empty device uuid
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    assert e.value.details() == 'endpoint[#0].dev_id.device_id.uuid() string is empty.'
+
+def test_add_link_endpoint_wrong_device(context_client : ContextClient):
+    # should fail with wrong endpoint device
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Device(wrong-device) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
+    assert e.value.details() == msg
+
+def test_add_link_endpoint_wrong_port(context_client : ContextClient):
+    # should fail with wrong endpoint port
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Device(dev1)/Port(wrong-port) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
+    assert e.value.details() == msg
+
+def test_add_link_endpoint_duplicated_device(context_client : ContextClient):
+    # should fail with duplicated endpoint device
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][1]['dev_id']['device_id']['uuid'] = 'dev1'
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'Duplicated Device(dev1) in Endpoint(#1) of Link(dev1/to-dev2 ==> dev2/to-dev1).'
+    assert e.value.details() == msg
+
+def test_add_link_empty_port_uuid(context_client : ContextClient):
+    # should fail with empty port uuid
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_link = copy.deepcopy(LINK)
+        copy_link['endpointList'][0]['port_id']['uuid'] = ''
+        context_client.AddLink(Link(**copy_link))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    assert e.value.details() == 'endpoint[#0].port_id.uuid() string is empty.'
+
+def test_add_link(context_client : ContextClient):
+    # should work
+    validate_link_id(MessageToDict(
+        context_client.AddLink(Link(**LINK)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_delete_link_2(context_client : ContextClient):
+    # should work
+    validate_empty(MessageToDict(
+        context_client.DeleteLink(LinkId(**LINK_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_link_default_endpoint_context_topology(context_client : ContextClient):
+    # should work
+    copy_link = copy.deepcopy(LINK)
+    copy_link['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
+    copy_link['endpointList'][0]['topoId']['topoId']['uuid'] = ''
+    validate_link_id(MessageToDict(
+            context_client.AddLink(Link(**copy_link)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+def test_get_topology_completed_2(context_client : ContextClient):
+    # should work
+    validate_topology(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
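+
+# Local run sketch (replaces the removed run_unitary_tests.sh; assumes PYTHONPATH includes the src/ folder):
+#   pytest -v --log-level=DEBUG src/context/tests/test_unitary.py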
diff --git a/src/context/tests/tools/ValidateTopology.py b/src/context/tests/tools/ValidateTopology.py
deleted file mode 100644
index b52546e39c27292bec4f11755dade987929e5e71..0000000000000000000000000000000000000000
--- a/src/context/tests/tools/ValidateTopology.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def validate_topology_dict(topology):
-    assert type(topology) is dict
-    assert len(topology.keys()) > 0
-    assert 'topoId' in topology
-    assert 'device' in topology
-    assert 'link' in topology
diff --git a/src/device/.gitlab-ci.yml b/src/device/.gitlab-ci.yml
index 6921f17094ac9521dea00a3c0e1d300c4cbbd0b6..8fd0205c895e67f3d63c8f56cd8434280dd19e25 100644
--- a/src/device/.gitlab-ci.yml
+++ b/src/device/.gitlab-ci.yml
@@ -17,7 +17,7 @@ build device:
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-test unitary device:
+test device:
   variables:
     IMAGE_NAME: 'device' # name of the microservice
     IMAGE_NAME_TEST: 'device-test' # name of the microservice
@@ -44,12 +44,16 @@ test unitary device:
       - src/$IMAGE_NAME/**
       - .gitlab-ci.yml
 
-# Deployment of the monitoring service in Kubernetes Cluster
+# Deployment of the service in Kubernetes Cluster
 deploy device:
   stage: deploy
   needs:
     - build device
-    - test unitary device
+    - test device
+    - dependencies all
   script:
-    - kubectl apply -f "manisfests/deviceservice.yaml"
-  when: manual
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/deviceservice.yaml"
+    - kubectl delete pods --selector app=deviceservice
+    - kubectl get all
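+    # Note: deleting the pods makes the Deployment recreate them, pulling the freshly pushed ':latest' image (imagePullPolicy: Always in the manifest)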
diff --git a/src/device/client/DeviceClient.py b/src/device/client/DeviceClient.py
index 3e9f83f3f5459a738f7863955b78164c81ec21a3..a517ebdd1551465f9404714ec07bd7326cad7c2d 100644
--- a/src/device/client/DeviceClient.py
+++ b/src/device/client/DeviceClient.py
@@ -1,5 +1,6 @@
 import grpc, logging
 from common.tools.RetryDecorator import retry, delay_exponential
+from device.proto.context_pb2 import Device, DeviceId, Empty
 from device.proto.device_pb2_grpc import DeviceServiceStub
 
 LOGGER = logging.getLogger(__name__)
@@ -25,21 +26,21 @@ class DeviceClient:
         self.stub = None
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def AddDevice(self, request):
+    def AddDevice(self, request : Device) -> DeviceId:
         LOGGER.debug('AddDevice request: {}'.format(request))
         response = self.stub.AddDevice(request)
         LOGGER.debug('AddDevice result: {}'.format(response))
         return response
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def ConfigureDevice(self, request):
+    def ConfigureDevice(self, request : Device) -> DeviceId:
         LOGGER.debug('ConfigureDevice request: {}'.format(request))
         response = self.stub.ConfigureDevice(request)
         LOGGER.debug('ConfigureDevice result: {}'.format(response))
         return response
 
     @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
-    def DeleteDevice(self, request):
+    def DeleteDevice(self, request : DeviceId) -> Empty:
         LOGGER.debug('DeleteDevice request: {}'.format(request))
         response = self.stub.DeleteDevice(request)
         LOGGER.debug('DeleteDevice result: {}'.format(response))
diff --git a/src/device/genproto.sh b/src/device/genproto.sh
index 725f2f95fa4d15b7fa7a5a001131010057dca129..bca79fc07c8f91164c23e1e01cc7c08bca35fb2d 100755
--- a/src/device/genproto.sh
+++ b/src/device/genproto.sh
@@ -19,7 +19,7 @@
 # Make folder containing the script the root folder for its execution
 cd $(dirname $0)
 
-rm -rf proto/*.py proto/*.proto
+rm -rf proto/*.py
 rm -rf proto/__pycache__
 touch proto/__init__.py
 
diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index 25aaa429678ea9aa78293af5469e3c54ac177f6e..71c4bfc6d8dc809a837669614aaf60c7e6578fd9 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -2,17 +2,16 @@ from typing import List, Tuple
 import grpc, logging
 from prometheus_client import Counter, Histogram
 from common.Checkers import chk_options, chk_string
+from common.database.api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
 from common.database.api.Database import Database
 from common.database.api.context.OperationalStatus import OperationalStatus, operationalstatus_enum_values, \
     to_operationalstatus_enum
+from common.exceptions.ServiceException import ServiceException
 from device.proto.context_pb2 import DeviceId, Device, Empty
 from device.proto.device_pb2_grpc import DeviceServiceServicer
 
 LOGGER = logging.getLogger(__name__)
 
-DEFAULT_CONTEXT_ID = 'admin'
-DEFAULT_TOPOLOGY_ID = 'admin'
-
 ADDDEVICE_COUNTER_STARTED    = Counter  ('device_adddevice_counter_started',
                                           'Device:AddDevice counter of requests started'  )
 ADDDEVICE_COUNTER_COMPLETED  = Counter  ('device_adddevice_counter_completed',
@@ -40,12 +39,6 @@ DELETEDEVICE_COUNTER_FAILED     = Counter  ('device_deletedevice_counter_failed'
 DELETEDEVICE_HISTOGRAM_DURATION = Histogram('device_deletedevice_histogram_duration',
                                             'Device:DeleteDevice histogram of request duration')
 
-class ServiceException(Exception):
-    def __init__(self, code : grpc.StatusCode, details : str) -> None:
-        self.code = code
-        self.details = details
-        super().__init__()
-
 class DeviceServiceServicerImpl(DeviceServiceServicer):
     def __init__(self, database : Database):
         LOGGER.debug('Creating Servicer...')
@@ -60,16 +53,29 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
             try:
-                device_uuid = chk_string('device_uuid', request.device_id.device_id.uuid, allow_empty=False)
-                device_type = chk_string('device_type', request.device_type, allow_empty=False)
-                device_config = chk_string('device_config', request.device_config.device_config, allow_empty=True)
-                device_opstat = chk_options('devOperationalStatus', request.devOperationalStatus,
+                device_id     = chk_string ('device.device_id.device_id.uuid',
+                                            request.device_id.device_id.uuid,
+                                            allow_empty=False)
+                device_type   = chk_string ('device.device_type',
+                                            request.device_type,
+                                            allow_empty=False)
+                device_config = chk_string ('device.device_config.device_config',
+                                            request.device_config.device_config,
+                                            allow_empty=True)
+                device_opstat = chk_options('device.devOperationalStatus',
+                                            request.devOperationalStatus,
                                             operationalstatus_enum_values())
             except Exception as e:
                 LOGGER.exception('Invalid arguments:')
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
 
             device_opstat = to_operationalstatus_enum(device_opstat)
+            # should not happen because gRPC limits accepted values in enums
+            if device_opstat is None:                                           # pragma: no cover
+                msg = 'Unsupported OperationalStatus({}).'                      # pragma: no cover
+                msg = msg.format(request.devOperationalStatus)                  # pragma: no cover
+                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)   # pragma: no cover
+
             if device_opstat == OperationalStatus.KEEP_STATE:
                 msg = ' '.join([
                     'Device has to be created with either ENABLED/DISABLED Operational State.',
@@ -80,71 +86,81 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
 
-            if db_topology.devices.contains(device_uuid):
-                msg = 'device_uuid({}) already exists.'
-                msg = msg.format(device_uuid)
+            if db_topology.devices.contains(device_id):
+                msg = 'Device({}) already exists in the database.'
+                msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
 
             added_endpoint_uuids = set()
             endpoint_pairs : List[Tuple[str, str]] = []
             for i,endpoint in enumerate(request.endpointList):
-                contextId = endpoint.port_id.topoId.contextId.contextUuid.uuid
-                if (len(contextId) > 0) and (contextId != DEFAULT_CONTEXT_ID):
+                try:
+                    ep_context_id  = chk_string('endpoint[#{}].port_id.topoId.contextId.contextUuid.uuid'.format(i),
+                                                endpoint.port_id.topoId.contextId.contextUuid.uuid,
+                                                allow_empty=True)
+                    ep_topology_id = chk_string('endpoint[#{}].port_id.topoId.topoId.uuid'.format(i),
+                                                endpoint.port_id.topoId.topoId.uuid,
+                                                allow_empty=True)
+                    ep_device_id   = chk_string('endpoint[#{}].port_id.dev_id.device_id.uuid'.format(i),
+                                                endpoint.port_id.dev_id.device_id.uuid,
+                                                allow_empty=True)
+                    ep_port_id     = chk_string('endpoint[#{}].port_id.port_id.uuid'.format(i),
+                                                endpoint.port_id.port_id.uuid,
+                                                allow_empty=False)
+                    ep_port_type   = chk_string('endpoint[#{}].port_type'.format(i),
+                                                endpoint.port_type,
+                                                allow_empty=False)
+                except Exception as e:
+                    LOGGER.exception('Invalid arguments:')
+                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+                if (len(ep_context_id) > 0) and (ep_context_id != DEFAULT_CONTEXT_ID):
                     msg = ' '.join([
-                        'Unsupported context_id({}) in endpoint #{}.',
-                        'Only default context_id({}) is currently supported.',
-                        'Optionally, leave field empty to use default context_id.',
+                        'Unsupported Context({}) in Endpoint(#{}) of Device({}).',
+                        'Only default Context({}) is currently supported.',
+                        'Optionally, leave field empty to use default Context.',
                     ])
-                    msg = msg.format(contextId, i, DEFAULT_CONTEXT_ID)
+                    msg = msg.format(ep_context_id, i, device_id, DEFAULT_CONTEXT_ID)
                     raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(contextId) == 0:
-                    contextId = DEFAULT_CONTEXT_ID
+                elif len(ep_context_id) == 0:
+                    ep_context_id = DEFAULT_CONTEXT_ID
 
-                topoId = endpoint.port_id.topoId.topoId.uuid
-                if (len(topoId) > 0) and (topoId != DEFAULT_TOPOLOGY_ID):
+                if (len(ep_topology_id) > 0) and (ep_topology_id != DEFAULT_TOPOLOGY_ID):
                     msg = ' '.join([
-                        'Unsupported topology_id({}) in endpoint #{}.',
-                        'Only default topology_id({}) is currently supported.',
-                        'Optionally, leave field empty to use default topology_id.',
+                        'Unsupported Topology({}) in Endpoint(#{}) of Device({}).',
+                        'Only default Topology({}) is currently supported.',
+                        'Optionally, leave field empty to use default Topology.',
                     ])
-                    msg = msg.format(topoId, i, DEFAULT_TOPOLOGY_ID)
+                    msg = msg.format(ep_topology_id, i, device_id, DEFAULT_TOPOLOGY_ID)
                     raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(topoId) == 0:
-                    topoId = DEFAULT_TOPOLOGY_ID
+                elif len(ep_topology_id) == 0:
+                    ep_topology_id = DEFAULT_TOPOLOGY_ID
 
-                dev_id = endpoint.port_id.dev_id.device_id.uuid
-                if (len(dev_id) > 0) and (dev_id != device_uuid):
+                if (len(ep_device_id) > 0) and (ep_device_id != device_id):
                     msg = ' '.join([
-                        'Wrong device_id({}) in endpoint #{}.',
-                        'Parent specified in message is device_id({}).',
-                        'Optionally, leave field empty to use parent device_id.',
+                        'Wrong Device({}) in Endpoint(#{}).',
+                        'Parent specified in message is Device({}).',
+                        'Optionally, leave field empty to use parent Device.',
                     ])
-                    msg = msg.format(dev_id, i, device_uuid)
+                    msg = msg.format(ep_device_id, i, device_id)
                     raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(dev_id) == 0:
-                    dev_id = device_uuid
+                elif len(ep_device_id) == 0:
+                    ep_device_id = device_id
 
-                try:
-                    port_id = chk_string('port_uuid', endpoint.port_id.port_id.uuid, allow_empty=False)
-                    port_type = chk_string('port_type', endpoint.port_type, allow_empty=False)
-                except Exception as e:
-                    LOGGER.exception('Invalid arguments:')
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-                if port_id in added_endpoint_uuids:
-                    msg = 'Duplicated port_id({}) in device_id({}).'
-                    msg = msg.format(port_id, device_uuid)
+                if ep_port_id in added_endpoint_uuids:
+                    msg = 'Duplicated Port({}) in Endpoint(#{}) of Device({}).'
+                    msg = msg.format(ep_port_id, i, device_id)
                     raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
 
-                added_endpoint_uuids.add(port_id)
-                endpoint_pairs.append((port_id, port_type))
+                added_endpoint_uuids.add(ep_port_id)
+                endpoint_pairs.append((ep_port_id, ep_port_type))
 
-            # ----- Implement changes in database ----------------------------------------------------------------------
-            db_device = db_topology.device(device_uuid).create(device_type, device_config, device_opstat)
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_device = db_topology.device(device_id).create(device_type, device_config, device_opstat)
             for port_id,port_type in endpoint_pairs: db_device.endpoint(port_id).create(port_type)
 
             # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = DeviceId(device_id=dict(uuid=device_uuid))
+            reply = DeviceId(**db_device.dump_id())
             LOGGER.debug('AddDevice reply: {}'.format(str(reply)))
             ADDDEVICE_COUNTER_COMPLETED.inc()
             return reply
@@ -159,54 +175,64 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
     def ConfigureDevice(self, request : Device, grpc_context : grpc.ServicerContext) -> DeviceId:
         CONFIGUREDEVICE_COUNTER_STARTED.inc()
         try:
-            LOGGER.info('ConfigureDevice request: {}'.format(str(request)))
+            LOGGER.debug('ConfigureDevice request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
             try:
-                device_uuid = chk_string('device_uuid', request.device_id.device_id.uuid, allow_empty=False)
-                device_type = chk_string('device_type', request.device_type, allow_empty=True)
-                device_config = chk_string('device_config', request.device_config.device_config, allow_empty=True)
-                device_opstat = chk_options('devOperationalStatus', request.devOperationalStatus,
+                device_id     = chk_string ('device.device_id.device_id.uuid',
+                                            request.device_id.device_id.uuid,
+                                            allow_empty=False)
+                device_type   = chk_string ('device.device_type',
+                                            request.device_type,
+                                            allow_empty=True)
+                device_config = chk_string ('device.device_config.device_config',
+                                            request.device_config.device_config,
+                                            allow_empty=True)
+                device_opstat = chk_options('device.devOperationalStatus',
+                                            request.devOperationalStatus,
                                             operationalstatus_enum_values())
             except Exception as e:
                 LOGGER.exception('Invalid arguments:')
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
 
             device_opstat = to_operationalstatus_enum(device_opstat)
-            if device_opstat is None:
-                msg = 'Unsupported OperationalStatus({}).'
-                msg = msg.format(request.devOperationalStatus)
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+            # should not happen because gRPC limits accepted values in enums
+            if device_opstat is None:                                           # pragma: no cover
+                msg = 'Unsupported OperationalStatus({}).'                      # pragma: no cover
+                msg = msg.format(request.devOperationalStatus)                  # pragma: no cover
+                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)   # pragma: no cover
 
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
 
-            if not db_topology.devices.contains(device_uuid):
-                msg = 'device_uuid({}) does not exist.'
-                msg = msg.format(device_uuid)
+            if not db_topology.devices.contains(device_id):
+                msg = 'Device({}) does not exist in the database.'
+                msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
 
-            db_device = db_topology.device(device_uuid)
+            db_device = db_topology.device(device_id)
             db_device_attributes = db_device.attributes.get(attributes=['device_type'])
-            if len(db_device_attributes) == 0:
-                msg = 'attribute device_type for device_uuid({}) does not exist.'
-                msg = msg.format(device_uuid)
-                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)
+            # should not happen, device creation through Database API ensures all fields are always present
+            if len(db_device_attributes) == 0:                                                  # pragma: no cover
+                msg = 'Attribute device_type for Device({}) does not exist in the database.'    # pragma: no cover
+                msg = msg.format(device_id)                                                     # pragma: no cover
+                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)                # pragma: no cover
 
             db_device_type = db_device_attributes.get('device_type')
-            if len(db_device_type) == 0:
-                msg = 'attribute device_type for device_uuid({}) is empty.'
-                msg = msg.format(device_uuid)
-                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)
+            # should not happen, device creation through Database API ensures all fields are always present
+            if len(db_device_type) == 0:                                                # pragma: no cover
+                msg = 'Attribute device_type for Device({}) is empty in the database.'  # pragma: no cover
+                msg = msg.format(device_id)                                             # pragma: no cover
+                raise ServiceException(grpc.StatusCode.FAILED_PRECONDITION, msg)        # pragma: no cover
 
             if db_device_type != device_type:
-                msg = 'Device({}) has Type({}). Cannot be changed to Type({}).'
-                msg = msg.format(device_uuid, db_device_type, device_type)
+                msg = 'Device({}) has Type({}) in the database. Cannot be changed to Type({}).'
+                msg = msg.format(device_id, db_device_type, device_type)
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
 
             if len(request.endpointList) > 0:
                 msg = 'Endpoints belonging to Device({}) cannot be modified.'
-                msg = msg.format(device_uuid)
+                msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
 
             update_attributes = {}
@@ -217,22 +243,20 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             if device_opstat != OperationalStatus.KEEP_STATE:
                 update_attributes['device_operational_status'] = device_opstat
 
-            LOGGER.info('update_attributes={}'.format(str(update_attributes)))
-
             if len(update_attributes) == 0:
                 msg = ' '.join([
                     'Any change has been requested for Device({}).',
-                    'Either specify a new configuration or a new device state.',
+                    'Either specify a new configuration or a new device operational status.',
                 ])
-                msg = msg.format(device_uuid)
+                msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.ABORTED, msg)
 
-            # ----- Implement changes in database ----------------------------------------------------------------------
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_device.update(update_attributes=update_attributes)
 
             # ----- Compose reply --------------------------------------------------------------------------------------
-            reply = DeviceId(device_id=dict(uuid=device_uuid))
-            LOGGER.info('ConfigureDevice reply: {}'.format(str(reply)))
+            reply = DeviceId(**db_device.dump_id())
+            LOGGER.debug('ConfigureDevice reply: {}'.format(str(reply)))
             CONFIGUREDEVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
@@ -250,7 +274,9 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
             try:
-                device_uuid = chk_string('device_uuid', request.device_id.uuid, allow_empty=False)
+                device_id = chk_string('device_id.device_id.uuid',
+                                       request.device_id.uuid,
+                                       allow_empty=False)
             except Exception as e:
                 LOGGER.exception('Invalid arguments:')
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
@@ -258,13 +284,13 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
 
-            if not db_topology.devices.contains(device_uuid):
-                msg = 'device_uuid({}) does not exist.'
-                msg = msg.format(device_uuid)
+            if not db_topology.devices.contains(device_id):
+                msg = 'Device({}) does not exist in the database.'
+                msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
 
-            # ----- Implement changes in database ----------------------------------------------------------------------
-            db_topology.device(device_uuid).delete()
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_topology.device(device_id).delete()
 
             # ----- Compose reply --------------------------------------------------------------------------------------
             reply = Empty()
diff --git a/src/device/tests/Dockerfile b/src/device/tests/Dockerfile
deleted file mode 100644
index e8789ce7442b58063f96970d2f52b8c31b1818c5..0000000000000000000000000000000000000000
--- a/src/device/tests/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM device:latest
-
-# Run integration tests
-ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", "device/tests/test_integration.py"]
diff --git a/src/device/tests/test_integration.py b/src/device/tests/test_integration.py
deleted file mode 100644
index 3f1519aba7b1a35972d467279eeb45c2723390b2..0000000000000000000000000000000000000000
--- a/src/device/tests/test_integration.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#import logging, os, pytest, sys
-
-#from pathlib import Path
-#sys.path.append(__file__.split('src')[0] + 'src')
-#print(sys.path)
-
-#from context.client.ContextClient import ContextClient
-#from context.proto.context_pb2 import Empty
-#from .tools.ValidateTopology import validate_topology_dict
-
-#LOGGER = logging.getLogger(__name__)
-#LOGGER.setLevel(logging.DEBUG)
-
-#@pytest.fixture(scope='session')
-#def remote_context_client():
-#    address = os.environ.get('TEST_TARGET_ADDRESS')
-#    if(address is None): raise Exception('EnvironmentVariable(TEST_TARGET_ADDRESS) not specified')
-#    port = os.environ.get('TEST_TARGET_PORT')
-#    if(port is None): raise Exception('EnvironmentVariable(TEST_TARGET_PORT) not specified')
-#    return ContextClient(address=address, port=port)
-
-#def test_remote_get_topology(remote_context_client):
-#    response = remote_context_client.GetTopology(Empty())
-#    validate_topology_dict(response)
diff --git a/src/device/tests/test_unitary.py b/src/device/tests/test_unitary.py
index 21d3bef4646521957a387795386ba55570d19659..9834c5c39761997b336ed9feda11d6899dde19c0 100644
--- a/src/device/tests/test_unitary.py
+++ b/src/device/tests/test_unitary.py
@@ -11,12 +11,12 @@ from device.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-DEVICE_ID = {'device_id': {'uuid': 'test-device-001'}}
+DEVICE_ID = {'device_id': {'uuid': 'dev1'}}
 DEVICE = {
-    'device_id': {'device_id': {'uuid': 'test-device-001'}},
+    'device_id': {'device_id': {'uuid': 'dev1'}},
     'device_type': 'ROADM',
-    'device_config': {'device_config': ''},
-    'devOperationalStatus': 1,
+    'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
     'endpointList' : [
         {
             'port_id': {
@@ -24,10 +24,10 @@ DEVICE = {
                     'contextId': {'contextUuid': {'uuid': 'admin'}},
                     'topoId': {'uuid': 'admin'}
                 },
-                'dev_id': {'device_id': {'uuid': 'test-device-001'}},
-                'port_id': {'uuid' : 'port-101'}
+                'dev_id': {'device_id': {'uuid': 'dev1'}},
+                'port_id': {'uuid' : 'to-dev2'}
             },
-            'port_type': 'LINE'
+            'port_type': 'WDM'
         },
         {
             'port_id': {
@@ -35,53 +35,68 @@ DEVICE = {
                     'contextId': {'contextUuid': {'uuid': 'admin'}},
                     'topoId': {'uuid': 'admin'}
                 },
-                'dev_id': {'device_id': {'uuid': 'test-device-001'}},
-                'port_id': {'uuid' : 'port-102'}
+                'dev_id': {'device_id': {'uuid': 'dev1'}},
+                'port_id': {'uuid' : 'to-dev3'}
             },
-            'port_type': 'LINE'
+            'port_type': 'WDM'
+        },
+        {
+            'port_id': {
+                'topoId': {
+                    'contextId': {'contextUuid': {'uuid': 'admin'}},
+                    'topoId': {'uuid': 'admin'}
+                },
+                'dev_id': {'device_id': {'uuid': 'dev1'}},
+                'port_id': {'uuid' : 'to-dev4'}
+            },
+            'port_type': 'WDM'
         },
     ]
 }
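+# dev1 is declared with three WDM endpoints; the duplicate-port test below reuses the uuid of the first one.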
 
 @pytest.fixture(scope='session')
-def service():
-    database = get_database(engine=DatabaseEngineEnum.INMEMORY, filepath='data/topo_nsfnet.json')
+def device_database():
+    _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
+    return _database
+
+@pytest.fixture(scope='session')
+def device_service(device_database):
     _service = DeviceService(
-        database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+        device_database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
-def client(service):
+def device_client(device_service):
     _client = DeviceClient(address='127.0.0.1', port=GRPC_SERVICE_PORT)
     yield _client
     _client.close()
 
-def test_create_empty_device_uuid(client : DeviceClient):
+def test_add_device_empty_device_uuid(device_client : DeviceClient):
     # should fail with device uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_id']['device_id']['uuid'] = ''
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'device_uuid() string is empty.'
+    assert e.value.details() == 'device.device_id.device_id.uuid() string is empty.'
 
-def test_create_empty_device_type(client : DeviceClient):
+def test_add_device_empty_device_type(device_client : DeviceClient):
     # should fail with device type is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_type'] = ''
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'device_type() string is empty.'
+    assert e.value.details() == 'device.device_type() string is empty.'
 
-def test_create_wrong_device_operational_status(client : DeviceClient):
+def test_add_device_wrong_device_operational_status(device_client : DeviceClient):
     # should fail with wrong device operational status
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['devOperationalStatus'] = OperationalStatus.KEEP_STATE.value
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
         'Device has to be created with either ENABLED/DISABLED Operational State.',
@@ -89,183 +104,183 @@ def test_create_wrong_device_operational_status(client : DeviceClient):
     ])
     assert e.value.details() == msg
 
-def test_create_endpoint_wrong_context(client : DeviceClient):
+def test_add_device_endpoint_wrong_context(device_client : DeviceClient):
     # should fail with unsupported context
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
         request = Device(**copy_device)
-        LOGGER.warning('request = {}'.format(request))
-        client.AddDevice(request)
+        device_client.AddDevice(request)
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported context_id(wrong-context) in endpoint #0.',
-        'Only default context_id(admin) is currently supported.',
-        'Optionally, leave field empty to use default context_id.',
+        'Unsupported Context(wrong-context) in Endpoint(#0) of Device(dev1).',
+        'Only default Context(admin) is currently supported.',
+        'Optionally, leave field empty to use default Context.',
     ])
     assert e.value.details() == msg
 
-def test_create_endpoint_wrong_topology(client : DeviceClient):
+def test_add_device_endpoint_wrong_topology(device_client : DeviceClient):
     # should fail with unsupported topology
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['topoId']['topoId']['uuid'] = 'wrong-topo'
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported topology_id(wrong-topo) in endpoint #0.',
-        'Only default topology_id(admin) is currently supported.',
-        'Optionally, leave field empty to use default topology_id.',
+        'Unsupported Topology(wrong-topo) in Endpoint(#0) of Device(dev1).',
+        'Only default Topology(admin) is currently supported.',
+        'Optionally, leave field empty to use default Topology.',
     ])
     assert e.value.details() == msg
 
-def test_create_endpoint_wrong_device(client : DeviceClient):
+def test_add_device_endpoint_wrong_device(device_client : DeviceClient):
     # should fail with wrong endpoint device
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['dev_id']['device_id']['uuid'] = 'wrong-device'
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Wrong device_id(wrong-device) in endpoint #0.',
-        'Parent specified in message is device_id(test-device-001).',
-        'Optionally, leave field empty to use parent device_id.',
+        'Wrong Device(wrong-device) in Endpoint(#0).',
+        'Parent specified in message is Device(dev1).',
+        'Optionally, leave field empty to use parent Device.',
     ])
     assert e.value.details() == msg
 
-def test_create_empty_port_uuid(client : DeviceClient):
+def test_add_device_empty_port_uuid(device_client : DeviceClient):
     # should fail with port uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['port_id']['uuid'] = ''
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'port_uuid() string is empty.'
+    assert e.value.details() == 'endpoint[#0].port_id.port_id.uuid() string is empty.'
 
-def test_create_empty_port_type(client : DeviceClient):
+def test_add_device_empty_port_type(device_client : DeviceClient):
     # should fail with port type is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_type'] = ''
-        client.AddDevice(Device(**copy_device))
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'port_type() string is empty.'
+    assert e.value.details() == 'endpoint[#0].port_type() string is empty.'
 
-def test_create_duplicate_port(client : DeviceClient):
+def test_add_device_duplicate_port(device_client : DeviceClient):
     # should fail with duplicate port in device
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
-        copy_device['endpointList'][1]['port_id']['port_id']['uuid'] = 'port-101'
-        client.AddDevice(Device(**copy_device))
+        copy_device['endpointList'][1]['port_id']['port_id']['uuid'] = 'to-dev2'
+        device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    assert e.value.details() == 'Duplicated port_id(port-101) in device_id(test-device-001).'
+    assert e.value.details() == 'Duplicated Port(to-dev2) in Endpoint(#1) of Device(dev1).'
 
-def test_create(client : DeviceClient):
+def test_add_device(device_client : DeviceClient):
     # should work
     validate_device_id(MessageToDict(
-            client.AddDevice(Device(**DEVICE)),
+            device_client.AddDevice(Device(**DEVICE)),
             including_default_value_fields=True, preserving_proto_field_name=True,
             use_integers_for_enums=False))
 
-def test_create_duplicate(client : DeviceClient):
+def test_add_device_duplicate(device_client : DeviceClient):
     # should fail with device already exists
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
-        client.AddDevice(Device(**DEVICE))
+        device_client.AddDevice(Device(**DEVICE))
     assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    assert e.value.details() == 'device_uuid(test-device-001) already exists.'
+    assert e.value.details() == 'Device(dev1) already exists in the database.'
 
-def test_delete_empty_uuid(client : DeviceClient):
+def test_delete_device_empty_uuid(device_client : DeviceClient):
     # should fail with device uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device_id = copy.deepcopy(DEVICE_ID)
         copy_device_id['device_id']['uuid'] = ''
-        client.DeleteDevice(DeviceId(**copy_device_id))
+        device_client.DeleteDevice(DeviceId(**copy_device_id))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'device_uuid() string is empty.'
+    assert e.value.details() == 'device_id.device_id.uuid() string is empty.'
 
-def test_delete_device_not_found(client : DeviceClient):
+def test_delete_device_not_found(device_client : DeviceClient):
     # should fail with device not found
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device_id = copy.deepcopy(DEVICE_ID)
         copy_device_id['device_id']['uuid'] = 'wrong-device-id'
-        client.DeleteDevice(DeviceId(**copy_device_id))
+        device_client.DeleteDevice(DeviceId(**copy_device_id))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    assert e.value.details() == 'device_uuid(wrong-device-id) does not exist.'
+    assert e.value.details() == 'Device(wrong-device-id) does not exist in the database.'
 
-def test_delete(client : DeviceClient):
+def test_delete_device(device_client : DeviceClient):
     # should work
     validate_empty(MessageToDict(
-            client.DeleteDevice(DeviceId(**DEVICE_ID)),
+            device_client.DeleteDevice(DeviceId(**DEVICE_ID)),
             including_default_value_fields=True, preserving_proto_field_name=True,
             use_integers_for_enums=False))
 
-def test_configure_empty_device_uuid(client : DeviceClient):
+def test_configure_device_empty_device_uuid(device_client : DeviceClient):
     # should fail with device uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_id']['device_id']['uuid'] = ''
-        client.ConfigureDevice(Device(**copy_device))
+        device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'device_uuid() string is empty.'
+    assert e.value.details() == 'device.device_id.device_id.uuid() string is empty.'
 
-def test_configure_device_not_found(client : DeviceClient):
+def test_configure_device_not_found(device_client : DeviceClient):
     # should fail with device not found
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_id']['device_id']['uuid'] = 'wrong-device-id'
-        client.ConfigureDevice(Device(**copy_device))
+        device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    assert e.value.details() == 'device_uuid(wrong-device-id) does not exist.'
+    assert e.value.details() == 'Device(wrong-device-id) does not exist in the database.'
 
-def test_create_device_default_endpoint_context_topology(client : DeviceClient):
+def test_add_device_default_endpoint_context_topology_device(device_client : DeviceClient):
     # should work
     copy_device = copy.deepcopy(DEVICE)
     copy_device['endpointList'][0]['port_id']['topoId']['contextId']['contextUuid']['uuid'] = ''
     copy_device['endpointList'][0]['port_id']['topoId']['topoId']['uuid'] = ''
     copy_device['endpointList'][0]['port_id']['dev_id']['device_id']['uuid'] = ''
     validate_device_id(MessageToDict(
-            client.AddDevice(Device(**copy_device)),
+            device_client.AddDevice(Device(**copy_device)),
             including_default_value_fields=True, preserving_proto_field_name=True,
             use_integers_for_enums=False))
 
-def test_configure_wrong_device_type(client : DeviceClient):
+def test_configure_device_wrong_device_type(device_client : DeviceClient):
     # should fail with device type is wrong
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_type'] = 'wrong-type'
-        client.ConfigureDevice(Device(**copy_device))
+        device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'Device(test-device-001) has Type(ROADM). Cannot be changed to Type(wrong-type).'
+    assert e.value.details() == 'Device(dev1) has Type(ROADM) in the database. Cannot be changed to Type(wrong-type).'
 
-def test_configure_with_endpoints(client : DeviceClient):
+def test_configure_device_with_endpoints(device_client : DeviceClient):
     # should fail with endpoints cannot be modified
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
-        client.ConfigureDevice(Device(**copy_device))
+        device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'Endpoints belonging to Device(test-device-001) cannot be modified.'
+    assert e.value.details() == 'Endpoints belonging to Device(dev1) cannot be modified.'
 
-def test_configure_no_change(client : DeviceClient):
+def test_configure_device_no_change(device_client : DeviceClient):
     # should fail with any change detected
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
+        copy_device['device_config']['device_config'] = ''
         copy_device['devOperationalStatus'] = OperationalStatus.KEEP_STATE.value
         copy_device['endpointList'].clear()
-        client.ConfigureDevice(Device(**copy_device))
+        device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.ABORTED
     msg = ' '.join([
-        'Any change has been requested for Device(test-device-001).',
-        'Either specify a new configuration or a new device state.',
+        'Any change has been requested for Device(dev1).',
+        'Either specify a new configuration or a new device operational status.',
     ])
     assert e.value.details() == msg
 
-def test_configure(client : DeviceClient):
+def test_configure_device(device_client : DeviceClient):
     # should work
     copy_device = copy.deepcopy(DEVICE)
     copy_device['device_config']['device_config'] = '<new_config/>'
     copy_device['devOperationalStatus'] = OperationalStatus.DISABLED.value
     copy_device['endpointList'].clear()
     validate_device_id(MessageToDict(
-            client.ConfigureDevice(Device(**copy_device)),
+            device_client.ConfigureDevice(Device(**copy_device)),
             including_default_value_fields=True, preserving_proto_field_name=True,
             use_integers_for_enums=False))
diff --git a/src/integration_tester/.gitlab-ci.yml b/src/integration_tester/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d090e73a20e3eea4c0fb19cac579fd3aa251f79e
--- /dev/null
+++ b/src/integration_tester/.gitlab-ci.yml
@@ -0,0 +1,63 @@
+# Build, tag, and push the Docker images to the GitLab Docker registry
+build integration_tester:
+  variables:
+    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
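+    # the build context is ./src/ so the Dockerfile can copy the common/, context/ and device/ modules next to $IMAGE_NAME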
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
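+  # rebuild when this module, the shared common/ code, or the context/device modules copied into its image change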
+  rules:
+    - changes:
+      - src/common/**
+      - src/context/**
+      - src/device/**
+      - src/$IMAGE_NAME/**
+      - .gitlab-ci.yml
+
+# Pull the Docker image from the GitLab registry to check it is available (no unitary tests are run for this component)
+test integration_tester:
+  variables:
+    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: test
+  needs:
+    - build integration_tester
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  rules:
+    - changes:
+      - src/common/**
+      - src/context/**
+      - src/device/**
+      - src/$IMAGE_NAME/**
+      - .gitlab-ci.yml
+
+# Run integration tests in Kubernetes Cluster
+integration_test integration_tester:
+  variables:
+    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: integration_test
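+  # wait for the image to be built and tested, and for the context, device, and dependency services to be deployed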
+  needs:
+    - build integration_tester
+    - test integration_tester
+    - deploy context
+    - deploy device
+    - dependencies all
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - kubectl version
+    - kubectl get all
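+    # sanitize IMAGE_NAME into a valid pod name (alphanumerics, '.', '-') and remove any pod left over from a previous run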
+    - kubectl delete pod $(echo $IMAGE_NAME | sed -r 's/[^a-zA-Z0-9\.\-]/-/g') --wait=true --ignore-not-found=true
+    - kubectl get all
+    - kubectl run $(echo $IMAGE_NAME | sed -r 's/[^a-zA-Z0-9\.\-]/-/g') --image "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" --restart=Never -i --rm
+    - kubectl get all
+  #when: manual
+  #allow_failure: false
diff --git a/src/integration_tester/Dockerfile b/src/integration_tester/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..5aabc8bc89115a45b40095aa325808eaf3c4cbaf
--- /dev/null
+++ b/src/integration_tester/Dockerfile
@@ -0,0 +1,34 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=1
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/integration_tester
+
+# Get Python packages per module
+COPY integration_tester/requirements.in integration_tester/requirements.in
+RUN pip-compile --output-file=integration_tester/requirements.txt integration_tester/requirements.in
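+# pip-compile resolves requirements.in into a pinned requirements.txt, which is then installed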
+RUN python3 -m pip install -r integration_tester/requirements.txt
+
+# Add files into working directory
+COPY common/. common
+COPY context/. context
+COPY device/. device
+COPY integration_tester/. integration_tester
+
+# Run integration tester
+ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", \
+            "integration_tester/test_context_device.py" \
+]
diff --git a/src/integration_tester/__init__.py b/src/integration_tester/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/integration_tester/definitions.py b/src/integration_tester/definitions.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebdc3a43de438722716bf4f8d87eab3165526980
--- /dev/null
+++ b/src/integration_tester/definitions.py
@@ -0,0 +1,92 @@
+from common.database.api.context.OperationalStatus import OperationalStatus
+
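+# All endpoints below reference the default 'admin' context/topology, currently the only one accepted by the services.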
+TOPOLOGY_ID = {
+    'contextId': {'contextUuid': {'uuid': 'admin'}},
+    'topoId': {'uuid': 'admin'}
+}
+
+DEVICE_ID_DEV1 = {'device_id': {'uuid': 'dev1'}}
+DEVICE_DEV1 = {
+    'device_id': {'device_id': {'uuid': 'dev1'}}, 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}}, 'port_type': 'WDM'},
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}}, 'port_type': 'WDM'},
+    ]
+}
+
+DEVICE_ID_DEV2 = {'device_id': {'uuid': 'dev2'}}
+DEVICE_DEV2 = {
+    'device_id': {'device_id': {'uuid': 'dev2'}}, 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}}, 'port_type': 'WDM'},
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}}, 'port_type': 'WDM'},
+    ]
+}
+
+DEVICE_ID_DEV3 = {'device_id': {'uuid': 'dev3'}}
+DEVICE_DEV3 = {
+    'device_id': {'device_id': {'uuid': 'dev3'}},
+    'device_type': 'ROADM',
+    'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}}, 'port_type': 'WDM'},
+        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}}, 'port_type': 'WDM'},
+    ]
+}
+
+LINK_ID_DEV1_DEV2 = {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}}
+LINK_DEV1_DEV2 = {
+    'link_id': {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}},
+    ]
+}
+
+LINK_ID_DEV1_DEV3 = {'link_id': {'uuid': 'dev1/to-dev3 ==> dev3/to-dev1'}}
+LINK_DEV1_DEV3 = {
+    'link_id': {'link_id': {'uuid': 'dev1/to-dev3 ==> dev3/to-dev1'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV1 = {'link_id': {'uuid': 'dev2/to-dev1 ==> dev1/to-dev2'}}
+LINK_DEV2_DEV1 = {
+    'link_id': {'link_id': {'uuid': 'dev2/to-dev1 ==> dev1/to-dev2'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV3 = {'link_id': {'uuid': 'dev2/to-dev3 ==> dev3/to-dev2'}}
+LINK_DEV2_DEV3 = {
+    'link_id': {'link_id': {'uuid': 'dev2/to-dev3 ==> dev3/to-dev2'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV1 = {'link_id': {'uuid': 'dev3/to-dev1 ==> dev1/to-dev3'}}
+LINK_DEV3_DEV1 = {
+    'link_id': {'link_id': {'uuid': 'dev3/to-dev1 ==> dev1/to-dev3'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV2 = {'link_id': {'uuid': 'dev3/to-dev2 ==> dev2/to-dev3'}}
+LINK_DEV3_DEV2 = {
+    'link_id': {'link_id': {'uuid': 'dev3/to-dev2 ==> dev2/to-dev3'}},
+    'endpointList' : [
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}},
+        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}},
+    ]
+}
diff --git a/src/integration_tester/requirements.in b/src/integration_tester/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..25abdad1b5767117956a88b816399635348884c7
--- /dev/null
+++ b/src/integration_tester/requirements.in
@@ -0,0 +1,6 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
diff --git a/src/integration_tester/test_context_device.py b/src/integration_tester/test_context_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c101f17dea8847de4e5579ffce28b3318586f9f
--- /dev/null
+++ b/src/integration_tester/test_context_device.py
@@ -0,0 +1,79 @@
+import logging, os, pytest
+from google.protobuf.json_format import MessageToDict
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.api.Database import Database
+from common.tests.Assertions import validate_device_id, validate_link_id, validate_topology_has_devices, \
+    validate_topology_has_links, validate_topology_is_empty
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Device, Empty, Link
+from device.client.DeviceClient import DeviceClient
+from .definitions import DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3, LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, \
+    LINK_DEV2_DEV3, LINK_DEV3_DEV1, LINK_DEV3_DEV2
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+def get_setting(name):
+    value = os.environ.get(name)
+    if value is None: raise Exception('Unable to find variable({})'.format(name))
+    return value
+
+@pytest.fixture(scope='session')
+def redis_database():
+    _database = get_database(engine=DatabaseEngineEnum.REDIS, REDIS_DATABASE_ID=0)
+    return _database
+
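+# <SVC>_SERVICE_HOST / <SVC>_SERVICE_PORT_GRPC are the variables Kubernetes injects for each deployed Service
+# (assuming the Services expose a port named 'grpc'); get_setting() fails fast if they are missing.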
+@pytest.fixture(scope='session')
+def context_client():
+    service_host = get_setting('CONTEXTSERVICE_SERVICE_HOST')
+    service_port = get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')
+    _client = ContextClient(address=service_host, port=service_port)
+    yield _client
+    _client.close()
+
+@pytest.fixture(scope='session')
+def device_client():
+    service_host = get_setting('DEVICESERVICE_SERVICE_HOST')
+    service_port = get_setting('DEVICESERVICE_SERVICE_PORT_GRPC')
+    _client = DeviceClient(address=service_host, port=service_port)
+    yield _client
+    _client.close()
+
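+# The tests below run in order against the shared Redis database deployed in the cluster, so wipe it first.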
+def test_clean_database(redis_database : Database):
+    # should work
+    redis_database.clear_all()
+
+def test_get_topology_empty(context_client : ContextClient):
+    # should work
+    validate_topology_is_empty(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_devices(context_client : ContextClient, device_client : DeviceClient):
+    # should work
+    for device in [DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3]:
+        validate_device_id(MessageToDict(
+            device_client.AddDevice(Device(**device)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_topology_has_devices(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_links(context_client : ContextClient):
+    # should work
+    for link in [LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, LINK_DEV2_DEV3, LINK_DEV3_DEV1, LINK_DEV3_DEV2]:
+        validate_link_id(MessageToDict(
+            context_client.AddLink(Link(**link)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_topology_has_links(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))