diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7938f7ec1aa82e5d883b7af4bd8c4f6884ebe779..d627359e93ea57b865c91f211f5683ee7b5a8a07 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,9 +2,11 @@
 stages:
   - build
   - test
+  - unit_test
+  - integ_test
   - dependencies
   - deploy
-  - integration_test
+  - funct_test
 
 # include the individual .gitlab-ci.yml of each micro-service
 include: 
@@ -12,4 +14,6 @@ include:
   #- local: '/src/monitoring/.gitlab-ci.yml'
   - local: '/src/context/.gitlab-ci.yml'
   - local: '/src/device/.gitlab-ci.yml'
-  - local: '/src/integration_tester/.gitlab-ci.yml'
+  - local: '/src/service/.gitlab-ci.yml'
+  - local: '/src/tester_integration/.gitlab-ci.yml'
+  - local: '/src/tester_functional/.gitlab-ci.yml'
diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml
index 75406da088820da3e8360e7c37fc0d05aa8f40db..cf7da7e43fbfc02f5872745fc6f10f3cfcee6cb2 100644
--- a/manifests/contextservice.yaml
+++ b/manifests/contextservice.yaml
@@ -18,6 +18,7 @@ spec:
         imagePullPolicy: Always
         ports:
         - containerPort: 1010
+        - containerPort: 8080
         env:
         - name: DB_ENGINE
           value: "redis"
@@ -51,3 +52,27 @@ spec:
   - name: grpc
     port: 1010
     targetPort: 1010
+  - name: http
+    port: 8080
+    targetPort: 8080
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: contextservice-public
+  labels:
+    app: contextservice
+spec:
+  type: NodePort
+  selector:
+    app: contextservice
+  ports:
+  - name: grpc
+    protocol: TCP
+    port: 1010
+    targetPort: 1010
+  - name: http
+    protocol: TCP
+    port: 8080
+    targetPort: 8080
+---
diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml
index 7aa02e815034ff05633b2febaaf38ceb72bf06a3..5afd6c7c85e0f62dd8902db746b3f54b412a7c56 100644
--- a/manifests/deviceservice.yaml
+++ b/manifests/deviceservice.yaml
@@ -49,5 +49,6 @@ spec:
     app: deviceservice
   ports:
   - name: grpc
+    protocol: TCP
     port: 2020
     targetPort: 2020
diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml
index bf886f4f9643676e22173537a1de4523f95386ae..885e50adc04d86ed59d118c9223f11b1e39385da 100644
--- a/manifests/monitoringservice.yaml
+++ b/manifests/monitoringservice.yaml
@@ -46,5 +46,6 @@ spec:
     app: monitoringservice
   ports:
   - name: grpc
+    protocol: TCP
     port: 8080
     targetPort: 8080
diff --git a/manifests/redis.yaml b/manifests/redis.yaml
index 4d6d6cbf2e5d71806ebeddbb3d6b67cf19a5d3f6..9aaebb1673637e6afc4fcf2d5887009f5d365a4d 100644
--- a/manifests/redis.yaml
+++ b/manifests/redis.yaml
@@ -36,3 +36,19 @@ spec:
     port: 6379
     targetPort: 6379
 ---
+apiVersion: v1
+kind: Service
+metadata:
+  name: redis-public
+  labels:
+    app: redis
+spec:
+  type: NodePort
+  selector:
+    app: redis
+  ports:
+  - name: redis
+    protocol: TCP
+    port: 6379
+    targetPort: 6379
+---
diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..72fd1c61564831f61bdf78aa494092829f0dd676
--- /dev/null
+++ b/manifests/serviceservice.yaml
@@ -0,0 +1,54 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: serviceservice
+spec:
+  selector:
+    matchLabels:
+      app: serviceservice
+  template:
+    metadata:
+      labels:
+        app: serviceservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+      - name: server
+        image: registry.gitlab.com/teraflow-h2020/controller/service:latest
+        imagePullPolicy: Always
+        ports:
+        - containerPort: 3030
+        env:
+        - name: DB_ENGINE
+          value: "redis"
+        - name: REDIS_DATABASE_ID
+          value: "0"
+        - name: LOG_LEVEL
+          value: "DEBUG"
+        readinessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:3030"]
+        livenessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:3030"]
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: serviceservice
+spec:
+  type: ClusterIP
+  selector:
+    app: serviceservice
+  ports:
+  - name: grpc
+    protocol: TCP
+    port: 3030
+    targetPort: 3030
diff --git a/proto/service.proto b/proto/service.proto
index 33feb28717adf9671b8ebcf6ed5a7ba757ff6c6f..fb10d11b77b4c35d89bdb6047691045c71322644 100644
--- a/proto/service.proto
+++ b/proto/service.proto
@@ -8,7 +8,7 @@ service ServiceService {
   rpc GetServiceList (context.Empty) returns (ServiceList) {}
   rpc CreateService (Service) returns (ServiceId) {}
   rpc UpdateService (Service) returns (ServiceId) {}
-  rpc DeleteService (Service) returns (ServiceId) {}
+  rpc DeleteService (ServiceId) returns (context.Empty) {}
   rpc GetServiceById (ServiceId) returns (Service) {}
   rpc GetConnectionList (context.Empty) returns (ConnectionList) {}
   
diff --git a/report_coverage_all.sh b/report_coverage_all.sh
index 752f1383da444eca42b91e1301f4abf0402b7e70..e8e11c913accf4b32e2d78ae38d1b9c8d5ee23a4 100755
--- a/report_coverage_all.sh
+++ b/report_coverage_all.sh
@@ -6,6 +6,6 @@ RCFILE=~/teraflow/controller/coverage/.coveragerc
 echo
 echo "Coverage report:"
 echo "----------------"
-coverage report --rcfile=$RCFILE --skip-covered --sort cover --show-missing
+coverage report --rcfile=$RCFILE --sort cover --show-missing --skip-covered
 #coverage html --rcfile=$RCFILE
 #coverage xml --rcfile=$RCFILE
diff --git a/report_coverage_context.sh b/report_coverage_context.sh
index f2f71fa744b5d8209589b283c7a375b4f25be0c8..3a404a62698cdd95f94c9ed7d4c8b4b073778d08 100755
--- a/report_coverage_context.sh
+++ b/report_coverage_context.sh
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-./report_coverage_all.sh | grep --color -E -i "^.*context.*$|$"
+./report_coverage_all.sh | grep --color -E -i "^context/.*$|$"
diff --git a/report_coverage_device.sh b/report_coverage_device.sh
index b4215cd30141bb524a7d99717841de127d7cda15..be2612d89ce56d518d992327f93a24853e591a4d 100755
--- a/report_coverage_device.sh
+++ b/report_coverage_device.sh
@@ -1,3 +1,3 @@
 #!/bin/bash
 
-./report_coverage_all.sh | grep --color -E -i "^.*device.*$|$"
+./report_coverage_all.sh | grep --color -E -i "^device/.*$|$"
diff --git a/report_coverage_service.sh b/report_coverage_service.sh
new file mode 100755
index 0000000000000000000000000000000000000000..160f52f126a78be5338456667a97ec9184be421a
--- /dev/null
+++ b/report_coverage_service.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+./report_coverage_all.sh | grep --color -E -i "^service/.*$|$"
diff --git a/run_integration_tests.sh b/run_in_kubernetes.sh
similarity index 100%
rename from run_integration_tests.sh
rename to run_in_kubernetes.sh
diff --git a/run_local_tests.sh b/run_local_tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..59aef4c79f8e179e432db74b37cfc9cc01a201b2
--- /dev/null
+++ b/run_local_tests.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+cd $(dirname $0)/src
+RCFILE=~/teraflow/controller/coverage/.coveragerc
+COVERAGEFILE=~/teraflow/controller/coverage/.coverage
+
+# Run unitary tests and analyze coverage of code at same time
+
+# First destroy old coverage file
+rm -f $COVERAGEFILE
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    common/database/tests/test_unitary.py \
+    common/database/tests/test_engine_inmemory.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    context/tests/test_unitary.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    device/tests/test_unitary.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    service/tests/test_unitary.py
+
+# Run integration tests and analyze coverage of code at same time
+export DB_ENGINE='redis'
+export REDIS_SERVICE_HOST='10.1.7.194' # NOTE(review): hardcoded test-cluster IP; override in the environment when running elsewhere
+export REDIS_SERVICE_PORT='31789'
+export REDIS_DATABASE_ID='0'
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    common/database/tests/test_engine_redis.py \
+    tester_integration/test_context_device_service.py
diff --git a/run_unitary_tests.sh b/run_unitary_tests.sh
deleted file mode 100755
index 84b8010341b9fa70275cd2c1039c8c0cca3d2fdc..0000000000000000000000000000000000000000
--- a/run_unitary_tests.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-cd $(dirname $0)/src
-RCFILE=~/teraflow/controller/coverage/.coveragerc
-
-# Run unitary tests and analyze coverage of code at same time
-coverage run --rcfile=$RCFILE -m pytest --log-level=DEBUG --verbose \
-    common/database/tests/test_unitary.py \
-    common/database/tests/test_engine_inmemory.py \
-    context/tests/test_unitary.py \
-    device/tests/test_unitary.py
-
-## Run integration tests and analyze coverage of code at same time
-#coverage run --rcfile=$RCFILE --append -m pytest --log-level=WARN --verbose \
-#    common/database/tests/test_integration_redis.py \
-#    device/tests/test_integration.py
diff --git a/src/common/database/Factory.py b/src/common/database/Factory.py
index 4b46ed0b145878c703fececd9b68fef54cbf7001..27dec581862c1636ac04efd57e48178a304bbf3b 100644
--- a/src/common/database/Factory.py
+++ b/src/common/database/Factory.py
@@ -1,8 +1,8 @@
 import logging, os
 from enum import Enum
-from .api.Database import Database
-from .engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
-from .engines.redis.RedisDatabaseEngine import RedisDatabaseEngine
+from common.database.api.Database import Database
+from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
+from common.database.engines.redis.RedisDatabaseEngine import RedisDatabaseEngine
 
 LOGGER = logging.getLogger(__name__)
 
diff --git a/src/common/database/api/Database.py b/src/common/database/api/Database.py
index c3aeaf628339f8ba58e3c616b7eb6a501cad9278..319f9bbd380241d11cf6a34cdd2f14a003aed560 100644
--- a/src/common/database/api/Database.py
+++ b/src/common/database/api/Database.py
@@ -1,18 +1,29 @@
 import logging
 from typing import List
-from ..engines._DatabaseEngine import _DatabaseEngine
-from .context.Context import Context
-from .Exceptions import WrongDatabaseEngine, MutexException
+from common.database.api.Exceptions import WrongDatabaseEngine, MutexException
+from common.database.api.context.Context import Context
+from common.database.api.context.Keys import KEY_CONTEXTS
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
+from common.database.engines._DatabaseEngine import _DatabaseEngine
 
 LOGGER = logging.getLogger(__name__)
 
-class Database:
+class Database(_Entity):
     def __init__(self, database_engine : _DatabaseEngine):
-        if not isinstance(database_engine, _DatabaseEngine):    
+        if not isinstance(database_engine, _DatabaseEngine):
             raise WrongDatabaseEngine('database_engine must inherit from _DatabaseEngine')
         self._database_engine = database_engine
+        super().__init__(self, 'root', 'database', {}, {})
         self._acquired = False
         self._owner_key = None
+        self._contexts = EntityCollection(self, KEY_CONTEXTS)
+
+    @property
+    def parent(self) -> 'Database': return self
+
+    @property
+    def database_engine(self) -> _DatabaseEngine: return self._database_engine
 
     def __enter__(self) -> '_DatabaseEngine':
         self._acquired, self._owner_key = self._database_engine.lock()
@@ -36,4 +47,7 @@ class Database:
         entries.sort()
         return ['[{:>4s}] {:100s} :: {}'.format(k_type, k_name, k_value) for k_name,k_type,k_value in entries]
 
-    def context(self, context_uuid : str) -> Context: return Context(context_uuid, self._database_engine)
+    @property
+    def contexts(self) -> EntityCollection: return self._contexts
+
+    def context(self, context_uuid : str) -> Context: return Context(context_uuid, self)
diff --git a/src/common/database/api/Constants.py b/src/common/database/api/context/Constants.py
similarity index 100%
rename from src/common/database/api/Constants.py
rename to src/common/database/api/context/Constants.py
diff --git a/src/common/database/api/context/Context.py b/src/common/database/api/context/Context.py
index 32991cc5ad2b29fe8492d42539edb7cccad7c5f1..f4b530dd2e4519568f9f27c97b0f78d8efbaa53d 100644
--- a/src/common/database/api/context/Context.py
+++ b/src/common/database/api/context/Context.py
@@ -1,20 +1,24 @@
-from typing import Dict
-from ...engines._DatabaseEngine import _DatabaseEngine
-from ..entity._RootEntity import _RootEntity
-from ..entity.EntityCollection import EntityCollection
-from .Keys import KEY_CONTEXT, KEY_TOPOLOGIES
-from .Topology import Topology
+from typing import TYPE_CHECKING, Dict, List
+from common.database.api.context.service.Service import Service
+from common.database.api.context.topology.Topology import Topology
+from common.database.api.context.Keys import KEY_CONTEXT, KEY_SERVICES, KEY_TOPOLOGIES
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
+
+if TYPE_CHECKING:
+    from common.database.api.Database import Database
 
 VALIDATORS = {}  # no attributes accepted
 TRANSCODERS = {} # no transcoding applied to attributes
 
-class Context(_RootEntity):
-    def __init__(self, context_uuid : str, database_engine : _DatabaseEngine):
-        super().__init__(database_engine, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS)
+class Context(_Entity):
+    def __init__(self, context_uuid : str, parent : 'Database'):
+        super().__init__(parent, context_uuid, KEY_CONTEXT, VALIDATORS, TRANSCODERS)
         self._topologies = EntityCollection(self, KEY_TOPOLOGIES)
+        self._services = EntityCollection(self, KEY_SERVICES)
 
     @property
-    def parent(self) -> 'Context': return self
+    def parent(self) -> 'Database': return self._parent
 
     @property
     def context(self) -> 'Context': return self
@@ -25,12 +29,21 @@ class Context(_RootEntity):
     @property
     def topologies(self) -> EntityCollection: return self._topologies
 
+    @property
+    def services(self) -> EntityCollection: return self._services
+
     def topology(self, topology_uuid : str) -> Topology: return Topology(topology_uuid, self)
 
-    def create(self) -> 'Context': return self
+    def service(self, service_uuid : str) -> Service: return Service(service_uuid, self)
+
+    def create(self) -> 'Context':
+        self.parent.contexts.add(self.context_uuid)
+        return self
 
     def delete(self):
+        for service_uuid in self.services.get(): self.service(service_uuid).delete()
         for topology_uuid in self.topologies.get(): self.topology(topology_uuid).delete()
+        self.parent.contexts.delete(self.context_uuid)
         self.attributes.delete()
 
     def dump_id(self) -> Dict:
@@ -38,5 +51,19 @@ class Context(_RootEntity):
             'contextUuid': {'uuid': self.context_uuid},
         }
 
+    def dump_topologies(self) -> List:
+        return [
+            self.topology(topology_uuid).dump() for topology_uuid in self.topologies.get()
+        ]
+
+    def dump_services(self) -> List:
+        return [
+            self.service(service_uuid).dump() for service_uuid in self.services.get()
+        ]
+
     def dump(self) -> Dict:
-        return {topology_uuid : self.topology(topology_uuid).dump() for topology_uuid in self.topologies.get()}
+        return {
+            'contextId': self.dump_id(),
+            'topologies': self.dump_topologies(),
+            'services': self.dump_services(),
+        }
diff --git a/src/common/database/api/context/Keys.py b/src/common/database/api/context/Keys.py
index 1c107ec1509949ef8b2f91a3e7e4505846fdf62a..0842bb4e4e639c8fc83a0733e8c951eb4994fafe 100644
--- a/src/common/database/api/context/Keys.py
+++ b/src/common/database/api/context/Keys.py
@@ -1,11 +1,37 @@
-KEY_CONTEXT          =                'context[{context_uuid}]'
-KEY_TOPOLOGIES       = KEY_CONTEXT  + '/topologies{container_name}'
-KEY_TOPOLOGY         = KEY_CONTEXT  + '/topology[{topology_uuid}]' 
-KEY_DEVICES          = KEY_TOPOLOGY + '/devices{container_name}'
-KEY_LINKS            = KEY_TOPOLOGY + '/links{container_name}'
-KEY_DEVICE           = KEY_TOPOLOGY + '/device[{device_uuid}]' 
-KEY_DEVICE_ENDPOINTS = KEY_DEVICE   + '/endpoints{container_name}'
-KEY_ENDPOINT         = KEY_DEVICE   + '/endpoint[{endpoint_uuid}]' 
-KEY_LINK             = KEY_TOPOLOGY + '/link[{link_uuid}]' 
-KEY_LINK_ENDPOINTS   = KEY_LINK     + '/endpoints{container_name}'
-KEY_LINK_ENDPOINT    = KEY_LINK     + '/endpoint[{link_endpoint_uuid}]'
+# Database keys
+KEY_CONTEXTS            =                'contexts{container_name}'
+
+# Context keys
+KEY_CONTEXT             =                'context[{context_uuid}]'
+KEY_TOPOLOGIES          = KEY_CONTEXT  + '/topologies{container_name}'
+KEY_SERVICES            = KEY_CONTEXT  + '/services{container_name}'
+
+# Context.Topology keys
+KEY_TOPOLOGY            = KEY_CONTEXT  + '/topology[{topology_uuid}]'
+KEY_DEVICES             = KEY_TOPOLOGY + '/devices{container_name}'
+KEY_LINKS               = KEY_TOPOLOGY + '/links{container_name}'
+
+# Context.Topology.Device keys
+KEY_DEVICE              = KEY_TOPOLOGY + '/device[{device_uuid}]'
+KEY_DEVICE_ENDPOINTS    = KEY_DEVICE   + '/endpoints{container_name}'
+
+# Context.Topology.Device.Endpoint keys
+KEY_DEVICE_ENDPOINT     = KEY_DEVICE   + '/endpoint[{endpoint_uuid}]'
+
+# Context.Topology.Link keys
+KEY_LINK                = KEY_TOPOLOGY + '/link[{link_uuid}]'
+KEY_LINK_ENDPOINTS      = KEY_LINK     + '/endpoints{container_name}'
+
+# Context.Topology.Link.Endpoint Keys
+KEY_LINK_ENDPOINT       = KEY_LINK     + '/endpoint[{endpoint_uuid}]'
+
+# Service keys
+KEY_SERVICE             = KEY_CONTEXT  + '/service[{service_uuid}]'
+KEY_SERVICE_ENDPOINTS   = KEY_SERVICE  + '/endpoints{container_name}'
+KEY_SERVICE_CONSTRAINTS = KEY_SERVICE  + '/constraints{container_name}'
+
+# Context.Service.Endpoint Keys
+KEY_SERVICE_ENDPOINT    = KEY_SERVICE  + '/endpoint[{endpoint_uuid}]'
+
+# Context.Service.Constraint Keys
+KEY_SERVICE_CONSTRAINT  = KEY_SERVICE  + '/constraint[{constraint_type}]'
diff --git a/src/common/database/api/context/_structure.txt b/src/common/database/api/context/_structure.txt
index 7168763e394ebd0499434e1f62a7bc3d74086c30..0dc6219f8801bcb9d9d26c61707d82eb2925e895 100644
--- a/src/common/database/api/context/_structure.txt
+++ b/src/common/database/api/context/_structure.txt
@@ -2,7 +2,7 @@
 # Internal structure #
 ######################
 
-Note (1): for containers like topologies, devices, links, etc. two containers are defined:
+Note (1): for containers like topologies, devices, links, services, etc. two containers are defined:
 list    List is a sorted list containing the uuid's of th elements belonging to the parent element. It is used to
         define the order of the elements and enable to iterate them deterministically.
 
@@ -26,6 +26,14 @@ context[<context_uuid>]/topologies_<container>
         context[ctx-test]/topologies_set
             {'base-topo', 'other-topo'}
 
+context[<context_uuid>]/services_<container>
+    Containers (see Note 1) with the service_uuid's belonging to the context.
+    Examples:
+        context[ctx-test]/services_list
+            ['service-1', 'service-2']
+        context[ctx-test]/services_set
+            {'service-1', 'service-2'}
+
 
 Topology structure:
 -------------------
@@ -58,11 +66,11 @@ context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]
     Defined attributes are:
         device_type              : string
         device_config            : string
-        device_operational_status: string "0"/"1"
+        device_operational_status: string "0" (KEEP_STATUS) / "-1" (DISABLED) / "1" (ENABLED)
     Example: {'device_type': 'ROADM', 'device_config': '<config/>', 'device_operational_status': '1'}
 
 context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoints_<container>
-    Containers (see Note 1) with the endpoints_uuid's belonging to the device.
+    Containers (see Note 1) with the device_endpoint_uuid's belonging to the device.
     Examples:
         context[ctx-test]/topology[base-topo]/device[dev1]/endpoints_list
             ['to-dev2', 'to-dev3', 'to-dev4']
@@ -72,8 +80,8 @@ context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoint
 
 Device Endpoint structure:
 --------------------------
-context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoint[<endpoint_uuid>]
-    Hash set containing the attributes for the endpoint.
+context[<context_uuid>]/topology[<topology_uuid>]/device[<device_uuid>]/endpoint[<device_endpoint_uuid>]
+    Hash set containing the attributes for the device_endpoint.
     Defined attributes are:
         port_type: string
     Example: {'port_type': 'WDM'}
@@ -103,5 +111,55 @@ context[<context_uuid>]/topology[<topology_uuid>]/link[<link_uuid>]/endpoint[<li
         device_uuid: string
         endpoint_uuid: string
     Example:
-        context[ctx-test]/topology[base-topo]/link[dev1/to-dev2 ==> dev2/to-dev1]/endpointdev1/to-dev2
+        context[ctx-test]/topology[base-topo]/link[dev1/to-dev2 ==> dev2/to-dev1]/endpoint[dev1/to-dev2]
             {'device_uuid': 'dev1', 'endpoint_uuid': 'to-dev2'}
+
+
+Service structure:
+------------------
+context[<context_uuid>]/service[<service_uuid>]
+    Hash set containing the attributes for the service.
+    Defined attributes are:
+        service_type  : string "0" (UNKNOWN) / "1" (L3NM) / "2" (L2NM) / "3" (TAPI_CONNECTIVITY_SERVICE)
+        service_config: string
+        service_state : string "0" (PLANNED) / "1" (ACTIVE) / "2" (PENDING_REMOVAL)
+    Example: ...
+
+context[<context_uuid>]/service[<service_uuid>]/endpoints_<container>
+    Containers (see Note 1) with the service_endpoint_uuid's belonging to the service.
+    Examples:
+        context[ctx-test]/service[service-1]/endpoints_list
+            ['base-topo:dev2/to-dev1', 'base-topo:dev3/to-dev1']
+        context[ctx-test]/service[service-1]/endpoints_set
+            {'base-topo:dev2/to-dev1', 'base-topo:dev3/to-dev1'}
+
+context[<context_uuid>]/service[<service_uuid>]/constraints_<container>
+    Containers (see Note 1) with the constraint_type's belonging to the service.
+    Examples:
+        context[ctx-test]/service[service-1]/constraints_list
+            ['latency_ms', 'hops']
+        context[ctx-test]/service[service-1]/constraints_set
+            {'latency_ms', 'hops'}
+
+
+Service Endpoint structure:
+---------------------------
+context[<context_uuid>]/service[<service_uuid>]/endpoint[<service_endpoint_uuid>]
+    Hash set containing the attributes for the service_endpoint.
+    Defined attributes are:
+        topology_uuid: string
+        device_uuid: string
+        endpoint_uuid: string
+    Example:
+        context[ctx-test]/service[service-1]/endpoint[dev2/to-dev1]
+            {'topology_uuid': 'base-topo', 'device_uuid': 'dev2', 'endpoint_uuid': 'to-dev1'}
+
+Service Constraint structure:
+-----------------------------
+context[<context_uuid>]/service[<service_uuid>]/constraint[<constraint_type>]
+    Hash set containing the attributes for the constraint.
+    Defined attributes are:
+        constraint_value: string
+    Example:
+        context[ctx-test]/service[service-1]/constraint[latency_ms]
+            {'constraint_value': '100'}
diff --git a/src/common/database/api/context/service/Constraint.py b/src/common/database/api/context/service/Constraint.py
new file mode 100644
index 0000000000000000000000000000000000000000..866f98c1761ba399a3d27fc440524515626a019d
--- /dev/null
+++ b/src/common/database/api/context/service/Constraint.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, Dict
+from common.database.api.context.Keys import KEY_SERVICE_CONSTRAINT
+from common.database.api.entity._Entity import _Entity
+
+if TYPE_CHECKING:
+    from common.database.api.context.Context import Context
+    from common.database.api.context.service.Service import Service
+
+VALIDATORS = {
+    'constraint_value': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+}
+
+TRANSCODERS = {} # no transcoding applied to attributes
+
+class Constraint(_Entity):
+    def __init__(self, constraint_type : str, parent : 'Service'):
+        super().__init__(parent, constraint_type, KEY_SERVICE_CONSTRAINT, VALIDATORS, TRANSCODERS)
+
+    @property
+    def parent(self) -> 'Service': return self._parent
+
+    @property
+    def context(self) -> 'Context': return self.parent.context
+
+    @property
+    def context_uuid(self) -> str: return self.parent.context_uuid
+
+    @property
+    def service(self) -> 'Service': return self.parent
+
+    @property
+    def service_uuid(self) -> str: return self.parent.service_uuid
+
+    @property
+    def constraint_type(self) -> str: return self._entity_uuid
+
+    def create(self, constraint_value : str) -> 'Constraint':
+        self.update(update_attributes={
+            'constraint_value': constraint_value,
+        })
+        self.parent.constraints.add(self.constraint_type)
+        return self
+
+    def update(self, update_attributes={}, remove_attributes=[]) -> 'Constraint':
+        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
+        return self
+
+    def delete(self) -> None:
+        self.attributes.delete()
+        self.parent.constraints.delete(self.constraint_type)
+
+    def dump_id(self) -> Dict:
+        return {
+            'constraint_type': self.constraint_type,
+        }
+
+    def dump(self) -> Dict:
+        attributes = self.attributes.get()
+        result = self.dump_id()
+        result.update({'constraint_value': attributes.get('constraint_value', None)})
+        return result
diff --git a/src/common/database/api/context/service/Endpoint.py b/src/common/database/api/context/service/Endpoint.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f86002371f7c2288b75332061ba928c120ca621
--- /dev/null
+++ b/src/common/database/api/context/service/Endpoint.py
@@ -0,0 +1,67 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, Dict
+from common.database.api.context.topology.device.Endpoint import Endpoint as DeviceEndpoint
+from common.database.api.context.Keys import KEY_SERVICE_ENDPOINT
+from common.database.api.entity._Entity import _Entity
+
+if TYPE_CHECKING:
+    from common.database.api.context.Context import Context
+    from common.database.api.context.service.Service import Service
+
+VALIDATORS = {
+    'topology_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'device_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'endpoint_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+}
+
+TRANSCODERS = {} # no transcoding applied to attributes
+
+class Endpoint(_Entity):
+    def __init__(self, endpoint_uuid : str, parent : 'Service'):
+        super().__init__(parent, endpoint_uuid, KEY_SERVICE_ENDPOINT, VALIDATORS, TRANSCODERS)
+
+    @property
+    def parent(self) -> 'Service': return self._parent
+
+    @property
+    def context(self) -> 'Context': return self.parent.context
+
+    @property
+    def context_uuid(self) -> str: return self.parent.context_uuid
+
+    @property
+    def service(self) -> 'Service': return self.parent
+
+    @property
+    def service_uuid(self) -> str: return self.parent.service_uuid
+
+    @property
+    def endpoint_uuid(self) -> str: return self._entity_uuid
+
+    def create(self, endpoint : DeviceEndpoint) -> 'Endpoint':
+        self.update(update_attributes={
+            'topology_uuid': endpoint.topology_uuid,
+            'device_uuid': endpoint.device_uuid,
+            'endpoint_uuid': endpoint.endpoint_uuid,
+        })
+        self.parent.endpoints.add(self.endpoint_uuid)
+        return self
+
+    def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
+        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
+        return self
+
+    def delete(self) -> None:
+        self.attributes.delete()
+        self.parent.endpoints.delete(self.endpoint_uuid)
+
+    def dump_id(self) -> Dict:
+        attributes = self.attributes.get()
+        topology_uuid = attributes.get('topology_uuid', None)
+        device_uuid = attributes.get('device_uuid', None)
+        endpoint_uuid = attributes.get('endpoint_uuid', None)
+        endpoint = self.context.topology(topology_uuid).device(device_uuid).endpoint(endpoint_uuid)
+        return endpoint.dump_id()
+
+    def dump(self) -> Dict:
+        return self.dump_id()
diff --git a/src/common/database/api/context/service/Service.py b/src/common/database/api/context/service/Service.py
new file mode 100644
index 0000000000000000000000000000000000000000..71fe3488db8691259e02d2c0ba76e7565adeaf15
--- /dev/null
+++ b/src/common/database/api/context/service/Service.py
@@ -0,0 +1,102 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, Dict
+from common.database.api.context.Keys import KEY_SERVICE, KEY_SERVICE_CONSTRAINTS, KEY_SERVICE_ENDPOINTS
+from common.database.api.context.service.Constraint import Constraint
+from common.database.api.context.service.Endpoint import Endpoint
+from common.database.api.context.service.ServiceState import ServiceState, to_servicestate_enum
+from common.database.api.context.service.ServiceType import ServiceType, to_servicetype_enum
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
+
+if TYPE_CHECKING:
+    from common.database.api.context.Context import Context
+
+VALIDATORS = {
+    'service_type': lambda v: v is not None and isinstance(v, ServiceType),
+    'service_config': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
+    'service_state': lambda v: v is not None and isinstance(v, ServiceState),
+}
+
+TRANSCODERS = {
+    'service_type': {
+        ServiceType: lambda v: v.value,
+        int        : lambda v: to_servicetype_enum(v),
+        str        : lambda v: to_servicetype_enum(v),
+    },
+    'service_state': {
+        ServiceState: lambda v: v.value,
+        int         : lambda v: to_servicestate_enum(v),
+        str         : lambda v: to_servicestate_enum(v),
+    },
+}
+
+class Service(_Entity):
+    def __init__(self, service_uuid : str, parent : 'Context'):
+        super().__init__(parent, service_uuid, KEY_SERVICE, VALIDATORS, TRANSCODERS)
+        self._endpoints = EntityCollection(self, KEY_SERVICE_ENDPOINTS)
+        self._constraints = EntityCollection(self, KEY_SERVICE_CONSTRAINTS)
+
+    @property
+    def parent(self) -> 'Context': return self._parent
+
+    @property
+    def context(self) -> 'Context': return self._parent
+
+    @property
+    def context_uuid(self) -> str: return self.context.context_uuid
+
+    @property
+    def service_uuid(self) -> str: return self._entity_uuid
+
+    @property
+    def endpoints(self) -> EntityCollection: return self._endpoints
+
+    @property
+    def constraints(self) -> EntityCollection: return self._constraints
+
+    def endpoint(self, endpoint_uuid : str) -> Endpoint: return Endpoint(endpoint_uuid, self)
+
+    def constraint(self, constraint_type : str) -> Constraint: return Constraint(constraint_type, self)
+
+    def create(self, service_type : ServiceType, service_config : str, service_state : ServiceState) -> 'Service':
+        self.update(update_attributes={
+            'service_type': service_type,
+            'service_config': service_config,
+            'service_state': service_state,
+        })
+        self.parent.services.add(self.service_uuid)
+        return self
+
+    def update(self, update_attributes={}, remove_attributes=[]) -> 'Service':
+        self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
+        return self
+
+    def delete(self) -> None:
+        for endpoint_uuid in self.endpoints.get(): self.endpoint(endpoint_uuid).delete()
+        for constraint_uuid in self.constraints.get(): self.constraint(constraint_uuid).delete()
+        self.attributes.delete()
+        self.parent.services.delete(self.service_uuid)
+
+    def dump_id(self) -> Dict:
+        return {
+            'contextId': self.context.dump_id(),
+            'cs_id': {'uuid': self.service_uuid},
+        }
+
+    def dump(self) -> Dict:
+        attributes = self.attributes.get()
+        service_type = attributes.get('service_type', None)
+        if isinstance(service_type, ServiceType): service_type = service_type.value
+        service_state = attributes.get('service_state', None)
+        if isinstance(service_state, ServiceState): service_state = service_state.value
+        service_config = attributes.get('service_config', None)
+        endpoints = [self.endpoint(endpoint_uuid).dump() for endpoint_uuid in self.endpoints.get()]
+        constraints = [self.constraint(constraint_type).dump() for constraint_type in self.constraints.get()]
+        return {
+            'cs_id': self.dump_id(),
+            'serviceType': service_type,
+            'endpointList': endpoints,
+            'constraint': constraints,
+            'serviceState': {'serviceState': service_state},
+            'serviceConfig': {'serviceConfig': service_config}
+        }
diff --git a/src/common/database/api/context/service/ServiceState.py b/src/common/database/api/context/service/ServiceState.py
new file mode 100644
index 0000000000000000000000000000000000000000..3855138d99c40b743885f256fe3aafbaa44aa18b
--- /dev/null
+++ b/src/common/database/api/context/service/ServiceState.py
@@ -0,0 +1,27 @@
+from enum import Enum
+
+class ServiceState(Enum):
+    PLANNED = 0
+    ACTIVE = 1
+    PENDING_REMOVAL = 2
+
+ANY_TO_ENUM = {
+    0: ServiceState.PLANNED,
+    1: ServiceState.ACTIVE,
+    2: ServiceState.PENDING_REMOVAL,
+
+    '0': ServiceState.PLANNED,
+    '1': ServiceState.ACTIVE,
+    '2': ServiceState.PENDING_REMOVAL,
+
+    'PLANNED': ServiceState.PLANNED,
+    'ACTIVE': ServiceState.ACTIVE,
+    'PENDING_REMOVAL': ServiceState.PENDING_REMOVAL,
+}
+
+def servicestate_enum_values():
+    return {m.value for m in ServiceState.__members__.values()}
+
+def to_servicestate_enum(int_or_str):
+    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
+    return ANY_TO_ENUM.get(int_or_str)
diff --git a/src/common/database/api/context/service/ServiceType.py b/src/common/database/api/context/service/ServiceType.py
new file mode 100644
index 0000000000000000000000000000000000000000..c779fc31c89c8746a547a38135f99d07716e2bbb
--- /dev/null
+++ b/src/common/database/api/context/service/ServiceType.py
@@ -0,0 +1,31 @@
+from enum import Enum
+
+class ServiceType(Enum):
+    UNKNOWN = 0
+    L3NM = 1
+    L2NM = 2
+    TAPI_CONNECTIVITY_SERVICE = 3
+
+ANY_TO_ENUM = {
+    0: ServiceType.UNKNOWN,
+    1: ServiceType.L3NM,
+    2: ServiceType.L2NM,
+    3: ServiceType.TAPI_CONNECTIVITY_SERVICE,
+
+    '0': ServiceType.UNKNOWN,
+    '1': ServiceType.L3NM,
+    '2': ServiceType.L2NM,
+    '3': ServiceType.TAPI_CONNECTIVITY_SERVICE,
+
+    'UNKNOWN': ServiceType.UNKNOWN,
+    'L3NM': ServiceType.L3NM,
+    'L2NM': ServiceType.L2NM,
+    'TAPI_CONNECTIVITY_SERVICE': ServiceType.TAPI_CONNECTIVITY_SERVICE,
+}
+
+def servicetype_enum_values():
+    return {m.value for m in ServiceType.__members__.values()}
+
+def to_servicetype_enum(int_or_str):
+    if isinstance(int_or_str, str): int_or_str = int_or_str.lower()
+    return ANY_TO_ENUM.get(int_or_str)
diff --git a/src/integration_tester/__init__.py b/src/common/database/api/context/service/__init__.py
similarity index 100%
rename from src/integration_tester/__init__.py
rename to src/common/database/api/context/service/__init__.py
diff --git a/src/common/database/api/context/Topology.py b/src/common/database/api/context/topology/Topology.py
similarity index 78%
rename from src/common/database/api/context/Topology.py
rename to src/common/database/api/context/topology/Topology.py
index 2fc36ed3c46a64dfb92337c2f62608529d2e65d9..de9cd67a41b822800f78f07208b090adf91b7bd8 100644
--- a/src/common/database/api/context/Topology.py
+++ b/src/common/database/api/context/topology/Topology.py
@@ -1,13 +1,13 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
-from ..entity._Entity import _Entity
-from ..entity.EntityCollection import EntityCollection
-from .Keys import KEY_TOPOLOGY, KEY_DEVICES, KEY_LINKS
-from .Device import Device
-from .Link import Link
+from common.database.api.context.Keys import KEY_TOPOLOGY, KEY_DEVICES, KEY_LINKS
+from common.database.api.context.topology.device.Device import Device
+from common.database.api.context.topology.link.Link import Link
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
 
 if TYPE_CHECKING:
-    from .Context import Context
+    from common.database.api.context.Context import Context
 
 VALIDATORS = {}  # no attributes accepted
 TRANSCODERS = {} # no transcoding applied to attributes
@@ -44,7 +44,7 @@ class Topology(_Entity):
         self.parent.topologies.add(self.topology_uuid)
         return self
 
-    def delete(self):
+    def delete(self) -> None:
         for device_uuid in self.devices.get(): self.device(device_uuid).delete()
         for link_uuid in self.links.get(): self.link(link_uuid).delete()
         self.attributes.delete()
@@ -58,7 +58,7 @@ class Topology(_Entity):
 
     def dump(self) -> Dict:
         devices = [self.device(device_uuid).dump() for device_uuid in self.devices.get()]
-        links   = [self.link  (link_uuid  ).dump() for link_uuid   in self.links.get()]
+        links = [self.link(link_uuid).dump() for link_uuid in self.links.get()]
         return {
             'topoId': self.dump_id(),
             'device': devices,
diff --git a/src/common/database/api/context/topology/__init__.py b/src/common/database/api/context/topology/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/database/api/context/Device.py b/src/common/database/api/context/topology/device/Device.py
similarity index 85%
rename from src/common/database/api/context/Device.py
rename to src/common/database/api/context/topology/device/Device.py
index fb4b5becb6de1f158447b3e7630ff6f87fbbdf7d..06c560051e1b3d7d930efb6ddca16d9096a2509e 100644
--- a/src/common/database/api/context/Device.py
+++ b/src/common/database/api/context/topology/device/Device.py
@@ -1,14 +1,14 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
-from ..entity._Entity import _Entity
-from ..entity.EntityCollection import EntityCollection
-from .Endpoint import Endpoint
-from .Keys import KEY_DEVICE, KEY_DEVICE_ENDPOINTS
-from .OperationalStatus import OperationalStatus, to_operationalstatus_enum
+from common.database.api.context.Keys import KEY_DEVICE, KEY_DEVICE_ENDPOINTS
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus, to_operationalstatus_enum
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
 
 if TYPE_CHECKING:
-    from .Context import Context
-    from .Topology import Topology
+    from common.database.api.context.Context import Context
+    from common.database.api.context.topology.Topology import Topology
 
 VALIDATORS = {
     'device_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
diff --git a/src/common/database/api/context/Endpoint.py b/src/common/database/api/context/topology/device/Endpoint.py
similarity index 81%
rename from src/common/database/api/context/Endpoint.py
rename to src/common/database/api/context/topology/device/Endpoint.py
index 413a680a8e8cdb13e8df120a2514c46497d7a071..8ea516f3e50ad14fd133048570555abf4d872372 100644
--- a/src/common/database/api/context/Endpoint.py
+++ b/src/common/database/api/context/topology/device/Endpoint.py
@@ -1,12 +1,12 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
-from ..entity._Entity import _Entity
-from .Keys import KEY_ENDPOINT
+from common.database.api.entity._Entity import _Entity
+from common.database.api.context.Keys import KEY_DEVICE_ENDPOINT
 
 if TYPE_CHECKING:
-    from .Context import Context
-    from .Topology import Topology
-    from .Device import Device
+    from common.database.api.context.Context import Context
+    from common.database.api.context.topology.Topology import Topology
+    from common.database.api.context.topology.device.Device import Device
 
 VALIDATORS = {
     'port_type': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
@@ -16,7 +16,7 @@ TRANSCODERS = {} # no transcoding applied to attributes
 
 class Endpoint(_Entity):
     def __init__(self, endpoint_uuid : str, parent : 'Device'):
-        super().__init__(parent, endpoint_uuid, KEY_ENDPOINT, VALIDATORS, TRANSCODERS)
+        super().__init__(parent, endpoint_uuid, KEY_DEVICE_ENDPOINT, VALIDATORS, TRANSCODERS)
 
     @property
     def parent(self) -> 'Device': return self._parent
diff --git a/src/common/database/api/context/OperationalStatus.py b/src/common/database/api/context/topology/device/OperationalStatus.py
similarity index 100%
rename from src/common/database/api/context/OperationalStatus.py
rename to src/common/database/api/context/topology/device/OperationalStatus.py
diff --git a/src/common/database/api/context/topology/device/__init__.py b/src/common/database/api/context/topology/device/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/database/api/context/LinkEndpoint.py b/src/common/database/api/context/topology/link/Endpoint.py
similarity index 66%
rename from src/common/database/api/context/LinkEndpoint.py
rename to src/common/database/api/context/topology/link/Endpoint.py
index 4acb62fdb36fe78f65710d361370f41962312859..0fbdd26cbc222f7591ccfc8fbfdc6aa9062ead58 100644
--- a/src/common/database/api/context/LinkEndpoint.py
+++ b/src/common/database/api/context/topology/link/Endpoint.py
@@ -1,13 +1,13 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
-from ..entity._Entity import _Entity
-from .Endpoint import Endpoint
-from .Keys import KEY_LINK_ENDPOINT
+from common.database.api.context.topology.device.Endpoint import Endpoint as DeviceEndpoint
+from common.database.api.context.Keys import KEY_LINK_ENDPOINT
+from common.database.api.entity._Entity import _Entity
 
 if TYPE_CHECKING:
-    from .Context import Context
-    from .Topology import Topology
-    from .Link import Link
+    from common.database.api.context.Context import Context
+    from common.database.api.context.topology.Topology import Topology
+    from common.database.api.context.topology.link.Link import Link
 
 VALIDATORS = {
     'device_uuid': lambda v: v is not None and isinstance(v, str) and (len(v) > 0),
@@ -16,9 +16,9 @@ VALIDATORS = {
 
 TRANSCODERS = {} # no transcoding applied to attributes
 
-class LinkEndpoint(_Entity):
-    def __init__(self, link_endpoint_uuid : str, parent : 'Link'):
-        super().__init__(parent, link_endpoint_uuid, KEY_LINK_ENDPOINT, VALIDATORS, TRANSCODERS)
+class Endpoint(_Entity):
+    def __init__(self, endpoint_uuid : str, parent : 'Link'):
+        super().__init__(parent, endpoint_uuid, KEY_LINK_ENDPOINT, VALIDATORS, TRANSCODERS)
 
     @property
     def parent(self) -> 'Link': return self._parent
@@ -42,23 +42,23 @@ class LinkEndpoint(_Entity):
     def link_uuid(self) -> str: return self.parent.link_uuid
 
     @property
-    def link_endpoint_uuid(self) -> str: return self._entity_uuid
+    def endpoint_uuid(self) -> str: return self._entity_uuid
 
-    def create(self, endpoint : Endpoint) -> 'LinkEndpoint':
+    def create(self, endpoint : DeviceEndpoint) -> 'Endpoint':
         self.update(update_attributes={
             'device_uuid': endpoint.device_uuid,
             'endpoint_uuid': endpoint.endpoint_uuid,
         })
-        self.parent.endpoints.add(self.link_endpoint_uuid)
+        self.parent.endpoints.add(self.endpoint_uuid)
         return self
 
-    def update(self, update_attributes={}, remove_attributes=[]) -> 'LinkEndpoint':
+    def update(self, update_attributes={}, remove_attributes=[]) -> 'Endpoint':
         self.attributes.update(update_attributes=update_attributes, remove_attributes=remove_attributes)
         return self
 
     def delete(self) -> None:
         self.attributes.delete()
-        self.parent.endpoints.delete(self.link_endpoint_uuid)
+        self.parent.endpoints.delete(self.endpoint_uuid)
 
     def dump_id(self) -> Dict:
         attributes = self.attributes.get()
diff --git a/src/common/database/api/context/Link.py b/src/common/database/api/context/topology/link/Link.py
similarity index 76%
rename from src/common/database/api/context/Link.py
rename to src/common/database/api/context/topology/link/Link.py
index bf661dbb2897822a45071c157619b97c1ebca1d9..41d72e0d2284b6569a550216e0539ce950fcdc14 100644
--- a/src/common/database/api/context/Link.py
+++ b/src/common/database/api/context/topology/link/Link.py
@@ -1,13 +1,13 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING, Dict
-from ..entity._Entity import _Entity
-from ..entity.EntityCollection import EntityCollection
-from .LinkEndpoint import LinkEndpoint
-from .Keys import KEY_LINK, KEY_LINK_ENDPOINTS
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityCollection import EntityCollection
+from common.database.api.context.topology.link.Endpoint import Endpoint
+from common.database.api.context.Keys import KEY_LINK, KEY_LINK_ENDPOINTS
 
 if TYPE_CHECKING:
-    from .Context import Context
-    from .Topology import Topology
+    from common.database.api.context.Context import Context
+    from common.database.api.context.topology.Topology import Topology
 
 VALIDATORS = {}  # no attributes accepted
 TRANSCODERS = {} # no transcoding applied to attributes
@@ -38,7 +38,7 @@ class Link(_Entity):
     @property
     def endpoints(self) -> EntityCollection: return self._endpoints
 
-    def endpoint(self, link_endpoint_uuid : str) -> LinkEndpoint: return LinkEndpoint(link_endpoint_uuid, self)
+    def endpoint(self, link_endpoint_uuid : str) -> Endpoint: return Endpoint(link_endpoint_uuid, self)
 
     def create(self) -> 'Link':
         self.parent.links.add(self.link_uuid)
diff --git a/src/common/database/api/context/topology/link/__init__.py b/src/common/database/api/context/topology/link/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/database/api/entity/EntityAttributes.py b/src/common/database/api/entity/EntityAttributes.py
index 47642b823de3164db18667aa474a95aed84e730c..b3e553453f7ebdda9439c81e11e1833ab32f1e41 100644
--- a/src/common/database/api/entity/EntityAttributes.py
+++ b/src/common/database/api/entity/EntityAttributes.py
@@ -1,11 +1,11 @@
 from __future__ import annotations
 import copy
 from typing import Any, Dict, TYPE_CHECKING
-from ...engines._DatabaseEngine import _DatabaseEngine
-from .Tools import format_key
+from common.database.engines._DatabaseEngine import _DatabaseEngine
+from common.database.api.entity.Tools import format_key
 
 if TYPE_CHECKING:
-    from ._Entity import _Entity
+    from common.database.api.entity._Entity import _Entity
 
 class EntityAttributes:
     def __init__(self, parent : '_Entity', entity_key : str, validators : Dict, transcoders : Dict = {}):
diff --git a/src/common/database/api/entity/EntityCollection.py b/src/common/database/api/entity/EntityCollection.py
index 4f3ffae040767fd7ce0b45dbb52595b944c40a5c..ed155370b43a91c7d64c921433d65327733fba54 100644
--- a/src/common/database/api/entity/EntityCollection.py
+++ b/src/common/database/api/entity/EntityCollection.py
@@ -1,10 +1,10 @@
 from __future__ import annotations
 from typing import TYPE_CHECKING
-from ...engines._DatabaseEngine import _DatabaseEngine
-from .Tools import format_key
+from common.database.engines._DatabaseEngine import _DatabaseEngine
+from common.database.api.entity.Tools import format_key
 
 if TYPE_CHECKING:
-    from ._Entity import _Entity
+    from common.database.api.entity._Entity import _Entity
 
 class EntityCollection:
     def __init__(self, parent : '_Entity', entity_key : str):
diff --git a/src/common/database/api/entity/_Entity.py b/src/common/database/api/entity/_Entity.py
index 9d0b3dfb05f58e9af1a595e7527f3e48b5bb7a5d..784ffbf61c7958329c28f1c7ac371f5126289971 100644
--- a/src/common/database/api/entity/_Entity.py
+++ b/src/common/database/api/entity/_Entity.py
@@ -1,6 +1,6 @@
 from typing import Any, Callable, Dict
-from ...engines._DatabaseEngine import _DatabaseEngine
-from .EntityAttributes import EntityAttributes
+from common.database.engines._DatabaseEngine import _DatabaseEngine
+from common.database.api.entity.EntityAttributes import EntityAttributes
 
 class _Entity:
     def __init__(self, parent, entity_uuid : str, attributes_key : str,
diff --git a/src/common/database/api/entity/_RootEntity.py b/src/common/database/api/entity/_RootEntity.py
deleted file mode 100644
index 6047bec8b268d4e412161d4515b2c3c7c56d800a..0000000000000000000000000000000000000000
--- a/src/common/database/api/entity/_RootEntity.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from typing import Any, Callable, Dict
-from ._Entity import _Entity
-from ...engines._DatabaseEngine import _DatabaseEngine
-
-class _RootEntity(_Entity):
-    def __init__(self, database_engine : _DatabaseEngine, entity_uuid: str, attributes_key: str,
-                 attributes_validators: Dict[str, Callable[[Any], bool]],
-                 attribute_transcoders: Dict[str, Dict[Any, Callable[[Any], Any]]]):
-        self._database_engine = database_engine
-        super().__init__(self, entity_uuid, attributes_key, attributes_validators, attribute_transcoders)
-
-    @property
-    def parent(self) -> '_RootEntity': return self
-
-    @property
-    def database_engine(self) -> _DatabaseEngine: return self._database_engine
diff --git a/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py b/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py
index ba828d79e8ee569703162f621f0630dae68a07b4..80c1669c73f603ac1eb477d883196b144d1ea143 100644
--- a/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py
+++ b/src/common/database/engines/inmemory/InMemoryDatabaseEngine.py
@@ -1,6 +1,6 @@
 import copy, logging, threading, uuid
 from typing import Dict, List, Set, Tuple, Union
-from .._DatabaseEngine import _DatabaseEngine
+from common.database.engines._DatabaseEngine import _DatabaseEngine
 
 LOGGER = logging.getLogger(__name__)
 
diff --git a/src/common/database/engines/redis/RedisDatabaseEngine.py b/src/common/database/engines/redis/RedisDatabaseEngine.py
index e8777796823424fc9fd37a691c0616f5915b3dfa..4e7e071040e3acd2bfe7d715e426c302b8dde1f7 100644
--- a/src/common/database/engines/redis/RedisDatabaseEngine.py
+++ b/src/common/database/engines/redis/RedisDatabaseEngine.py
@@ -1,8 +1,8 @@
 import os, uuid
 from typing import Dict, List, Set, Tuple
 from redis.client import Redis
-from .._DatabaseEngine import _DatabaseEngine
-from .Mutex import Mutex
+from common.database.engines._DatabaseEngine import _DatabaseEngine
+from common.database.engines.redis.Mutex import Mutex
 
 KEY_ENTIRE_DATABASE_LOCK = 'everything'
 
diff --git a/src/common/database/tests/script.py b/src/common/database/tests/script.py
index 2bd1a40296f943004d4c90db519406be512a449b..78efa9d6aaaf7c5288faf112b70d200b917b82f1 100644
--- a/src/common/database/tests/script.py
+++ b/src/common/database/tests/script.py
@@ -1,88 +1,133 @@
 import json, logging, time
-from ..api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-from ..api.context.OperationalStatus import OperationalStatus
-from ..api.Database import Database
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.service.ServiceState import ServiceState
+from common.database.api.context.service.ServiceType import ServiceType
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
 
 LOGGER = logging.getLogger(__name__)
 
-def populate_example(database : Database, context_uuid=DEFAULT_CONTEXT_ID, topology_uuid=DEFAULT_TOPOLOGY_ID):
-    database.clear_all()
+def populate_example(
+    database : Database, context_uuid : str = DEFAULT_CONTEXT_ID, topology_uuid : str = DEFAULT_TOPOLOGY_ID,
+    add_devices : bool = True, add_links : bool = True, add_services : bool = True):
+
+    if add_links:
+        if not add_devices: raise Exception('add_links requires add_devices')
+
+    if add_services:
+        if not add_devices: raise Exception('add_services requires add_devices')
+        if not add_links: raise Exception('add_services requires add_links')
 
     with database:
+        database.clear_all()
+
         context = database.context(context_uuid).create()
         topology = context.topology(topology_uuid).create()
 
-        device_1 = topology.device('dev1').create(
-            device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-        endpoint_dev1_to_dev2 = device_1.endpoint('to-dev2').create(port_type='WDM')
-        endpoint_dev1_to_dev3 = device_1.endpoint('to-dev3').create(port_type='WDM')
-        endpoint_dev1_to_dev4 = device_1.endpoint('to-dev4').create(port_type='WDM')
-
-        device_2 = topology.device('dev2').create(
-            device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-        endpoint_dev2_to_dev1 = device_2.endpoint('to-dev1').create(port_type='WDM')
-        endpoint_dev2_to_dev3 = device_2.endpoint('to-dev3').create(port_type='WDM')
-        endpoint_dev2_to_dev4 = device_2.endpoint('to-dev4').create(port_type='WDM')
-
-        device_3 = topology.device('dev3').create(
-            device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-        endpoint_dev3_to_dev1 = device_3.endpoint('to-dev1').create(port_type='WDM')
-        endpoint_dev3_to_dev2 = device_3.endpoint('to-dev2').create(port_type='WDM')
-        endpoint_dev3_to_dev4 = device_3.endpoint('to-dev4').create(port_type='WDM')
-
-        device_4 = topology.device('dev4').create(
-            device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
-        endpoint_dev4_to_dev1 = device_4.endpoint('to-dev1').create(port_type='WDM')
-        endpoint_dev4_to_dev2 = device_4.endpoint('to-dev2').create(port_type='WDM')
-        endpoint_dev4_to_dev3 = device_4.endpoint('to-dev3').create(port_type='WDM')
-
-        link_dev1_to_dev2 = topology.link('dev1/to-dev2 ==> dev2/to-dev1').create()
-        link_dev1_to_dev2.endpoint('dev1/to-dev2').create(endpoint_dev1_to_dev2)
-        link_dev1_to_dev2.endpoint('dev2/to-dev1').create(endpoint_dev2_to_dev1)
-
-        link_dev1_to_dev3 = topology.link('dev1/to-dev3 ==> dev3/to-dev1').create()
-        link_dev1_to_dev3.endpoint('dev1/to-dev3').create(endpoint_dev1_to_dev3)
-        link_dev1_to_dev3.endpoint('dev3/to-dev1').create(endpoint_dev3_to_dev1)
-
-        link_dev1_to_dev4 = topology.link('dev1/to-dev4 ==> dev4/to-dev1').create()
-        link_dev1_to_dev4.endpoint('dev1/to-dev4').create(endpoint_dev1_to_dev4)
-        link_dev1_to_dev4.endpoint('dev4/to-dev1').create(endpoint_dev4_to_dev1)
-
-        link_dev2_to_dev1 = topology.link('dev2/to-dev1 ==> dev1/to-dev2').create()
-        link_dev2_to_dev1.endpoint('dev2/to-dev1').create(endpoint_dev2_to_dev1)
-        link_dev2_to_dev1.endpoint('dev1/to-dev2').create(endpoint_dev1_to_dev2)
-
-        link_dev2_to_dev3 = topology.link('dev2/to-dev3 ==> dev3/to-dev2').create()
-        link_dev2_to_dev3.endpoint('dev2/to-dev3').create(endpoint_dev2_to_dev3)
-        link_dev2_to_dev3.endpoint('dev3/to-dev2').create(endpoint_dev3_to_dev2)
-
-        link_dev2_to_dev4 = topology.link('dev2/to-dev4 ==> dev4/to-dev2').create()
-        link_dev2_to_dev4.endpoint('dev2/to-dev4').create(endpoint_dev2_to_dev4)
-        link_dev2_to_dev4.endpoint('dev4/to-dev2').create(endpoint_dev4_to_dev2)
-
-        link_dev3_to_dev1 = topology.link('dev3/to-dev1 ==> dev1/to-dev3').create()
-        link_dev3_to_dev1.endpoint('dev3/to-dev1').create(endpoint_dev3_to_dev1)
-        link_dev3_to_dev1.endpoint('dev1/to-dev3').create(endpoint_dev1_to_dev3)
-
-        link_dev3_to_dev2 = topology.link('dev3/to-dev2 ==> dev2/to-dev3').create()
-        link_dev3_to_dev2.endpoint('dev3/to-dev2').create(endpoint_dev3_to_dev2)
-        link_dev3_to_dev2.endpoint('dev2/to-dev3').create(endpoint_dev2_to_dev3)
-
-        link_dev3_to_dev4 = topology.link('dev3/to-dev4 ==> dev4/to-dev3').create()
-        link_dev3_to_dev4.endpoint('dev3/to-dev4').create(endpoint_dev3_to_dev4)
-        link_dev3_to_dev4.endpoint('dev4/to-dev3').create(endpoint_dev4_to_dev3)
-
-        link_dev4_to_dev1 = topology.link('dev4/to-dev1 ==> dev1/to-dev4').create()
-        link_dev4_to_dev1.endpoint('dev4/to-dev1').create(endpoint_dev4_to_dev1)
-        link_dev4_to_dev1.endpoint('dev1/to-dev4').create(endpoint_dev1_to_dev4)
-
-        link_dev4_to_dev2 = topology.link('dev4/to-dev2 ==> dev2/to-dev4').create()
-        link_dev4_to_dev2.endpoint('dev4/to-dev2').create(endpoint_dev4_to_dev2)
-        link_dev4_to_dev2.endpoint('dev2/to-dev4').create(endpoint_dev2_to_dev4)
-
-        link_dev4_to_dev3 = topology.link('dev4/to-dev3 ==> dev3/to-dev4').create()
-        link_dev4_to_dev3.endpoint('dev4/to-dev3').create(endpoint_dev4_to_dev3)
-        link_dev4_to_dev3.endpoint('dev3/to-dev4').create(endpoint_dev3_to_dev4)
+        if add_devices:
+            dev_1 = topology.device('DEV1').create(
+                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
+            dev1_ep2 = dev_1.endpoint('EP2').create(port_type='WDM')
+            dev1_ep3 = dev_1.endpoint('EP3').create(port_type='WDM')
+            dev1_ep4 = dev_1.endpoint('EP4').create(port_type='WDM')
+            dev1_ep5 = dev_1.endpoint('EP5').create(port_type='OCH')
+            dev1_ep6 = dev_1.endpoint('EP6').create(port_type='OCH')
+            dev1_ep7 = dev_1.endpoint('EP7').create(port_type='OCH')
+
+            dev_2 = topology.device('DEV2').create(
+                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
+            dev2_ep1 = dev_2.endpoint('EP1').create(port_type='WDM')
+            dev2_ep3 = dev_2.endpoint('EP3').create(port_type='WDM')
+            dev2_ep4 = dev_2.endpoint('EP4').create(port_type='WDM')
+            dev2_ep5 = dev_2.endpoint('EP5').create(port_type='OCH')
+            dev2_ep6 = dev_2.endpoint('EP6').create(port_type='OCH')
+            dev2_ep7 = dev_2.endpoint('EP7').create(port_type='OCH')
+
+            dev_3 = topology.device('DEV3').create(
+                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
+            dev3_ep1 = dev_3.endpoint('EP1').create(port_type='WDM')
+            dev3_ep2 = dev_3.endpoint('EP2').create(port_type='WDM')
+            dev3_ep4 = dev_3.endpoint('EP4').create(port_type='WDM')
+            dev3_ep5 = dev_3.endpoint('EP5').create(port_type='OCH')
+            dev3_ep6 = dev_3.endpoint('EP6').create(port_type='OCH')
+            dev3_ep7 = dev_3.endpoint('EP7').create(port_type='OCH')
+
+            dev_4 = topology.device('DEV4').create(
+                device_type='ROADM', device_config='<config/>', device_operational_status=OperationalStatus.ENABLED)
+            dev4_ep1 = dev_4.endpoint('EP1').create(port_type='WDM')
+            dev4_ep2 = dev_4.endpoint('EP2').create(port_type='WDM')
+            dev4_ep3 = dev_4.endpoint('EP3').create(port_type='WDM')
+            dev4_ep5 = dev_4.endpoint('EP5').create(port_type='OCH')
+            dev4_ep6 = dev_4.endpoint('EP6').create(port_type='OCH')
+            dev4_ep7 = dev_4.endpoint('EP7').create(port_type='OCH')
+
+        if add_links:
+            link_dev1_to_dev2 = topology.link('DEV1/EP2 ==> DEV2/EP1').create()
+            link_dev1_to_dev2.endpoint('DEV1/EP2').create(dev1_ep2)
+            link_dev1_to_dev2.endpoint('DEV2/EP1').create(dev2_ep1)
+
+            link_dev1_to_dev3 = topology.link('DEV1/EP3 ==> DEV3/EP1').create()
+            link_dev1_to_dev3.endpoint('DEV1/EP3').create(dev1_ep3)
+            link_dev1_to_dev3.endpoint('DEV3/EP1').create(dev3_ep1)
+
+            link_dev1_to_dev4 = topology.link('DEV1/EP4 ==> DEV4/EP1').create()
+            link_dev1_to_dev4.endpoint('DEV1/EP4').create(dev1_ep4)
+            link_dev1_to_dev4.endpoint('DEV4/EP1').create(dev4_ep1)
+
+            link_dev2_to_dev1 = topology.link('DEV2/EP1 ==> DEV1/EP2').create()
+            link_dev2_to_dev1.endpoint('DEV2/EP1').create(dev2_ep1)
+            link_dev2_to_dev1.endpoint('DEV1/EP2').create(dev1_ep2)
+
+            link_dev2_to_dev3 = topology.link('DEV2/EP3 ==> DEV3/EP2').create()
+            link_dev2_to_dev3.endpoint('DEV2/EP3').create(dev2_ep3)
+            link_dev2_to_dev3.endpoint('DEV3/EP2').create(dev3_ep2)
+
+            link_dev2_to_dev4 = topology.link('DEV2/EP4 ==> DEV4/EP2').create()
+            link_dev2_to_dev4.endpoint('DEV2/EP4').create(dev2_ep4)
+            link_dev2_to_dev4.endpoint('DEV4/EP2').create(dev4_ep2)
+
+            link_dev3_to_dev1 = topology.link('DEV3/EP1 ==> DEV1/EP3').create()
+            link_dev3_to_dev1.endpoint('DEV3/EP1').create(dev3_ep1)
+            link_dev3_to_dev1.endpoint('DEV1/EP3').create(dev1_ep3)
+
+            link_dev3_to_dev2 = topology.link('DEV3/EP2 ==> DEV2/EP3').create()
+            link_dev3_to_dev2.endpoint('DEV3/EP2').create(dev3_ep2)
+            link_dev3_to_dev2.endpoint('DEV2/EP3').create(dev2_ep3)
+
+            link_dev3_to_dev4 = topology.link('DEV3/EP4 ==> DEV4/EP3').create()
+            link_dev3_to_dev4.endpoint('DEV3/EP4').create(dev3_ep4)
+            link_dev3_to_dev4.endpoint('DEV4/EP3').create(dev4_ep3)
+
+            link_dev4_to_dev1 = topology.link('DEV4/EP1 ==> DEV1/EP4').create()
+            link_dev4_to_dev1.endpoint('DEV4/EP1').create(dev4_ep1)
+            link_dev4_to_dev1.endpoint('DEV1/EP4').create(dev1_ep4)
+
+            link_dev4_to_dev2 = topology.link('DEV4/EP2 ==> DEV2/EP4').create()
+            link_dev4_to_dev2.endpoint('DEV4/EP2').create(dev4_ep2)
+            link_dev4_to_dev2.endpoint('DEV2/EP4').create(dev2_ep4)
+
+            link_dev4_to_dev3 = topology.link('DEV4/EP3 ==> DEV3/EP4').create()
+            link_dev4_to_dev3.endpoint('DEV4/EP3').create(dev4_ep3)
+            link_dev4_to_dev3.endpoint('DEV3/EP4').create(dev3_ep4)
+
+        if add_services:
+            service = context.service('S01').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
+            service.endpoint('S01/EP01').create(dev1_ep5)
+            service.endpoint('S01/EP02').create(dev2_ep5)
+            service.endpoint('S01/EP03').create(dev3_ep5)
+            service.endpoint('S01/EP04').create(dev4_ep5)
+
+            service = context.service('S02').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
+            service.endpoint('S02/EP01').create(dev1_ep6)
+            service.endpoint('S02/EP02').create(dev2_ep6)
+            service.endpoint('S02/EP03').create(dev3_ep6)
+            service.endpoint('S02/EP04').create(dev4_ep6)
+
+            service = context.service('S03').create(ServiceType.L3NM, '<config/>', ServiceState.PLANNED)
+            service.endpoint('S03/EP01').create(dev1_ep7)
+            service.endpoint('S03/EP02').create(dev2_ep7)
+            service.endpoint('S03/EP03').create(dev3_ep7)
+            service.endpoint('S03/EP04').create(dev4_ep7)
 
 def sequence(database : Database):
     populate_example(database)
diff --git a/src/common/database/tests/test_engine_inmemory.py b/src/common/database/tests/test_engine_inmemory.py
index 403f428200a6580a0fc32bfe2b90ce20b855d2a9..e3afd995573b04926bbec080e2dd5797eac6ce00 100644
--- a/src/common/database/tests/test_engine_inmemory.py
+++ b/src/common/database/tests/test_engine_inmemory.py
@@ -1,6 +1,6 @@
 import logging
-from ..Factory import get_database, DatabaseEngineEnum
-from .script import sequence
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.tests.script import sequence
 
 logging.basicConfig(level=logging.INFO)
 
diff --git a/src/common/database/tests/test_engine_redis.py b/src/common/database/tests/test_engine_redis.py
index 39df1cf393d22fccfd1f14e0b47327e9cf79f73e..e68600db6a8e196890526c97d79b132c03dd4b32 100644
--- a/src/common/database/tests/test_engine_redis.py
+++ b/src/common/database/tests/test_engine_redis.py
@@ -1,6 +1,6 @@
 import logging
-from ..Factory import get_database, DatabaseEngineEnum
-from .script import sequence
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.tests.script import sequence
 
 logging.basicConfig(level=logging.INFO)
 
diff --git a/src/common/database/tests/test_unitary.py b/src/common/database/tests/test_unitary.py
index c00e2f7c9165ee84bccf4a34f06c0915bf4726c9..8589c7cfacb04505c75108f510b5f7bcca4005a2 100644
--- a/src/common/database/tests/test_unitary.py
+++ b/src/common/database/tests/test_unitary.py
@@ -1,11 +1,10 @@
 import logging, pytest
-from ..api.Database import Database
-from ..api.entity._Entity import _Entity
-from ..api.entity._RootEntity import _RootEntity
-from ..api.entity.EntityAttributes import EntityAttributes
-from ..api.Exceptions import WrongDatabaseEngine
-from ..engines._DatabaseEngine import _DatabaseEngine
-from ..engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
+from common.database.api.Database import Database
+from common.database.api.entity._Entity import _Entity
+from common.database.api.entity.EntityAttributes import EntityAttributes
+from common.database.api.Exceptions import WrongDatabaseEngine
+from common.database.engines._DatabaseEngine import _DatabaseEngine
+from common.database.engines.inmemory.InMemoryDatabaseEngine import InMemoryDatabaseEngine
 
 logging.basicConfig(level=logging.INFO)
 
@@ -21,10 +20,6 @@ def test_database_gets_correct_database_engine():
 
 def test_entity_gets_invalid_parameters():
 
-    class RootMockEntity(_RootEntity):
-        def __init__(self, database_engine : _DatabaseEngine):
-            super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {})
-
     # should fail with invalid parent
     with pytest.raises(AttributeError) as e:
         _Entity(None, 'valid-uuid', 'valid-attributes-key', {}, {})
@@ -32,45 +27,41 @@ def test_entity_gets_invalid_parameters():
 
     # should fail with invalid entity uuid
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {})
+        _Entity(Database(InMemoryDatabaseEngine()), None, 'valid-attributes-key', {}, {})
     assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
 
     # should fail with invalid entity uuid
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {})
+        _Entity(Database(InMemoryDatabaseEngine()), '', 'valid-attributes-key', {}, {})
     assert str(e.value) == 'entity_uuid must be a non-empty instance of str'
 
     # should fail with invalid attribute key
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {})
+        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', None, {}, {})
     assert str(e.value) == 'attributes_key must be a non-empty instance of str'
 
     # should fail with invalid attribute key
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {})
+        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', '', {}, {})
     assert str(e.value) == 'attributes_key must be a non-empty instance of str'
 
     # should fail with invalid attribute validators
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {})
+        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', [], {})
     assert str(e.value) == 'attribute_validators must be an instance of dict'
 
     # should fail with invalid attribute transcoders
     with pytest.raises(AttributeError) as e:
-        _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, [])
+        _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, [])
     assert str(e.value) == 'attribute_transcoders must be an instance of dict'
 
     # should work
-    assert _Entity(RootMockEntity(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None
+    assert _Entity(Database(InMemoryDatabaseEngine()), 'valid-uuid', 'valid-attributes-key', {}, {}) is not None
 
 def test_entity_attributes_gets_invalid_parameters():
 
-    class RootMockEntity(_RootEntity):
-        def __init__(self, database_engine : _DatabaseEngine):
-            super().__init__(database_engine, 'valid-uuid', 'valid-key', {}, {})
-
     # should work
-    root_entity = RootMockEntity(InMemoryDatabaseEngine())
+    root_entity = Database(InMemoryDatabaseEngine())
     validators = {'attr': lambda v: True}
     entity_attrs = EntityAttributes(root_entity, 'valid-attributes-key', validators, {})
     assert entity_attrs is not None
diff --git a/src/common/exceptions/ServiceException.py b/src/common/exceptions/ServiceException.py
index 476650b039b90c9cef318015664cf72509df4b58..864e44e2345dcc53ae54fb9dcc35ea1efd5cdd52 100644
--- a/src/common/exceptions/ServiceException.py
+++ b/src/common/exceptions/ServiceException.py
@@ -4,4 +4,4 @@ class ServiceException(Exception):
     def __init__(self, code : grpc.StatusCode, details : str) -> None:
         self.code = code
         self.details = details
-        super().__init__()
+        super().__init__(self.details)
diff --git a/src/common/tests/Assertions.py b/src/common/tests/Assertions.py
index 7e08621f107805dd89978ad380675ef7b547d582..c7b87a671f88ab65768525c73dcf2b34361b579f 100644
--- a/src/common/tests/Assertions.py
+++ b/src/common/tests/Assertions.py
@@ -10,6 +10,12 @@ def validate_uuid(message, allow_empty=False):
     if allow_empty: return
     assert len(message['uuid']) > 1
 
+def validate_context_id(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'contextUuid' in message
+    validate_uuid(message['contextUuid'])
+
 def validate_device_id(message):
     assert type(message) is dict
     assert len(message.keys()) == 1
@@ -22,12 +28,77 @@ def validate_link_id(message):
     assert 'link_id' in message
     validate_uuid(message['link_id'])
 
+def validate_topology_id(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 2
+    assert 'contextId' in message
+    validate_context_id(message['contextId'])
+    assert 'topoId' in message
+    validate_uuid(message['topoId'])
+
+def validate_device_config(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'device_config' in message
+    assert type(message['device_config']) is str
+
+def validate_device_operational_status(message):
+    assert type(message) is str
+    assert message in ['KEEP_STATE', 'ENABLED', 'DISABLED']
+
+def validate_endpoint_id(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 3
+    assert 'topoId' in message
+    validate_topology_id(message['topoId'])
+    assert 'dev_id' in message
+    validate_device_id(message['dev_id'])
+    assert 'port_id' in message
+    validate_uuid(message['port_id'])
+
+def validate_endpoint(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 2
+    assert 'port_id' in message
+    validate_endpoint_id(message['port_id'])
+    assert 'port_type' in message
+    assert type(message['port_type']) is str
+
+def validate_device(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 5
+    assert 'device_id' in message
+    validate_device_id(message['device_id'])
+    assert 'device_type' in message
+    assert type(message['device_type']) is str
+    assert 'device_config' in message
+    validate_device_config(message['device_config'])
+    assert 'devOperationalStatus' in message
+    validate_device_operational_status(message['devOperationalStatus'])
+    assert 'endpointList' in message
+    assert type(message['endpointList']) is list
+    for endpoint in message['endpointList']: validate_endpoint(endpoint)
+
+def validate_link(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 2
+    assert 'link_id' in message
+    validate_link_id(message['link_id'])
+    assert 'endpointList' in message
+    assert type(message['endpointList']) is list
+    for endpoint_id in message['endpointList']: validate_endpoint_id(endpoint_id)
+
 def validate_topology(message):
     assert type(message) is dict
     assert len(message.keys()) > 0
     assert 'topoId' in message
+    validate_topology_id(message['topoId'])
     assert 'device' in message
+    assert type(message['device']) is list
+    for device in message['device']: validate_device(device)
     assert 'link' in message
+    assert type(message['link']) is list
+    for link in message['link']: validate_link(link)
 
 def validate_topology_is_empty(message):
     validate_topology(message)
@@ -41,3 +112,72 @@ def validate_topology_has_devices(message):
 def validate_topology_has_links(message):
     validate_topology(message)
     assert len(message['link']) > 0
+
+def validate_constraint(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 2
+    assert 'constraint_type' in message
+    assert type(message['constraint_type']) is str
+    assert 'constraint_value' in message
+    assert type(message['constraint_value']) is str
+
+def validate_service_id(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 2
+    assert 'contextId' in message
+    validate_context_id(message['contextId'])
+    assert 'cs_id' in message
+    validate_uuid(message['cs_id'])
+
+def validate_service_config(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'serviceConfig' in message
+    assert type(message['serviceConfig']) is str
+
+def validate_service_type(message):
+    assert type(message) is str
+    assert message in ['UNKNOWN', 'L3NM', 'L2NM', 'TAPI_CONNECTIVITY_SERVICE']
+
+def validate_service_state_enum(message):
+    assert type(message) is str
+    assert message in ['PLANNED', 'ACTIVE', 'PENDING_REMOVAL']
+
+def validate_service_state(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'serviceState' in message
+    validate_service_state_enum(message['serviceState'])
+
+def validate_service(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 6
+    assert 'cs_id' in message
+    validate_service_id(message['cs_id'])
+    assert 'serviceType' in message
+    validate_service_type(message['serviceType'])
+    assert 'endpointList' in message
+    assert type(message['endpointList']) is list
+    for endpoint_id in message['endpointList']: validate_endpoint_id(endpoint_id)
+    assert 'constraint' in message
+    assert type(message['constraint']) is list
+    for constraint in message['constraint']: validate_constraint(constraint)
+    assert 'serviceState' in message
+    validate_service_state(message['serviceState'])
+    assert 'serviceConfig' in message
+    validate_service_config(message['serviceConfig'])
+
+def validate_service_list(message):
+    assert type(message) is dict
+    assert len(message.keys()) == 1
+    assert 'cs' in message
+    assert type(message['cs']) is list
+    for cs in message['cs']: validate_service(cs)
+
+def validate_service_list_is_empty(message):
+    validate_service_list(message)
+    assert len(message['cs']) == 0
+
+def validate_service_list_is_not_empty(message):
+    validate_service_list(message)
+    assert len(message['cs']) > 0
diff --git a/src/common/tools/RetryDecorator.py b/src/common/tools/client/RetryDecorator.py
similarity index 100%
rename from src/common/tools/RetryDecorator.py
rename to src/common/tools/client/RetryDecorator.py
diff --git a/src/common/tools/client/__init__.py b/src/common/tools/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/tools/service/DeviceCheckers.py b/src/common/tools/service/DeviceCheckers.py
new file mode 100644
index 0000000000000000000000000000000000000000..9233b683e91ef26c112990dee139e21b3cc4a0c2
--- /dev/null
+++ b/src/common/tools/service/DeviceCheckers.py
@@ -0,0 +1,50 @@
+import grpc
+from common.database.api.Database import Database
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.exceptions.ServiceException import ServiceException
+
+def check_device_exists(database : Database, context_id : str, topology_id : str, device_id : str):
+    db_context = database.context(context_id).create()
+    db_topology = db_context.topology(topology_id).create()
+    if db_topology.devices.contains(device_id): return
+    msg = 'Context({})/Topology({})/Device({}) does not exist in the database.'
+    msg = msg.format(context_id, topology_id, device_id)
+    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+def check_device_not_exists(database : Database, context_id : str, topology_id : str, device_id : str):
+    db_context = database.context(context_id).create()
+    db_topology = db_context.topology(topology_id).create()
+    if not db_topology.devices.contains(device_id): return
+    msg = 'Context({})/Topology({})/Device({}) already exists in the database.'
+    msg = msg.format(context_id, topology_id, device_id)
+    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
+
+def check_device_endpoint_exists(
+    database : Database, parent_name : str,
+    context_id : str, topology_id : str, device_id : str, port_id : str) -> Endpoint:
+
+    # Implicit validation: parent.context == endpoint.context, and parent.context created automatically
+    if not database.contexts.contains(context_id):          # pragma: no cover
+        msg = 'Context({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_context = database.context(context_id)
+
+    if not db_context.topologies.contains(topology_id):
+        msg = 'Context({})/Topology({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_topology = db_context.topology(topology_id)
+
+    if not db_topology.devices.contains(device_id):
+        msg = 'Context({})/Topology({})/Device({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, device_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+    db_device = db_topology.device(device_id)
+
+    if not db_device.endpoints.contains(port_id):
+        msg = 'Context({})/Topology({})/Device({})/Port({}) in {} does not exist in the database.'
+        msg = msg.format(context_id, topology_id, device_id, port_id, parent_name)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+    return db_device.endpoint(port_id)
diff --git a/src/common/tools/service/EndpointIdCheckers.py b/src/common/tools/service/EndpointIdCheckers.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ac0fe92dd458f38778d9a62011c4279b42ed918
--- /dev/null
+++ b/src/common/tools/service/EndpointIdCheckers.py
@@ -0,0 +1,84 @@
+import grpc, logging
+from typing import Dict, Set, Tuple, Union
+from common.Checkers import chk_string
+from common.exceptions.ServiceException import ServiceException
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+
+def check_endpoint_id(
+    logger : logging.Logger, endpoint_number : int, parent_name : str, endpoint_id : 'EndpointId',
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]],
+    predefined_context_id : str = DEFAULT_CONTEXT_ID, acceptable_context_ids : Set[str] = set([DEFAULT_CONTEXT_ID]),
+    predefined_topology_id : str = DEFAULT_TOPOLOGY_ID, acceptable_topology_ids : Set[str] = set([DEFAULT_TOPOLOGY_ID]),
+    predefined_device_id : Union[str, None] = None, acceptable_device_ids : Set[str] = set(),
+    prevent_same_device_multiple_times : bool = True) -> Tuple[str, str, str]:
+
+    try:
+        ep_context_id  = chk_string('endpoint_id[#{}].topoId.contextId.contextUuid.uuid'.format(endpoint_number),
+                                    endpoint_id.topoId.contextId.contextUuid.uuid,
+                                    allow_empty=True)
+        ep_topology_id = chk_string('endpoint_id[#{}].topoId.topoId.uuid'.format(endpoint_number),
+                                    endpoint_id.topoId.topoId.uuid,
+                                    allow_empty=True)
+        ep_device_id   = chk_string('endpoint_id[#{}].dev_id.device_id.uuid'.format(endpoint_number),
+                                    endpoint_id.dev_id.device_id.uuid,
+                                    allow_empty=(predefined_device_id is not None))
+        ep_port_id     = chk_string('endpoint_id[#{}].port_id.uuid'.format(endpoint_number),
+                                    endpoint_id.port_id.uuid,
+                                    allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    if len(ep_context_id) == 0:
+        # Assumption: if no context is specified for an endpoint_id, use predefined context
+        ep_context_id = predefined_context_id
+    elif (len(acceptable_context_ids) > 0) and (ep_context_id not in acceptable_context_ids):
+        # Assumption: parent and endpoints should belong to the same context
+        msg = ' '.join([
+            'Context({}) in {} mismatches acceptable Contexts({}).',
+            'Optionally, leave field empty to use predefined Context({}).',
+        ])
+        msg = msg.format(
+            ep_context_id, parent_name, str(acceptable_context_ids), predefined_context_id)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    if len(ep_topology_id) == 0:
+        # Assumption: if no topology is specified for an endpoint_id, use predefined topology
+        ep_topology_id = predefined_topology_id
+    elif (len(acceptable_topology_ids) > 0) and (ep_topology_id not in acceptable_topology_ids):
+        msg = ' '.join([
+            'Context({})/Topology({}) in {} mismatches acceptable Topologies({}).',
+            'Optionally, leave field empty to use predefined Topology({}).',
+        ])
+        msg = msg.format(
+            ep_context_id, ep_topology_id, parent_name, str(acceptable_topology_ids), predefined_topology_id)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    if (predefined_device_id is not None) and (len(ep_device_id) == 0):
+        # Assumption: if no device is specified for an endpoint_id, use predefined device, if available
+        ep_device_id = predefined_device_id
+    elif (len(acceptable_device_ids) > 0) and (ep_device_id not in acceptable_device_ids):
+        msg = ' '.join([
+            'Context({})/Topology({})/Device({}) in {} mismatches acceptable Devices({}).',
+            'Optionally, leave field empty to use predefined Device({}).',
+        ])
+        msg = msg.format(
+            ep_context_id, ep_topology_id, ep_device_id, parent_name, str(acceptable_device_ids), predefined_device_id)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_devices = add_topology_devices_endpoints.setdefault(ep_topology_id, dict())
+    if prevent_same_device_multiple_times and (ep_device_id in add_devices):
+        msg = 'Duplicated Context({})/Topology({})/Device({}) in {}.'
+        msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_device_and_endpoints = add_devices.setdefault(ep_device_id, set())
+
+    # Implicit validation: same device cannot appear 2 times in the list of endpoints
+    if ep_port_id in add_device_and_endpoints:                                # pragma: no cover
+        msg = 'Duplicated Context({})/Topology({})/Device({})/Port({}) in {}.'
+        msg = msg.format(ep_context_id, ep_topology_id, ep_device_id, ep_port_id, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_device_and_endpoints.add(ep_port_id)
+    return ep_topology_id, ep_device_id, ep_port_id
diff --git a/src/common/tools/service/EnumCheckers.py b/src/common/tools/service/EnumCheckers.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f2a84dc74470858efa97e52ad2a0dddd4cbfc88
--- /dev/null
+++ b/src/common/tools/service/EnumCheckers.py
@@ -0,0 +1,24 @@
+import grpc
+from enum import Enum
+from common.exceptions.ServiceException import ServiceException
+
+def check_enum(enum_name, method_name, value, to_enum_method, accepted_values_dict) -> Enum:
+    _value = to_enum_method(value)
+    if _value is None:                          # pragma: no cover (gRPC prevents unsupported values)
+        msg = 'Unsupported {}({}).'
+        msg = msg.format(enum_name, value)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    accepted_values = accepted_values_dict.get(method_name)
+    if accepted_values is None:                 # pragma: no cover (test requires malforming the code)
+        msg = '{} acceptable values not specified for Method({}).'
+        msg = msg.format(enum_name, method_name)
+        raise ServiceException(grpc.StatusCode.INTERNAL, msg)
+
+    if len(accepted_values) == 0: return _value
+    if _value in accepted_values: return _value
+
+    msg = 'Method({}) does not accept {}({}). Permitted values for Method({}) are {}({}).'
+    accepted_values_list = sorted(map(lambda v: v.name, accepted_values))
+    msg = msg.format(method_name, enum_name, _value.name, method_name, enum_name, accepted_values_list)
+    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
diff --git a/src/common/tools/service/LinkCheckers.py b/src/common/tools/service/LinkCheckers.py
new file mode 100644
index 0000000000000000000000000000000000000000..a65046dbf065286547b1885239ad7578fa69a562
--- /dev/null
+++ b/src/common/tools/service/LinkCheckers.py
@@ -0,0 +1,19 @@
+import grpc
+from common.database.api.Database import Database
+from common.exceptions.ServiceException import ServiceException
+
+def check_link_exists(database : Database, context_id : str, topology_id : str, link_id : str):
+    db_context = database.context(context_id).create()
+    db_topology = db_context.topology(topology_id).create()
+    if db_topology.links.contains(link_id): return
+    msg = 'Context({})/Topology({})/Link({}) does not exist in the database.'
+    msg = msg.format(context_id, topology_id, link_id)
+    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+def check_link_not_exists(database : Database, context_id : str, topology_id : str, link_id : str):
+    db_context = database.context(context_id).create()
+    db_topology = db_context.topology(topology_id).create()
+    if not db_topology.links.contains(link_id): return
+    msg = 'Context({})/Topology({})/Link({}) already exists in the database.'
+    msg = msg.format(context_id, topology_id, link_id)
+    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
diff --git a/src/common/tools/service/ServiceCheckers.py b/src/common/tools/service/ServiceCheckers.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8bafd1c03db0b1b330633062456752da7cd93c9
--- /dev/null
+++ b/src/common/tools/service/ServiceCheckers.py
@@ -0,0 +1,29 @@
+import grpc
+from common.database.api.Database import Database
+from common.exceptions.ServiceException import ServiceException
+
+def check_service_exists(database : Database, context_id : str, service_id : str):
+    if not database.contexts.contains(context_id):
+        msg = 'Context({}) does not exist in the database.'
+        msg = msg.format(context_id)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+    db_context = database.context(context_id)
+    if db_context.services.contains(service_id): return
+
+    msg = 'Context({})/Service({}) does not exist in the database.'
+    msg = msg.format(context_id, service_id)
+    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+def check_service_not_exists(database : Database, context_id : str, service_id : str):
+    if not database.contexts.contains(context_id):
+        msg = 'Context({}) does not exist in the database.'
+        msg = msg.format(context_id)
+        raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
+
+    db_context = database.context(context_id)
+    if not db_context.services.contains(service_id): return
+
+    msg = 'Context({})/Service({}) already exists in the database.'
+    msg = msg.format(context_id, service_id)
+    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
diff --git a/src/common/tools/service/__init__.py b/src/common/tools/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/.gitlab-ci.yml b/src/context/.gitlab-ci.yml
index 1d702f3c9d689fd40217f458fe2dfb627c85d478..a6917d7287dadcae669c3cf2c7ffbc3e54ec0792 100644
--- a/src/context/.gitlab-ci.yml
+++ b/src/context/.gitlab-ci.yml
@@ -17,12 +17,12 @@ build context:
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-test context:
+unit_test context:
   variables:
     IMAGE_NAME: 'context' # name of the microservice
     IMAGE_NAME_TEST: 'context-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: test
+  stage: unit_test
   needs:
     - build context
   before_script:
@@ -49,8 +49,9 @@ deploy context:
   stage: deploy
   needs:
     - build context
-    - test context
+    - unit_test context
     - dependencies all
+    - integ_test execute
   script:
     - kubectl version
     - kubectl get all
diff --git a/src/context/Config.py b/src/context/Config.py
index 473db28503e6ef2a9db36087e7c62fa8799979f7..2019cdd0141dc98063fde51568c59e84f6ae087e 100644
--- a/src/context/Config.py
+++ b/src/context/Config.py
@@ -8,8 +8,9 @@ GRPC_SERVICE_PORT = 1010
 GRPC_MAX_WORKERS  = 10
 GRPC_GRACE_PERIOD = 60
 
-# HTTP settings
-HTTP_SERVICE_PORT = 8080
+# REST-API settings
+RESTAPI_SERVICE_PORT = 8080
+RESTAPI_BASE_URL = '/api'
 
 # Prometheus settings
 METRICS_PORT = 9192
diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py
index 64bd0010b3d1d57a90023b5473d8d89b8a69823a..32074eba3c8681029b567ca7bc4760d0879a593a 100644
--- a/src/context/client/ContextClient.py
+++ b/src/context/client/ContextClient.py
@@ -1,5 +1,5 @@
 import grpc, logging
-from common.tools.RetryDecorator import retry, delay_exponential
+from common.tools.client.RetryDecorator import retry, delay_exponential
 from context.proto.context_pb2 import Link, LinkId, Empty, Topology
 from context.proto.context_pb2_grpc import ContextServiceStub
 
diff --git a/src/context/requirements.in b/src/context/requirements.in
index 25abdad1b5767117956a88b816399635348884c7..e80c645fc94eb240a3290e762db22557e94cdae0 100644
--- a/src/context/requirements.in
+++ b/src/context/requirements.in
@@ -1,6 +1,8 @@
+flask-restful
 grpcio-health-checking
 grpcio
 prometheus-client
 pytest
 pytest-benchmark
 redis
+requests
diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py
index 6a189d67835a3185c3d87f52bd76dd9582e261dc..f067d144652314ad58d5eaf3c3fdfefb3c900bbf 100644
--- a/src/context/service/ContextServiceServicerImpl.py
+++ b/src/context/service/ContextServiceServicerImpl.py
@@ -1,12 +1,11 @@
-from typing import Dict, List, Set, Tuple
 import grpc, logging
 from prometheus_client import Counter, Histogram
-from common.Checkers import chk_string
-from common.database.api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
 from common.database.api.Database import Database
 from common.exceptions.ServiceException import ServiceException
 from context.proto.context_pb2 import Empty, Link, LinkId, Topology
 from context.proto.context_pb2_grpc import ContextServiceServicer
+from context.service.Tools import check_link_id_request, check_link_request
 
 LOGGER = logging.getLogger(__name__)
 
@@ -62,11 +61,13 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             GETTOPOLOGY_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:                               # pragma: no cover (ServiceException not thrown)
-            grpc_context.abort(e.code, e.details)                   # pragma: no cover (ServiceException not thrown)
+            LOGGER.exception('GetTopology exception')
+            GETTOPOLOGY_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('GetTopology exception')               # pragma: no cover
-            GETTOPOLOGY_COUNTER_FAILED.inc()                        # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('GetTopology exception')
+            GETTOPOLOGY_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @ADDLINK_HISTOGRAM_DURATION.time()
     def AddLink(self, request : Link, grpc_context : grpc.ServicerContext) -> LinkId:
@@ -75,95 +76,15 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             LOGGER.debug('AddLink request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            try:
-                link_id = chk_string('link.link_id.link_id.uuid',
-                                     request.link_id.link_id.uuid,
-                                     allow_empty=False)
-            except Exception as e:
-                LOGGER.exception('Invalid arguments:')
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+            link_id, db_endpoints = check_link_request('AddLink', request, self.database, LOGGER)
 
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            if db_topology.links.contains(link_id):
-                msg = 'Link({}) already exists in the database.'
-                msg = msg.format(link_id)
-                raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
-
-            added_devices_and_endpoints : Dict[str, Set[str]] = {}
-            device_endpoint_pairs : List[Tuple[str, str]] = []
-            for i,endpoint in enumerate(request.endpointList):
-                try:
-                    ep_context_id  = chk_string('endpoint[#{}].topoId.contextId.contextUuid.uuid'.format(i),
-                                                endpoint.topoId.contextId.contextUuid.uuid,
-                                                allow_empty=True)
-                    ep_topology_id = chk_string('endpoint[#{}].topoId.topoId.uuid'.format(i),
-                                                endpoint.topoId.topoId.uuid,
-                                                allow_empty=True)
-                    ep_device_id   = chk_string('endpoint[#{}].dev_id.device_id.uuid'.format(i),
-                                                endpoint.dev_id.device_id.uuid,
-                                                allow_empty=False)
-                    ep_port_id     = chk_string('endpoint[#{}].port_id.uuid'.format(i),
-                                                endpoint.port_id.uuid,
-                                                allow_empty=False)
-                except Exception as e:
-                    LOGGER.exception('Invalid arguments:')
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-                if (len(ep_context_id) > 0) and (ep_context_id != DEFAULT_CONTEXT_ID):
-                    msg = ' '.join([
-                        'Unsupported Context({}) in Endpoint(#{}) of Link({}).',
-                        'Only default Context({}) is currently supported.',
-                        'Optionally, leave field empty to use default Context.',
-                    ])
-                    msg = msg.format(ep_context_id, i, link_id, DEFAULT_CONTEXT_ID)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(ep_context_id) == 0:
-                    ep_context_id = DEFAULT_CONTEXT_ID
-
-                if (len(ep_topology_id) > 0) and (ep_topology_id != DEFAULT_TOPOLOGY_ID):
-                    msg = ' '.join([
-                        'Unsupported Topology({}) in Endpoint(#{}) of Link({}).',
-                        'Only default Topology({}) is currently supported.',
-                        'Optionally, leave field empty to use default Topology.',
-                    ])
-                    msg = msg.format(ep_topology_id, i, link_id, DEFAULT_TOPOLOGY_ID)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(ep_topology_id) == 0:
-                    ep_topology_id = DEFAULT_TOPOLOGY_ID
-
-                if ep_device_id in added_devices_and_endpoints:
-                    msg = 'Duplicated Device({}) in Endpoint(#{}) of Link({}).'
-                    msg = msg.format(ep_device_id, i, link_id)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-
-                if not db_topology.devices.contains(ep_device_id):
-                    msg = 'Device({}) in Endpoint(#{}) of Link({}) does not exist in the database.'
-                    msg = msg.format(ep_device_id, i, link_id)
-                    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-                added_device_and_endpoints = added_devices_and_endpoints.setdefault(ep_device_id, set())
-
-                # should never happen since same device cannot appear 2 times in the link
-                if ep_port_id in added_device_and_endpoints:                                # pragma: no cover
-                    msg = 'Duplicated Device({})/Port({}) in Endpoint(#{}) of Link({}).'    # pragma: no cover
-                    msg = msg.format(ep_device_id, ep_port_id, i, link_id)                  # pragma: no cover
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)           # pragma: no cover
-
-                if not db_topology.device(ep_device_id).endpoints.contains(ep_port_id):
-                    msg = 'Device({})/Port({}) in Endpoint(#{}) of Link({}) does not exist in the database.'
-                    msg = msg.format(ep_device_id, ep_port_id, i, link_id)
-                    raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-                added_device_and_endpoints.add(ep_port_id)
-                device_endpoint_pairs.append((ep_device_id, ep_port_id))
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
             db_link = db_topology.link(link_id).create()
-            for device_id,endpoint_id in device_endpoint_pairs:
-                link_endpoint_id = '{}/{}'.format(device_id, endpoint_id)
-                db_endpoint = db_topology.device(ep_device_id).endpoint(ep_port_id)
+            for db_endpoint in db_endpoints:
+                link_endpoint_id = '{}/{}'.format(
+                    db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
                 db_link.endpoint(link_endpoint_id).create(db_endpoint)
 
             # ----- Compose reply --------------------------------------------------------------------------------------
@@ -172,11 +93,13 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             ADDLINK_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('AddLink exception')
+            ADDLINK_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('AddLink exception')                   # pragma: no cover
-            ADDLINK_COUNTER_FAILED.inc()                            # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('AddLink exception')
+            ADDLINK_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @DELETELINK_HISTOGRAM_DURATION.time()
     def DeleteLink(self, request : LinkId, grpc_context : grpc.ServicerContext) -> Empty:
@@ -185,23 +108,11 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             LOGGER.debug('DeleteLink request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            try:
-                link_id = chk_string('link_id.link_id.uuid',
-                                     request.link_id.uuid,
-                                     allow_empty=False)
-            except Exception as e:
-                LOGGER.exception('Invalid arguments:')
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+            link_id = check_link_id_request('DeleteLink', request, self.database, LOGGER)
 
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            if not db_topology.links.contains(link_id):
-                msg = 'Link({}) does not exist in the database.'
-                msg = msg.format(link_id)
-                raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
             db_topology.link(link_id).delete()
 
             # ----- Compose reply --------------------------------------------------------------------------------------
@@ -210,8 +121,10 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             DELETELINK_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('DeleteLink exception')
+            DELETELINK_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('DeleteLink exception')                # pragma: no cover
-            DELETELINK_COUNTER_FAILED.inc()                         # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('DeleteLink exception')
+            DELETELINK_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/context/service/Tools.py b/src/context/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9856d616bb335f0a1be64c09de5174eb3523fefc
--- /dev/null
+++ b/src/context/service/Tools.py
@@ -0,0 +1,70 @@
+import grpc, logging
+from typing import Dict, List, Set, Tuple
+from common.Checkers import chk_string
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.exceptions.ServiceException import ServiceException
+from common.tools.service.EndpointIdCheckers import check_endpoint_id
+from common.tools.service.DeviceCheckers import check_device_endpoint_exists
+from common.tools.service.LinkCheckers import check_link_exists, check_link_not_exists
+from context.proto.context_pb2 import Link, LinkId
+
+def _check_link_exists(method_name : str, database : Database, link_id : str):
+    if method_name in ['AddLink']:
+        check_link_not_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
+    elif method_name in ['DeleteLink']:
+        check_link_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
+    else:                                       # pragma: no cover (test requires malforming the code)
+        msg = 'Unexpected condition [_check_link_exists(method_name={}, link_id={})]'
+        msg = msg.format(str(method_name), str(link_id))
+        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
+
+def check_link_request(
+    method_name : str, request : Link, database : Database, logger : logging.Logger
+    ) -> Tuple[str, List[Endpoint]]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        link_id = chk_string('link.link_id.link_id.uuid',
+                             request.link_id.link_id.uuid,
+                             allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    # ----- Check if link exists in database ---------------------------------------------------------------------------
+    _check_link_exists(method_name, database, link_id)
+
+    # ----- Parse endpoints and check if they exist in the database as device endpoints --------------------------------
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
+    db_endpoints : List[Endpoint] = []
+    for endpoint_number,endpoint_id in enumerate(request.endpointList):
+        parent_name = 'Endpoint(#{}) of Context({})/Topology({})/Link({})'
+        parent_name = parent_name.format(endpoint_number, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, link_id)
+
+        _, ep_device_id, ep_port_id = check_endpoint_id(
+            logger, endpoint_number, parent_name, endpoint_id, add_topology_devices_endpoints)
+
+        db_endpoint = check_device_endpoint_exists(
+            database, parent_name, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, ep_device_id, ep_port_id)
+        db_endpoints.append(db_endpoint)
+
+    return link_id, db_endpoints
+
+def check_link_id_request(
+    method_name : str, request : LinkId, database : Database, logger : logging.Logger) -> str:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        link_id = chk_string('link_id.link_id.uuid',
+                             request.link_id.uuid,
+                             allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    # ----- Check if link exists in database ---------------------------------------------------------------------------
+    _check_link_exists(method_name, database, link_id)
+
+    return link_id
diff --git a/src/context/service/__main__.py b/src/context/service/__main__.py
index 4335f9137bb1b3d358b741c6e2b6d270887e54d2..e0a9888685bfdd1e4b9b49c2361599a267167f2d 100644
--- a/src/context/service/__main__.py
+++ b/src/context/service/__main__.py
@@ -1,8 +1,11 @@
 import logging, os, signal, sys, threading
 from prometheus_client import start_http_server
 from common.database.Factory import get_database
+from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, RESTAPI_SERVICE_PORT, \
+    RESTAPI_BASE_URL, METRICS_PORT
 from context.service.ContextService import ContextService
-from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
+from context.service.rest_server.Server import Server
+from context.service.rest_server.resources.Context import Context
 
 terminate = threading.Event()
 logger = None
@@ -15,11 +18,13 @@ def signal_handler(signal, frame):
 def main():
     global terminate, logger
 
-    service_port = os.environ.get('CONTEXTSERVICE_SERVICE_PORT_GRPC', GRPC_SERVICE_PORT)
-    max_workers  = os.environ.get('MAX_WORKERS',  GRPC_MAX_WORKERS )
-    grace_period = os.environ.get('GRACE_PERIOD', GRPC_GRACE_PERIOD)
-    log_level    = os.environ.get('LOG_LEVEL',    LOG_LEVEL   )
-    metrics_port = os.environ.get('METRICS_PORT', METRICS_PORT)
+    grpc_service_port    = os.environ.get('CONTEXTSERVICE_SERVICE_PORT_GRPC', GRPC_SERVICE_PORT   )
+    max_workers          = os.environ.get('MAX_WORKERS',                      GRPC_MAX_WORKERS    )
+    grace_period         = os.environ.get('GRACE_PERIOD',                     GRPC_GRACE_PERIOD   )
+    log_level            = os.environ.get('LOG_LEVEL',                        LOG_LEVEL           )
+    restapi_service_port = os.environ.get('RESTAPI_SERVICE_PORT',             RESTAPI_SERVICE_PORT)
+    restapi_base_url     = os.environ.get('RESTAPI_BASE_URL',                 RESTAPI_BASE_URL    )
+    metrics_port         = os.environ.get('METRICS_PORT',                     METRICS_PORT        )
 
     logging.basicConfig(level=log_level)
     logger = logging.getLogger(__name__)
@@ -36,17 +41,24 @@ def main():
     database = get_database()
 
     # Starting context service
-    service = ContextService(database, port=service_port, max_workers=max_workers, grace_period=grace_period)
-    service.start()
+    grpc_service = ContextService(database, port=grpc_service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service.start()
+
+    rest_server = Server(port=restapi_service_port, base_url=restapi_base_url)
+    rest_server.add_resource(
+        Context, '/restconf/config/context', endpoint='api.context', resource_class_args=(database,))
+    rest_server.start()
 
     # Wait for Ctrl+C or termination signal
-    while not terminate.wait(0.1): pass
+    while not terminate.wait(timeout=0.1): pass
 
     logger.info('Terminating...')
-    service.stop()
+    grpc_service.stop()
+    rest_server.shutdown()
+    rest_server.join()
 
     logger.info('Bye')
-    return(0)
+    return 0
 
 if __name__ == '__main__':
     sys.exit(main())
diff --git a/src/context/service/rest_server/Server.py b/src/context/service/rest_server/Server.py
new file mode 100644
index 0000000000000000000000000000000000000000..16badfce8c84f058aeaeac79993ada726a17f06a
--- /dev/null
+++ b/src/context/service/rest_server/Server.py
@@ -0,0 +1,34 @@
+import logging, threading
+from flask import Flask
+from flask_restful import Api
+from werkzeug.serving import make_server
+from context.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT
+
+logging.getLogger('werkzeug').setLevel(logging.WARNING)
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class Server(threading.Thread):
+    def __init__(self, host=BIND_ADDRESS, port=RESTAPI_SERVICE_PORT, base_url=RESTAPI_BASE_URL):
+        threading.Thread.__init__(self, daemon=True)
+        self.host = host
+        self.port = port
+        self.base_url = base_url
+        self.app = Flask(__name__)
+        self.api = Api(self.app, prefix=self.base_url)
+
+    def add_resource(self, resource, *urls, **kwargs):
+        self.api.add_resource(resource, *urls, **kwargs)
+
+    def run(self):
+        self.srv = make_server(self.host, self.port, self.app, threaded=True)
+        self.ctx = self.app.app_context()
+        self.ctx.push()
+
+        endpoint = 'http://{}:{}{}'.format(self.host, self.port, self.base_url)
+        LOGGER.info('Listening on {}...'.format(endpoint))
+        self.srv.serve_forever()
+
+    def shutdown(self):
+        self.srv.shutdown()
diff --git a/src/context/service/rest_server/__init__.py b/src/context/service/rest_server/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/service/rest_server/resources/Context.py b/src/context/service/rest_server/resources/Context.py
new file mode 100644
index 0000000000000000000000000000000000000000..293ff24edebab8a1ded55e6ff5120409a534a332
--- /dev/null
+++ b/src/context/service/rest_server/resources/Context.py
@@ -0,0 +1,13 @@
+from flask.json import jsonify
+from flask_restful import Resource
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID
+
+class Context(Resource):
+    def __init__(self, database : Database) -> None:
+        super().__init__()
+        self.database = database
+
+    def get(self):
+        with self.database:
+            return jsonify(self.database.context(DEFAULT_CONTEXT_ID).dump())
diff --git a/src/context/service/rest_server/resources/__init__.py b/src/context/service/rest_server/resources/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
index 104736af195f0eb0643563f816bdaeefa5b0743b..b3a22c60d1aad2e7ad70db42ecfd1d0641a5749a 100644
--- a/src/context/tests/test_unitary.py
+++ b/src/context/tests/test_unitary.py
@@ -1,39 +1,34 @@
-import copy, grpc, logging, pytest
+import copy, grpc, logging, pytest, requests, time
 from google.protobuf.json_format import MessageToDict
 from common.database.Factory import get_database, DatabaseEngineEnum
 from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
 from common.database.tests.script import populate_example
-from common.tests.Assertions import validate_empty, validate_link_id, validate_topology
+from common.tests.Assertions import validate_empty, validate_link_id, validate_topology, validate_topology_has_devices,\
+    validate_topology_has_links, validate_topology_is_empty
 from context.client.ContextClient import ContextClient
-from context.proto.context_pb2 import Empty, Link, LinkId
+from context.proto.context_pb2 import Empty, Link, LinkId, Topology
 from context.service.ContextService import ContextService
-from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from context.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, RESTAPI_SERVICE_PORT, \
+    RESTAPI_BASE_URL
+from context.service.rest_server.Server import Server
+from context.service.rest_server.resources.Context import Context
 
-port = 10000 + GRPC_SERVICE_PORT # avoid first 1024 privileged ports to avoid evelating permissions for tests
+grpc_port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+restapi_port = 10000 + RESTAPI_SERVICE_PORT # avoid privileged ports
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-LINK_ID = {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}}
+# use "copy.deepcopy" to prevent propagating forced changes during tests
+CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
+TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
+LINK_ID = {'link_id': {'uuid': 'DEV1/EP2 ==> DEV2/EP1'}}
 LINK = {
-    'link_id': {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}},
+    'link_id': {'link_id': {'uuid': 'DEV1/EP2 ==> DEV2/EP1'}},
     'endpointList' : [
-        {
-            'topoId': {
-                'contextId': {'contextUuid': {'uuid': 'admin'}},
-                'topoId': {'uuid': 'admin'}
-            },
-            'dev_id': {'device_id': {'uuid': 'dev1'}},
-            'port_id': {'uuid' : 'to-dev2'}
-        },
-        {
-            'topoId': {
-                'contextId': {'contextUuid': {'uuid': 'admin'}},
-                'topoId': {'uuid': 'admin'}
-            },
-            'dev_id': {'device_id': {'uuid': 'dev2'}},
-            'port_id': {'uuid' : 'to-dev1'}
-        },
+        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP2'}},
+        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP1'}},
     ]
 }
 
@@ -45,32 +40,46 @@ def context_database():
 @pytest.fixture(scope='session')
 def context_service(context_database : Database):
     _service = ContextService(
-        context_database, port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+        context_database, port=grpc_port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
+@pytest.fixture(scope='session')
+def context_service_rest(context_database : Database):
+    _rest_server = Server(port=restapi_port, base_url=RESTAPI_BASE_URL)
+    _rest_server.add_resource(
+        Context, '/restconf/config/context', endpoint='api.context', resource_class_args=(context_database,))
+    _rest_server.start()
+    time.sleep(1) # give the server some time to start
+    yield _rest_server
+    _rest_server.shutdown()
+    _rest_server.join()
+
 @pytest.fixture(scope='session')
 def context_client(context_service):
-    _client = ContextClient(address='127.0.0.1', port=port)
+    _client = ContextClient(address='127.0.0.1', port=grpc_port)
     yield _client
     _client.close()
 
 def test_get_topology_empty(context_client : ContextClient, context_database : Database):
     # should work
     context_database.clear_all()
-    validate_topology(MessageToDict(
+    validate_topology_is_empty(MessageToDict(
         context_client.GetTopology(Empty()),
         including_default_value_fields=True, preserving_proto_field_name=True,
         use_integers_for_enums=False))
 
 def test_get_topology_completed(context_client : ContextClient, context_database : Database):
     # should work
-    populate_example(context_database)
-    validate_topology(MessageToDict(
+    populate_example(context_database, add_services=False)
+    topology = MessageToDict(
         context_client.GetTopology(Empty()),
         including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
+        use_integers_for_enums=False)
+    validate_topology(topology)
+    validate_topology_has_devices(topology)
+    validate_topology_has_links(topology)
 
 def test_delete_link_empty_uuid(context_client : ContextClient):
     # should fail with link not found
@@ -86,7 +95,11 @@ def test_add_link_already_exists(context_client : ContextClient):
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         context_client.AddLink(Link(**LINK))
     assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    assert e.value.details() == 'Link(dev1/to-dev2 ==> dev2/to-dev1) already exists in the database.'
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'already exists in the database.',
+    ])
+    assert e.value.details() == msg
 
 def test_delete_link(context_client : ContextClient):
     # should work
@@ -100,7 +113,11 @@ def test_delete_link_not_existing(context_client : ContextClient):
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         context_client.DeleteLink(LinkId(**LINK_ID))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    assert e.value.details() == 'Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'does not exist in the database.'
+    ])
+    assert e.value.details() == msg
 
 def test_add_link_uuid_empty(context_client : ContextClient):
     # should fail with link uuid empty
@@ -111,81 +128,87 @@ def test_add_link_uuid_empty(context_client : ContextClient):
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     assert e.value.details() == 'link.link_id.link_id.uuid() string is empty.'
 
-def test_add_link_endpoint_wrong_context(context_client : ContextClient):
-    # should fail with unsupported context
+def test_add_link_wrong_endpoint(context_client : ContextClient):
+    # should fail with wrong endpoint context
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
         copy_link['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
         context_client.AddLink(Link(**copy_link))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported Context(wrong-context) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1).',
-        'Only default Context(admin) is currently supported.',
-        'Optionally, leave field empty to use default Context.',
+        'Context(wrong-context) in Endpoint(#0) of Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'mismatches acceptable Contexts({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Context(admin).',
     ])
     assert e.value.details() == msg
 
-def test_add_link_endpoint_wrong_topology(context_client : ContextClient):
-    # should fail with unsupported topology
+    # should fail with wrong endpoint topology
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
         copy_link['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
         context_client.AddLink(Link(**copy_link))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported Topology(wrong-topo) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1).',
-        'Only default Topology(admin) is currently supported.',
-        'Optionally, leave field empty to use default Topology.',
+        'Context(admin)/Topology(wrong-topo)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'mismatches acceptable Topologies({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Topology(admin).',
     ])
     assert e.value.details() == msg
 
-def test_add_link_empty_device_uuid(context_client : ContextClient):
-    # should fail with port uuid is empty
+    # should fail with device uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
         copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
         context_client.AddLink(Link(**copy_link))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'endpoint[#0].dev_id.device_id.uuid() string is empty.'
+    assert e.value.details() == 'endpoint_id[#0].dev_id.device_id.uuid() string is empty.'
 
-def test_add_link_endpoint_wrong_device(context_client : ContextClient):
     # should fail with wrong endpoint device
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
         copy_link['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
         context_client.AddLink(Link(**copy_link))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Device(wrong-device) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Device(wrong-device)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'does not exist in the database.',
+    ])
     assert e.value.details() == msg
 
-def test_add_link_endpoint_wrong_port(context_client : ContextClient):
-    # should fail with wrong endpoint port
+    # should fail with endpoint uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
+        copy_link['endpointList'][0]['port_id']['uuid'] = ''
         context_client.AddLink(Link(**copy_link))
-    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    msg = 'Device(dev1)/Port(wrong-port) in Endpoint(#0) of Link(dev1/to-dev2 ==> dev2/to-dev1) does not exist in the database.'
-    assert e.value.details() == msg
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    assert e.value.details() == 'endpoint_id[#0].port_id.uuid() string is empty.'
 
-def test_add_link_endpoint_duplicated_device(context_client : ContextClient):
-    # should fail with duplicated endpoint device
+    # should fail with wrong endpoint port
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][1]['dev_id']['device_id']['uuid'] = 'dev1'
+        copy_link['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
         context_client.AddLink(Link(**copy_link))
-    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    msg = 'Duplicated Device(dev1) in Endpoint(#1) of Link(dev1/to-dev2 ==> dev2/to-dev1).'
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1)',
+        'does not exist in the database.',
+    ])
     assert e.value.details() == msg
 
-def test_add_link_empty_port_uuid(context_client : ContextClient):
-    # should fail with port uuid is empty
+    # should fail with endpoint device duplicated
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_link = copy.deepcopy(LINK)
-        copy_link['endpointList'][0]['port_id']['uuid'] = ''
+        copy_link['endpointList'][1]['dev_id']['device_id']['uuid'] = 'DEV1'
         context_client.AddLink(Link(**copy_link))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'endpoint[#0].port_id.uuid() string is empty.'
+    msg = ' '.join([
+        'Duplicated Context(admin)/Topology(admin)/Device(DEV1)',
+        'in Endpoint(#1) of Context(admin)/Topology(admin)/Link(DEV1/EP2 ==> DEV2/EP1).',
+    ])
+    assert e.value.details() == msg
 
 def test_add_link(context_client : ContextClient):
     # should work
@@ -213,7 +236,23 @@ def test_add_link_default_endpoint_context_topology(context_client : ContextClie
 
 def test_get_topology_completed_2(context_client : ContextClient):
     # should work
-    validate_topology(MessageToDict(
+    topology = MessageToDict(
         context_client.GetTopology(Empty()),
         including_default_value_fields=True, preserving_proto_field_name=True,
-        use_integers_for_enums=False))
+        use_integers_for_enums=False)
+    validate_topology(topology)
+    validate_topology_has_devices(topology)
+    validate_topology_has_links(topology)
+
+def test_get_topology_completed_rest_api(context_service_rest : Server):
+    # should work
+    request_url = 'http://127.0.0.1:{}{}/restconf/config/context'.format(restapi_port, RESTAPI_BASE_URL)
+    reply = requests.get(request_url)
+    json_reply = reply.json()
+    topology = MessageToDict(
+        Topology(**json_reply['topologies'][0]),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False)
+    validate_topology(topology)
+    validate_topology_has_devices(topology)
+    validate_topology_has_links(topology)
diff --git a/src/device/.gitlab-ci.yml b/src/device/.gitlab-ci.yml
index 8fd0205c895e67f3d63c8f56cd8434280dd19e25..0d538ad47c4e7b95c7e012ef0750039c89ac4a06 100644
--- a/src/device/.gitlab-ci.yml
+++ b/src/device/.gitlab-ci.yml
@@ -17,12 +17,12 @@ build device:
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-test device:
+unit_test device:
   variables:
     IMAGE_NAME: 'device' # name of the microservice
     IMAGE_NAME_TEST: 'device-test' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: test
+  stage: unit_test
   needs:
     - build device
   before_script:
@@ -49,8 +49,9 @@ deploy device:
   stage: deploy
   needs:
     - build device
-    - test device
+    - unit_test device
     - dependencies all
+    - integ_test execute
   script:
     - kubectl version
     - kubectl get all
diff --git a/src/device/client/DeviceClient.py b/src/device/client/DeviceClient.py
index a517ebdd1551465f9404714ec07bd7326cad7c2d..c84400ed7e43a2649c8e3a57b4ee913c78858523 100644
--- a/src/device/client/DeviceClient.py
+++ b/src/device/client/DeviceClient.py
@@ -1,5 +1,5 @@
 import grpc, logging
-from common.tools.RetryDecorator import retry, delay_exponential
+from common.tools.client.RetryDecorator import retry, delay_exponential
 from device.proto.context_pb2 import Device, DeviceId, Empty
 from device.proto.device_pb2_grpc import DeviceServiceStub
 
diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index 71c4bfc6d8dc809a837669614aaf60c7e6578fd9..84ff22b3cad32252160596a457ea8358bcad953b 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -2,13 +2,14 @@ from typing import List, Tuple
 import grpc, logging
 from prometheus_client import Counter, Histogram
 from common.Checkers import chk_options, chk_string
-from common.database.api.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
 from common.database.api.Database import Database
-from common.database.api.context.OperationalStatus import OperationalStatus, operationalstatus_enum_values, \
-    to_operationalstatus_enum
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus, \
+    operationalstatus_enum_values, to_operationalstatus_enum
 from common.exceptions.ServiceException import ServiceException
 from device.proto.context_pb2 import DeviceId, Device, Empty
 from device.proto.device_pb2_grpc import DeviceServiceServicer
+from device.service.Tools import check_device_id_request, check_device_request
 
 LOGGER = logging.getLogger(__name__)
 
@@ -52,112 +53,15 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             LOGGER.debug('AddDevice request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            try:
-                device_id     = chk_string ('device.device_id.device_id.uuid',
-                                            request.device_id.device_id.uuid,
-                                            allow_empty=False)
-                device_type   = chk_string ('device.device_type',
-                                            request.device_type,
-                                            allow_empty=False)
-                device_config = chk_string ('device.device_config.device_config',
-                                            request.device_config.device_config,
-                                            allow_empty=True)
-                device_opstat = chk_options('device.devOperationalStatus',
-                                            request.devOperationalStatus,
-                                            operationalstatus_enum_values())
-            except Exception as e:
-                LOGGER.exception('Invalid arguments:')
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-            device_opstat = to_operationalstatus_enum(device_opstat)
-            # should not happen because gRPC limits accepted values in enums
-            if device_opstat is None:                                           # pragma: no cover
-                msg = 'Unsupported OperationalStatus({}).'                      # pragma: no cover
-                msg = msg.format(request.devOperationalStatus)                  # pragma: no cover
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)   # pragma: no cover
-
-            if device_opstat == OperationalStatus.KEEP_STATE:
-                msg = ' '.join([
-                    'Device has to be created with either ENABLED/DISABLED Operational State.',
-                    'Use KEEP_STATE only in configure Device methods.',
-                ])
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+            device_id, device_type, device_config, device_opstat, db_endpoints_ports = \
+                check_device_request('AddDevice', request, self.database, LOGGER)
 
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            if db_topology.devices.contains(device_id):
-                msg = 'Device({}) already exists in the database.'
-                msg = msg.format(device_id)
-                raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
-
-            added_endpoint_uuids = set()
-            endpoint_pairs : List[Tuple[str, str]] = []
-            for i,endpoint in enumerate(request.endpointList):
-                try:
-                    ep_context_id  = chk_string('endpoint[#{}].port_id.topoId.contextId.contextUuid.uuid'.format(i),
-                                                endpoint.port_id.topoId.contextId.contextUuid.uuid,
-                                                allow_empty=True)
-                    ep_topology_id = chk_string('endpoint[#{}].port_id.topoId.topoId.uuid'.format(i),
-                                                endpoint.port_id.topoId.topoId.uuid,
-                                                allow_empty=True)
-                    ep_device_id   = chk_string('endpoint[#{}].port_id.dev_id.device_id.uuid'.format(i),
-                                                endpoint.port_id.dev_id.device_id.uuid,
-                                                allow_empty=True)
-                    ep_port_id     = chk_string('endpoint[#{}].port_id.port_id.uuid'.format(i),
-                                                endpoint.port_id.port_id.uuid,
-                                                allow_empty=False)
-                    ep_port_type   = chk_string('endpoint[#{}].port_type'.format(i),
-                                                endpoint.port_type,
-                                                allow_empty=False)
-                except Exception as e:
-                    LOGGER.exception('Invalid arguments:')
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-                if (len(ep_context_id) > 0) and (ep_context_id != DEFAULT_CONTEXT_ID):
-                    msg = ' '.join([
-                        'Unsupported Context({}) in Endpoint(#{}) of Device({}).',
-                        'Only default Context({}) is currently supported.',
-                        'Optionally, leave field empty to use default Context.',
-                    ])
-                    msg = msg.format(ep_context_id, i, device_id, DEFAULT_CONTEXT_ID)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(ep_context_id) == 0:
-                    ep_context_id = DEFAULT_CONTEXT_ID
-
-                if (len(ep_topology_id) > 0) and (ep_topology_id != DEFAULT_TOPOLOGY_ID):
-                    msg = ' '.join([
-                        'Unsupported Topology({}) in Endpoint(#{}) of Device({}).',
-                        'Only default Topology({}) is currently supported.',
-                        'Optionally, leave field empty to use default Topology.',
-                    ])
-                    msg = msg.format(ep_topology_id, i, device_id, DEFAULT_TOPOLOGY_ID)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(ep_topology_id) == 0:
-                    ep_topology_id = DEFAULT_TOPOLOGY_ID
-
-                if (len(ep_device_id) > 0) and (ep_device_id != device_id):
-                    msg = ' '.join([
-                        'Wrong Device({}) in Endpoint(#{}).',
-                        'Parent specified in message is Device({}).',
-                        'Optionally, leave field empty to use parent Device.',
-                    ])
-                    msg = msg.format(ep_device_id, i, device_id)
-                    raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
-                elif len(ep_device_id) == 0:
-                    ep_device_id = device_id
-
-                if ep_port_id in added_endpoint_uuids:
-                    msg = 'Duplicated Port({}) in Endpoint(#{}) of Device({}).'
-                    msg = msg.format(ep_port_id, i, device_id)
-                    raise ServiceException(grpc.StatusCode.ALREADY_EXISTS, msg)
-
-                added_endpoint_uuids.add(ep_port_id)
-                endpoint_pairs.append((ep_port_id, ep_port_type))
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
             db_device = db_topology.device(device_id).create(device_type, device_config, device_opstat)
-            for port_id,port_type in endpoint_pairs: db_device.endpoint(port_id).create(port_type)
+            for db_endpoint,port_type in db_endpoints_ports:
+                db_endpoint.create(port_type)
 
             # ----- Compose reply --------------------------------------------------------------------------------------
             reply = DeviceId(**db_device.dump_id())
@@ -165,11 +69,13 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             ADDDEVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('AddDevice exception')
+            ADDDEVICE_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('AddDevice exception')                 # pragma: no cover
-            ADDDEVICE_COUNTER_FAILED.inc()                          # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('AddDevice exception')
+            ADDDEVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @CONFIGUREDEVICE_HISTOGRAM_DURATION.time()
     def ConfigureDevice(self, request : Device, grpc_context : grpc.ServicerContext) -> DeviceId:
@@ -178,39 +84,14 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             LOGGER.debug('ConfigureDevice request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            try:
-                device_id     = chk_string ('device.device_id.device_id.uuid',
-                                            request.device_id.device_id.uuid,
-                                            allow_empty=False)
-                device_type   = chk_string ('device.device_type',
-                                            request.device_type,
-                                            allow_empty=True)
-                device_config = chk_string ('device.device_config.device_config',
-                                            request.device_config.device_config,
-                                            allow_empty=True)
-                device_opstat = chk_options('device.devOperationalStatus',
-                                            request.devOperationalStatus,
-                                            operationalstatus_enum_values())
-            except Exception as e:
-                LOGGER.exception('Invalid arguments:')
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
-
-            device_opstat = to_operationalstatus_enum(device_opstat)
-            # should not happen because gRPC limits accepted values in enums
-            if device_opstat is None:                                           # pragma: no cover
-                msg = 'Unsupported OperationalStatus({}).'                      # pragma: no cover
-                msg = msg.format(request.devOperationalStatus)                  # pragma: no cover
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)   # pragma: no cover
+            device_id, device_type, device_config, device_opstat, db_endpoints_ports = \
+                check_device_request('UpdateDevice', request, self.database, LOGGER)
 
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            if not db_topology.devices.contains(device_id):
-                msg = 'Device({}) does not exist in the database.'
-                msg = msg.format(device_id)
-                raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
             db_device = db_topology.device(device_id)
+
             db_device_attributes = db_device.attributes.get(attributes=['device_type'])
             # should not happen, device creation through Database API ensures all fields are always present
             if len(db_device_attributes) == 0:                                                  # pragma: no cover
@@ -230,7 +111,7 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
                 msg = msg.format(device_id, db_device_type, device_type)
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
 
-            if len(request.endpointList) > 0:
+            if len(db_endpoints_ports) > 0:
                 msg = 'Endpoints belonging to Device({}) cannot be modified.'
                 msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
@@ -251,7 +132,6 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
                 msg = msg.format(device_id)
                 raise ServiceException(grpc.StatusCode.ABORTED, msg)
 
-            # ----- Implement changes in the database ------------------------------------------------------------------
             db_device.update(update_attributes=update_attributes)
 
             # ----- Compose reply --------------------------------------------------------------------------------------
@@ -260,11 +140,13 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             CONFIGUREDEVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('ConfigureDevice exception')
+            CONFIGUREDEVICE_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('ConfigureDevice exception')           # pragma: no cover
-            CONFIGUREDEVICE_COUNTER_FAILED.inc()                    # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('ConfigureDevice exception')
+            CONFIGUREDEVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
 
     @DELETEDEVICE_HISTOGRAM_DURATION.time()
     def DeleteDevice(self, request : DeviceId, grpc_context : grpc.ServicerContext) -> Empty:
@@ -273,23 +155,11 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             LOGGER.debug('DeleteDevice request: {}'.format(str(request)))
 
             # ----- Validate request data and pre-conditions -----------------------------------------------------------
-            try:
-                device_id = chk_string('device_id.device_id.uuid',
-                                       request.device_id.uuid,
-                                       allow_empty=False)
-            except Exception as e:
-                LOGGER.exception('Invalid arguments:')
-                raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+            device_id = check_device_id_request('DeleteDevice', request, self.database, LOGGER)
 
+            # ----- Implement changes in the database ------------------------------------------------------------------
             db_context = self.database.context(DEFAULT_CONTEXT_ID).create()
             db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID).create()
-
-            if not db_topology.devices.contains(device_id):
-                msg = 'Device({}) does not exist in the database.'
-                msg = msg.format(device_id)
-                raise ServiceException(grpc.StatusCode.NOT_FOUND, msg)
-
-            # ----- Implement changes in the database ------------------------------------------------------------------
             db_topology.device(device_id).delete()
 
             # ----- Compose reply --------------------------------------------------------------------------------------
@@ -298,8 +168,10 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             DELETEDEVICE_COUNTER_COMPLETED.inc()
             return reply
         except ServiceException as e:
+            LOGGER.exception('DeleteDevice exception')
+            DELETEDEVICE_COUNTER_FAILED.inc()
             grpc_context.abort(e.code, e.details)
         except Exception as e:                                      # pragma: no cover
-            LOGGER.exception('DeleteDevice exception')              # pragma: no cover
-            DELETEDEVICE_COUNTER_FAILED.inc()                       # pragma: no cover
-            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))    # pragma: no cover
+            LOGGER.exception('DeleteDevice exception')
+            DELETEDEVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..26b5a5d90c34d7e23e52e12642178b18338891b2
--- /dev/null
+++ b/src/device/service/Tools.py
@@ -0,0 +1,120 @@
+import grpc, logging
+from typing import Dict, List, Set, Tuple
+from common.Checkers import chk_options, chk_string
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus, \
+    operationalstatus_enum_values, to_operationalstatus_enum
+from common.exceptions.ServiceException import ServiceException
+from common.tools.service.DeviceCheckers import check_device_endpoint_exists
+from common.tools.service.EndpointIdCheckers import check_endpoint_id
+from common.tools.service.EnumCheckers import check_enum
+from common.tools.service.DeviceCheckers import check_device_exists, check_device_not_exists
+from device.proto.context_pb2 import Device, DeviceId
+
+# For each method name, define acceptable device operational statuses. Empty set means accept all.
+ACCEPTED_DEVICE_OPERATIONAL_STATUSES : Dict[str, Set[OperationalStatus]] = {
+    'AddDevice': set([OperationalStatus.ENABLED, OperationalStatus.DISABLED]),
+    'UpdateDevice': set([OperationalStatus.KEEP_STATE, OperationalStatus.ENABLED, OperationalStatus.DISABLED]),
+}
+
+def _check_device_exists(method_name : str, database : Database, device_id : str):
+    if method_name in ['AddDevice']:
+        check_device_not_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, device_id)
+    elif method_name in ['UpdateDevice', 'DeleteDevice']:
+        check_device_exists(database, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, device_id)
+    else:                                       # pragma: no cover (test requires malforming the code)
+        msg = 'Unexpected condition [_check_device_exists(method_name={}, device_id={})]'
+        msg = msg.format(str(method_name), str(device_id))
+        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
+
+def _check_device_endpoint_exists_or_get_pointer(
+    method_name : str, database : Database, parent_name : str, device_id : str, endpoint_id : str):
+
+    if method_name in ['AddDevice']:
+        db_context = database.context(DEFAULT_CONTEXT_ID)
+        db_topology = db_context.topology(DEFAULT_TOPOLOGY_ID)
+        db_device = db_topology.device(device_id)
+        return db_device.endpoint(endpoint_id)
+    elif method_name in ['UpdateDevice', 'DeleteDevice']:
+        return check_device_endpoint_exists(
+            database, parent_name, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, device_id, endpoint_id)
+    else:                                       # pragma: no cover (test requires malforming the code)
+        msg = 'Unexpected condition [_check_device_endpoint_exists_or_get_pointer(method_name={}, device_id={})]'
+        msg = msg.format(str(method_name), str(device_id))
+        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
+
+def check_device_operational_status(method_name : str, value : str) -> OperationalStatus:
+    return check_enum(
+        'OperationalStatus', method_name, value, to_operationalstatus_enum, ACCEPTED_DEVICE_OPERATIONAL_STATUSES)
+
+def check_device_request(
+    method_name : str, request : Device, database : Database, logger : logging.Logger
+    ) -> Tuple[str, str, str, OperationalStatus, List[Tuple[Endpoint, str]]]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        device_id     = chk_string ('device.device_id.device_id.uuid',
+                                    request.device_id.device_id.uuid,
+                                    allow_empty=False)
+        device_type   = chk_string ('device.device_type',
+                                    request.device_type,
+                                    allow_empty=False)
+        device_config = chk_string ('device.device_config.device_config',
+                                    request.device_config.device_config,
+                                    allow_empty=True)
+        device_opstat = chk_options('device.devOperationalStatus',
+                                    request.devOperationalStatus,
+                                    operationalstatus_enum_values())
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    device_opstat = check_device_operational_status(method_name, device_opstat)
+
+    # ----- Check if device exists in database -------------------------------------------------------------------------
+    _check_device_exists(method_name, database, device_id)
+
+    # ----- Parse endpoints and check if they exist in the database as device endpoints --------------------------------
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
+    db_endpoints__port_types : List[Tuple[Endpoint, str]] = []
+    for endpoint_number,endpoint in enumerate(request.endpointList):
+        parent_name = 'Endpoint(#{}) of Context({})/Topology({})/Device({})'
+        parent_name = parent_name.format(endpoint_number, DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID, device_id)
+
+        _, ep_device_id, ep_port_id = check_endpoint_id(
+            logger, endpoint_number, parent_name, endpoint.port_id, add_topology_devices_endpoints,
+            predefined_device_id=device_id, acceptable_device_ids=set([device_id]),
+            prevent_same_device_multiple_times=False)
+
+        try:
+            ep_port_type = chk_string('endpoint[#{}].port_type'.format(endpoint_number),
+                                      endpoint.port_type,
+                                      allow_empty=False)
+        except Exception as e:
+            logger.exception('Invalid arguments:')
+            raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+        db_endpoint = _check_device_endpoint_exists_or_get_pointer(
+            method_name, database, parent_name, ep_device_id, ep_port_id)
+        db_endpoints__port_types.append((db_endpoint, ep_port_type))
+
+    return device_id, device_type, device_config, device_opstat, db_endpoints__port_types
+
+def check_device_id_request(
+    method_name : str, request : DeviceId, database : Database, logger : logging.Logger) -> str:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        device_id = chk_string('device_id.device_id.uuid',
+                               request.device_id.uuid,
+                               allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    # ----- Check if device exists in database ---------------------------------------------------------------------------
+    _check_device_exists(method_name, database, device_id)
+
+    return device_id
diff --git a/src/device/service/__main__.py b/src/device/service/__main__.py
index ae7db591c5c39836e0e1a5c62039060eb6651434..3ac9893a1fe408478a9f6482fd38fb9dcf725038 100644
--- a/src/device/service/__main__.py
+++ b/src/device/service/__main__.py
@@ -16,10 +16,10 @@ def main():
     global terminate, logger
 
     service_port = os.environ.get('DEVICESERVICE_SERVICE_PORT_GRPC', GRPC_SERVICE_PORT)
-    max_workers  = os.environ.get('MAX_WORKERS',  GRPC_MAX_WORKERS )
-    grace_period = os.environ.get('GRACE_PERIOD', GRPC_GRACE_PERIOD)
-    log_level    = os.environ.get('LOG_LEVEL',    LOG_LEVEL   )
-    metrics_port = os.environ.get('METRICS_PORT', METRICS_PORT)
+    max_workers  = os.environ.get('MAX_WORKERS',                     GRPC_MAX_WORKERS )
+    grace_period = os.environ.get('GRACE_PERIOD',                    GRPC_GRACE_PERIOD)
+    log_level    = os.environ.get('LOG_LEVEL',                       LOG_LEVEL        )
+    metrics_port = os.environ.get('METRICS_PORT',                    METRICS_PORT     )
 
     logging.basicConfig(level=log_level)
     logger = logging.getLogger(__name__)
@@ -36,17 +36,17 @@ def main():
     database = get_database()
 
     # Starting device service
-    service = DeviceService(database, port=service_port, max_workers=max_workers, grace_period=grace_period)
-    service.start()
+    grpc_service = DeviceService(database, port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service.start()
 
     # Wait for Ctrl+C or termination signal
-    while not terminate.wait(0.1): pass
+    while not terminate.wait(timeout=0.1): pass
 
     logger.info('Terminating...')
-    service.stop()
+    grpc_service.stop()
 
     logger.info('Bye')
-    return(0)
+    return 0
 
 if __name__ == '__main__':
     sys.exit(main())
diff --git a/src/device/tests/test_unitary.py b/src/device/tests/test_unitary.py
index 9834c5c39761997b336ed9feda11d6899dde19c0..95eb0a1af27433ddd85d100160cd122aebc60b8a 100644
--- a/src/device/tests/test_unitary.py
+++ b/src/device/tests/test_unitary.py
@@ -1,54 +1,39 @@
 import copy, grpc, logging, pytest
 from google.protobuf.json_format import MessageToDict
 from common.database.Factory import get_database, DatabaseEngineEnum
-from common.database.api.context.OperationalStatus import OperationalStatus
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
 from common.tests.Assertions import validate_device_id, validate_empty
 from device.client.DeviceClient import DeviceClient
 from device.proto.context_pb2 import Device, DeviceId
 from device.service.DeviceService import DeviceService
 from device.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 
+port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-DEVICE_ID = {'device_id': {'uuid': 'dev1'}}
+# use "copy.deepcopy" to prevent propagating forced changes during tests
+CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
+TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
+DEVICE_ID = {'device_id': {'uuid': 'DEV1'}}
 DEVICE = {
-    'device_id': {'device_id': {'uuid': 'dev1'}},
+    'device_id': copy.deepcopy(DEVICE_ID),
     'device_type': 'ROADM',
     'device_config': {'device_config': '<config/>'},
     'devOperationalStatus': OperationalStatus.ENABLED.value,
     'endpointList' : [
         {
-            'port_id': {
-                'topoId': {
-                    'contextId': {'contextUuid': {'uuid': 'admin'}},
-                    'topoId': {'uuid': 'admin'}
-                },
-                'dev_id': {'device_id': {'uuid': 'dev1'}},
-                'port_id': {'uuid' : 'to-dev2'}
-            },
+            'port_id': {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': copy.deepcopy(DEVICE_ID), 'port_id': {'uuid' : 'EP2'}},
             'port_type': 'WDM'
         },
         {
-            'port_id': {
-                'topoId': {
-                    'contextId': {'contextUuid': {'uuid': 'admin'}},
-                    'topoId': {'uuid': 'admin'}
-                },
-                'dev_id': {'device_id': {'uuid': 'dev1'}},
-                'port_id': {'uuid' : 'to-dev3'}
-            },
+            'port_id': {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': copy.deepcopy(DEVICE_ID), 'port_id': {'uuid' : 'EP3'}},
             'port_type': 'WDM'
         },
         {
-            'port_id': {
-                'topoId': {
-                    'contextId': {'contextUuid': {'uuid': 'admin'}},
-                    'topoId': {'uuid': 'admin'}
-                },
-                'dev_id': {'device_id': {'uuid': 'dev1'}},
-                'port_id': {'uuid' : 'to-dev4'}
-            },
+            'port_id': {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': copy.deepcopy(DEVICE_ID), 'port_id': {'uuid' : 'EP4'}},
             'port_type': 'WDM'
         },
     ]
@@ -62,18 +47,18 @@ def device_database():
 @pytest.fixture(scope='session')
 def device_service(device_database):
     _service = DeviceService(
-        device_database, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+        device_database, port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
 def device_client(device_service):
-    _client = DeviceClient(address='127.0.0.1', port=GRPC_SERVICE_PORT)
+    _client = DeviceClient(address='127.0.0.1', port=port)
     yield _client
     _client.close()
 
-def test_add_device_empty_device_uuid(device_client : DeviceClient):
+def test_add_device_wrong_attributes(device_client : DeviceClient):
     # should fail with device uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
@@ -82,7 +67,6 @@ def test_add_device_empty_device_uuid(device_client : DeviceClient):
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     assert e.value.details() == 'device.device_id.device_id.uuid() string is empty.'
 
-def test_add_device_empty_device_type(device_client : DeviceClient):
     # should fail with device type is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
@@ -91,7 +75,6 @@ def test_add_device_empty_device_type(device_client : DeviceClient):
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     assert e.value.details() == 'device.device_type() string is empty.'
 
-def test_add_device_wrong_device_operational_status(device_client : DeviceClient):
     # should fail with wrong device operational status
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
@@ -99,13 +82,13 @@ def test_add_device_wrong_device_operational_status(device_client : DeviceClient
         device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Device has to be created with either ENABLED/DISABLED Operational State.',
-        'Use KEEP_STATE only in configure Device methods.',
+        'Method(AddDevice) does not accept OperationalStatus(KEEP_STATE).',
+        'Permitted values for Method(AddDevice) are OperationalStatus([\'DISABLED\', \'ENABLED\']).',
     ])
     assert e.value.details() == msg
 
-def test_add_device_endpoint_wrong_context(device_client : DeviceClient):
-    # should fail with unsupported context
+def test_add_device_wrong_endpoint(device_client : DeviceClient):
+    # should fail with unsupported endpoint context
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
@@ -113,27 +96,25 @@ def test_add_device_endpoint_wrong_context(device_client : DeviceClient):
         device_client.AddDevice(request)
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported Context(wrong-context) in Endpoint(#0) of Device(dev1).',
-        'Only default Context(admin) is currently supported.',
-        'Optionally, leave field empty to use default Context.',
+        'Context(wrong-context)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Device(DEV1) mismatches acceptable Contexts({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Context(admin).',
     ])
     assert e.value.details() == msg
 
-def test_add_device_endpoint_wrong_topology(device_client : DeviceClient):
-    # should fail with unsupported topology
+    # should fail with unsupported endpoint topology
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['topoId']['topoId']['uuid'] = 'wrong-topo'
         device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Unsupported Topology(wrong-topo) in Endpoint(#0) of Device(dev1).',
-        'Only default Topology(admin) is currently supported.',
-        'Optionally, leave field empty to use default Topology.',
+        'Context(admin)/Topology(wrong-topo)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Device(DEV1) mismatches acceptable Topologies({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Topology(admin).',
     ])
     assert e.value.details() == msg
 
-def test_add_device_endpoint_wrong_device(device_client : DeviceClient):
     # should fail with wrong endpoint device
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
@@ -141,23 +122,21 @@ def test_add_device_endpoint_wrong_device(device_client : DeviceClient):
         device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     msg = ' '.join([
-        'Wrong Device(wrong-device) in Endpoint(#0).',
-        'Parent specified in message is Device(dev1).',
-        'Optionally, leave field empty to use parent Device.',
+        'Context(admin)/Topology(admin)/Device(wrong-device)',
+        'in Endpoint(#0) of Context(admin)/Topology(admin)/Device(DEV1) mismatches acceptable Devices({\'DEV1\'}).',
+        'Optionally, leave field empty to use predefined Device(DEV1).',
     ])
     assert e.value.details() == msg
 
-def test_add_device_empty_port_uuid(device_client : DeviceClient):
-    # should fail with port uuid is empty
+    # should fail with endpoint port uuid is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_id']['port_id']['uuid'] = ''
         device_client.AddDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'endpoint[#0].port_id.port_id.uuid() string is empty.'
+    assert e.value.details() == 'endpoint_id[#0].port_id.uuid() string is empty.'
 
-def test_add_device_empty_port_type(device_client : DeviceClient):
-    # should fail with port type is empty
+    # should fail with endpoint port type is empty
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['endpointList'][0]['port_type'] = ''
@@ -165,14 +144,17 @@ def test_add_device_empty_port_type(device_client : DeviceClient):
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
     assert e.value.details() == 'endpoint[#0].port_type() string is empty.'
 
-def test_add_device_duplicate_port(device_client : DeviceClient):
-    # should fail with uplicate port in device
+    # should fail with duplicate port in device
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
-        copy_device['endpointList'][1]['port_id']['port_id']['uuid'] = 'to-dev2'
+        copy_device['endpointList'][1]['port_id']['port_id']['uuid'] = 'EP2'
         device_client.AddDevice(Device(**copy_device))
-    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    assert e.value.details() == 'Duplicated Port(to-dev2) in Endpoint(#1) of Device(dev1).'
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Duplicated Context(admin)/Topology(admin)/Device(DEV1)/Port(EP2)',
+        'in Endpoint(#1) of Context(admin)/Topology(admin)/Device(DEV1).',
+    ])
+    assert e.value.details() == msg
 
 def test_add_device(device_client : DeviceClient):
     # should work
@@ -186,7 +168,7 @@ def test_add_device_duplicate(device_client : DeviceClient):
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         device_client.AddDevice(Device(**DEVICE))
     assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-    assert e.value.details() == 'Device(dev1) already exists in the database.'
+    assert e.value.details() == 'Context(admin)/Topology(admin)/Device(DEV1) already exists in the database.'
 
 def test_delete_device_empty_uuid(device_client : DeviceClient):
     # should fail with device uuid is empty
@@ -204,7 +186,7 @@ def test_delete_device_not_found(device_client : DeviceClient):
         copy_device_id['device_id']['uuid'] = 'wrong-device-id'
         device_client.DeleteDevice(DeviceId(**copy_device_id))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    assert e.value.details() == 'Device(wrong-device-id) does not exist in the database.'
+    assert e.value.details() == 'Context(admin)/Topology(admin)/Device(wrong-device-id) does not exist in the database.'
 
 def test_delete_device(device_client : DeviceClient):
     # should work
@@ -229,7 +211,7 @@ def test_configure_device_not_found(device_client : DeviceClient):
         copy_device['device_id']['device_id']['uuid'] = 'wrong-device-id'
         device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.NOT_FOUND
-    assert e.value.details() == 'Device(wrong-device-id) does not exist in the database.'
+    assert e.value.details() == 'Context(admin)/Topology(admin)/Device(wrong-device-id) does not exist in the database.'
 
 def test_add_device_default_endpoint_context_topology_device(device_client : DeviceClient):
     # should work
@@ -242,24 +224,22 @@ def test_add_device_default_endpoint_context_topology_device(device_client : Dev
             including_default_value_fields=True, preserving_proto_field_name=True,
             use_integers_for_enums=False))
 
-def test_configure_device_wrong_device_type(device_client : DeviceClient):
+def test_configure_device_wrong_attributes(device_client : DeviceClient):
     # should fail with device type is wrong
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         copy_device['device_type'] = 'wrong-type'
         device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'Device(dev1) has Type(ROADM) in the database. Cannot be changed to Type(wrong-type).'
+    assert e.value.details() == 'Device(DEV1) has Type(ROADM) in the database. Cannot be changed to Type(wrong-type).'
 
-def test_configure_device_with_endpoints(device_client : DeviceClient):
     # should fail with endpoints cannot be modified
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
         device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-    assert e.value.details() == 'Endpoints belonging to Device(dev1) cannot be modified.'
+    assert e.value.details() == 'Endpoints belonging to Device(DEV1) cannot be modified.'
 
-def test_configure_device_no_change(device_client : DeviceClient):
     # should fail with any change detected
     with pytest.raises(grpc._channel._InactiveRpcError) as e:
         copy_device = copy.deepcopy(DEVICE)
@@ -269,7 +249,7 @@ def test_configure_device_no_change(device_client : DeviceClient):
         device_client.ConfigureDevice(Device(**copy_device))
     assert e.value.code() == grpc.StatusCode.ABORTED
     msg = ' '.join([
-        'Any change has been requested for Device(dev1).',
+        'Any change has been requested for Device(DEV1).',
         'Either specify a new configuration or a new device operational status.',
     ])
     assert e.value.details() == msg
diff --git a/src/integration_tester/definitions.py b/src/integration_tester/definitions.py
deleted file mode 100644
index ebdc3a43de438722716bf4f8d87eab3165526980..0000000000000000000000000000000000000000
--- a/src/integration_tester/definitions.py
+++ /dev/null
@@ -1,92 +0,0 @@
-from common.database.api.context.OperationalStatus import OperationalStatus
-
-TOPOLOGY_ID = {
-    'contextId': {'contextUuid': {'uuid': 'admin'}},
-    'topoId': {'uuid': 'admin'}
-}
-
-DEVICE_ID_DEV1 = {'device_id': {'uuid': 'dev1'}}
-DEVICE_DEV1 = {
-    'device_id': {'device_id': {'uuid': 'dev1'}}, 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
-    'devOperationalStatus': OperationalStatus.ENABLED.value,
-    'endpointList' : [
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}}, 'port_type': 'WDM'},
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}}, 'port_type': 'WDM'},
-    ]
-}
-
-DEVICE_ID_DEV2 = {'device_id': {'uuid': 'dev2'}}
-DEVICE_DEV2 = {
-    'device_id': {'device_id': {'uuid': 'dev2'}}, 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
-    'devOperationalStatus': OperationalStatus.ENABLED.value,
-    'endpointList' : [
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}}, 'port_type': 'WDM'},
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}}, 'port_type': 'WDM'},
-    ]
-}
-
-DEVICE_ID_DEV3 = {'device_id': {'uuid': 'dev3'}}
-DEVICE_DEV3 = {
-    'device_id': {'device_id': {'uuid': 'dev3'}},
-    'device_type': 'ROADM',
-    'device_config': {'device_config': '<config/>'},
-    'devOperationalStatus': OperationalStatus.ENABLED.value,
-    'endpointList' : [
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}}, 'port_type': 'WDM'},
-        {'port_id': {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}}, 'port_type': 'WDM'},
-    ]
-}
-
-LINK_ID_DEV1_DEV2 = {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}}
-LINK_DEV1_DEV2 = {
-    'link_id': {'link_id': {'uuid': 'dev1/to-dev2 ==> dev2/to-dev1'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}},
-    ]
-}
-
-LINK_ID_DEV1_DEV3 = {'link_id': {'uuid': 'dev1/to-dev3 ==> dev3/to-dev1'}}
-LINK_DEV1_DEV3 = {
-    'link_id': {'link_id': {'uuid': 'dev1/to-dev3 ==> dev3/to-dev1'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}},
-    ]
-}
-
-LINK_ID_DEV2_DEV1 = {'link_id': {'uuid': 'dev2/to-dev1 ==> dev1/to-dev2'}}
-LINK_DEV2_DEV1 = {
-    'link_id': {'link_id': {'uuid': 'dev2/to-dev1 ==> dev1/to-dev2'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev1'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev2'}},
-    ]
-}
-
-LINK_ID_DEV2_DEV3 = {'link_id': {'uuid': 'dev2/to-dev3 ==> dev3/to-dev2'}}
-LINK_DEV2_DEV3 = {
-    'link_id': {'link_id': {'uuid': 'dev2/to-dev3 ==> dev3/to-dev2'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}},
-    ]
-}
-
-LINK_ID_DEV3_DEV1 = {'link_id': {'uuid': 'dev3/to-dev1 ==> dev1/to-dev3'}}
-LINK_DEV3_DEV1 = {
-    'link_id': {'link_id': {'uuid': 'dev3/to-dev1 ==> dev1/to-dev3'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev1'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev1'}}, 'port_id': {'uuid' : 'to-dev3'}},
-    ]
-}
-
-LINK_ID_DEV3_DEV2 = {'link_id': {'uuid': 'dev3/to-dev2 ==> dev2/to-dev3'}}
-LINK_DEV3_DEV2 = {
-    'link_id': {'link_id': {'uuid': 'dev3/to-dev2 ==> dev2/to-dev3'}},
-    'endpointList' : [
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev3'}}, 'port_id': {'uuid' : 'to-dev2'}},
-        {'topoId': TOPOLOGY_ID, 'dev_id': {'device_id': {'uuid': 'dev2'}}, 'port_id': {'uuid' : 'to-dev3'}},
-    ]
-}
diff --git a/src/service/.gitlab-ci.yml b/src/service/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..edff5aad2ab260310668f60e04fd7e64dc2d0154
--- /dev/null
+++ b/src/service/.gitlab-ci.yml
@@ -0,0 +1,60 @@
+# Build, tag, and push the Docker images to the GitLab Docker registry
+build service:
+  variables:
+    IMAGE_NAME: 'service' # name of the microservice
+    IMAGE_NAME_TEST: 'service-test' # name of the test image of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  rules:
+    - changes:
+      - src/$IMAGE_NAME/**
+      - .gitlab-ci.yml
+
+# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
+unit_test service:
+  variables:
+    IMAGE_NAME: 'service' # name of the microservice
+    IMAGE_NAME_TEST: 'service-test' # name of the test image of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: unit_test
+  needs:
+    - build service
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker run -d -p 3030:3030 --name $IMAGE_NAME --network=teraflowbridge "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker ps -a
+    - sleep 5
+    - docker ps -a
+    - docker logs $IMAGE_NAME
+    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
+  after_script:
+    - docker stop $IMAGE_NAME
+    - docker rm $IMAGE_NAME
+  rules:
+    - changes:
+      - src/$IMAGE_NAME/**
+      - .gitlab-ci.yml
+
+# Deployment of the service in Kubernetes Cluster
+deploy service:
+  stage: deploy
+  needs:
+    - build service
+    - unit_test service
+    - dependencies all
+    - integ_test execute
+  script:
+    - kubectl version
+    - kubectl get all
+    - kubectl apply -f "manifests/serviceservice.yaml"
+    - kubectl delete pods --selector app=serviceservice
+    - kubectl get all
diff --git a/src/service/Config.py b/src/service/Config.py
new file mode 100644
index 0000000000000000000000000000000000000000..592392a1bde5757f83fd67589a7b7b7d6cc3e6c0
--- /dev/null
+++ b/src/service/Config.py
@@ -0,0 +1,12 @@
+import logging
+
+# General settings
+LOG_LEVEL = logging.WARNING
+
+# gRPC settings
+GRPC_SERVICE_PORT = 3030
+GRPC_MAX_WORKERS  = 10
+GRPC_GRACE_PERIOD = 60
+
+# Prometheus settings
+METRICS_PORT = 9192
diff --git a/src/service/Dockerfile b/src/service/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..306379a06d33c67c082540e19f3b1ca349becff2
--- /dev/null
+++ b/src/service/Dockerfile
@@ -0,0 +1,35 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=0
+
+# Download the gRPC health probe
+RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
+    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
+    chmod +x /bin/grpc_health_probe
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/service
+
+# Get Python packages per module
+COPY service/requirements.in service/requirements.in
+RUN pip-compile --output-file=service/requirements.txt service/requirements.in
+RUN python3 -m pip install -r service/requirements.txt
+
+# Add files into working directory
+COPY common/. common
+COPY service/. service
+
+# Start service service
+ENTRYPOINT ["python", "-m", "service.service"]
diff --git a/src/service/__init__.py b/src/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/client/ServiceClient.py b/src/service/client/ServiceClient.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c1b63611f100403a12731a679fdaafb4764843a
--- /dev/null
+++ b/src/service/client/ServiceClient.py
@@ -0,0 +1,69 @@
+import grpc, logging
+from common.tools.client.RetryDecorator import retry, delay_exponential
+from service.proto.context_pb2 import Empty
+from service.proto.service_pb2 import ConnectionList, Service, ServiceId, ServiceList
+from service.proto.service_pb2_grpc import ServiceServiceStub
+
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+
+class ServiceClient:
+    def __init__(self, address, port):
+        self.endpoint = '{}:{}'.format(address, port)
+        LOGGER.debug('Creating channel to {}...'.format(self.endpoint))
+        self.channel = None
+        self.stub = None
+        self.connect()
+        LOGGER.debug('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = ServiceServiceStub(self.channel)
+
+    def close(self):
+        if(self.channel is not None): self.channel.close()
+        self.channel = None
+        self.stub = None
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetServiceList(self, request : Empty) -> ServiceList:
+        LOGGER.debug('GetServiceList request: {}'.format(request))
+        response = self.stub.GetServiceList(request)
+        LOGGER.debug('GetServiceList result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def CreateService(self, request : Service) -> ServiceId:
+        LOGGER.debug('CreateService request: {}'.format(request))
+        response = self.stub.CreateService(request)
+        LOGGER.debug('CreateService result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def UpdateService(self, request : Service) -> ServiceId:
+        LOGGER.debug('UpdateService request: {}'.format(request))
+        response = self.stub.UpdateService(request)
+        LOGGER.debug('UpdateService result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def DeleteService(self, request : ServiceId) -> Empty:
+        LOGGER.debug('DeleteService request: {}'.format(request))
+        response = self.stub.DeleteService(request)
+        LOGGER.debug('DeleteService result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetServiceById(self, request : ServiceId) -> Service:
+        LOGGER.debug('GetServiceById request: {}'.format(request))
+        response = self.stub.GetServiceById(request)
+        LOGGER.debug('GetServiceById result: {}'.format(response))
+        return response
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetConnectionList(self, request : Empty) -> ConnectionList:
+        LOGGER.debug('GetConnectionList request: {}'.format(request))
+        response = self.stub.GetConnectionList(request)
+        LOGGER.debug('GetConnectionList result: {}'.format(response))
+        return response
diff --git a/src/service/client/__init__.py b/src/service/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/genproto.sh b/src/service/genproto.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9b6387a8a0739ce1cbf2cb75f1e178880a5dbc8e
--- /dev/null
+++ b/src/service/genproto.sh
@@ -0,0 +1,33 @@
+#!/bin/bash -eu
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash -e
+
+# Make folder containing the script the root folder for its execution
+cd $(dirname $0)
+
+rm -rf proto/*.py
+rm -rf proto/__pycache__
+touch proto/__init__.py
+
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
+
+rm proto/context_pb2_grpc.py
+
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2_grpc.py
diff --git a/src/service/proto/__init__.py b/src/service/proto/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/proto/context_pb2.py b/src/service/proto/context_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a41b1de47f4df97a6e90b42a02fab7556feafd34
--- /dev/null
+++ b/src/service/proto/context_pb2.py
@@ -0,0 +1,880 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: context.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='context.proto',
+  package='context',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"{\n\x07\x43ontext\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1f\n\x04topo\x18\x02 \x01(\x0b\x32\x11.context.Topology\x12(\n\x03\x63tl\x18\x03 \x01(\x0b\x32\x1b.context.TeraFlowController\"/\n\tContextId\x12\"\n\x0b\x63ontextUuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"m\n\x08Topology\x12#\n\x06topoId\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\x12\x1f\n\x06\x64\x65vice\x18\x03 \x03(\x0b\x32\x0f.context.Device\x12\x1b\n\x04link\x18\x04 \x03(\x0b\x32\r.context.Link\"S\n\x04Link\x12 \n\x07link_id\x18\x01 \x01(\x0b\x32\x0f.context.LinkId\x12)\n\x0c\x65ndpointList\x18\x02 \x03(\x0b\x32\x13.context.EndPointId\"R\n\nTopologyId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1d\n\x06topoId\x18\x02 \x01(\x0b\x32\r.context.Uuid\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"\xda\x01\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12>\n\x14\x64\x65vOperationalStatus\x18\x04 \x01(\x0e\x32 .context.DeviceOperationalStatus\x12\'\n\x0c\x65ndpointList\x18\x05 \x03(\x0b\x32\x11.context.EndPoint\"%\n\x0c\x44\x65viceConfig\x12\x15\n\rdevice_config\x18\x01 \x01(\t\"C\n\x08\x45ndPoint\x12$\n\x07port_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x11\n\tport_type\x18\x02 \x01(\t\"t\n\nEndPointId\x12#\n\x06topoId\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12!\n\x06\x64\x65v_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x1e\n\x07port_id\x18\x03 \x01(\x0b\x32\r.context.Uuid\",\n\x08\x44\x65viceId\x12 \n\tdevice_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"(\n\x06LinkId\x12\x1e\n\x07link_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"K\n\x12TeraFlowController\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x11\n\tipaddress\x18\x02 \x01(\t\"Q\n\x14\x41uthenticationResult\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*N\n\x17\x44\x65viceOperationalStatus\x12\x0f\n\x0bKEEP_STATUS\x10\x00\x12\x15\n\x08\x44ISABLED\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x0b\n\x07\x45NABLED\x10\x01\x32\xa2\x01\n\x0e\x43ontextService\x12\x32\n\x0bGetTopology\x12\x0e.context.Empty\x1a\x11.context.Topology\"\x00\x12+\n\x07\x41\x64\x64Link\x12\r.context.Link\x1a\x0f.context.LinkId\"\x00\x12/\n\nDeleteLink\x12\x0f.context.LinkId\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
+)
+
+_DEVICEOPERATIONALSTATUS = _descriptor.EnumDescriptor(
+  name='DeviceOperationalStatus',
+  full_name='context.DeviceOperationalStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='KEEP_STATUS', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='DISABLED', index=1, number=-1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='ENABLED', index=2, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1271,
+  serialized_end=1349,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUS)
+
+DeviceOperationalStatus = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUS)
+KEEP_STATUS = 0
+DISABLED = -1
+ENABLED = 1
+
+
+
+_EMPTY = _descriptor.Descriptor(
+  name='Empty',
+  full_name='context.Empty',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=26,
+  serialized_end=33,
+)
+
+
+_CONTEXT = _descriptor.Descriptor(
+  name='Context',
+  full_name='context.Context',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextId', full_name='context.Context.contextId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topo', full_name='context.Context.topo', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ctl', full_name='context.Context.ctl', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=35,
+  serialized_end=158,
+)
+
+
+_CONTEXTID = _descriptor.Descriptor(
+  name='ContextId',
+  full_name='context.ContextId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextUuid', full_name='context.ContextId.contextUuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=160,
+  serialized_end=207,
+)
+
+
+_TOPOLOGY = _descriptor.Descriptor(
+  name='Topology',
+  full_name='context.Topology',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.Topology.topoId', index=0,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device', full_name='context.Topology.device', index=1,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link', full_name='context.Topology.link', index=2,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=209,
+  serialized_end=318,
+)
+
+
+_LINK = _descriptor.Descriptor(
+  name='Link',
+  full_name='context.Link',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.Link.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='context.Link.endpointList', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=320,
+  serialized_end=403,
+)
+
+
+_TOPOLOGYID = _descriptor.Descriptor(
+  name='TopologyId',
+  full_name='context.TopologyId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextId', full_name='context.TopologyId.contextId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.TopologyId.topoId', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=405,
+  serialized_end=487,
+)
+
+
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=489,
+  serialized_end=552,
+)
+
+
+_DEVICE = _descriptor.Descriptor(
+  name='Device',
+  full_name='context.Device',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.Device.device_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_type', full_name='context.Device.device_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.Device.device_config', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='devOperationalStatus', full_name='context.Device.devOperationalStatus', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='context.Device.endpointList', index=4,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=555,
+  serialized_end=773,
+)
+
+
+_DEVICECONFIG = _descriptor.Descriptor(
+  name='DeviceConfig',
+  full_name='context.DeviceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.DeviceConfig.device_config', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=775,
+  serialized_end=812,
+)
+
+
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='port_id', full_name='context.EndPoint.port_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port_type', full_name='context.EndPoint.port_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=814,
+  serialized_end=881,
+)
+
+
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.EndPointId.topoId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='dev_id', full_name='context.EndPointId.dev_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port_id', full_name='context.EndPointId.port_id', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=883,
+  serialized_end=999,
+)
+
+
+_DEVICEID = _descriptor.Descriptor(
+  name='DeviceId',
+  full_name='context.DeviceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.DeviceId.device_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1001,
+  serialized_end=1045,
+)
+
+
+_LINKID = _descriptor.Descriptor(
+  name='LinkId',
+  full_name='context.LinkId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='link_id', full_name='context.LinkId.link_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1047,
+  serialized_end=1087,
+)
+
+
+_UUID = _descriptor.Descriptor(
+  name='Uuid',
+  full_name='context.Uuid',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='uuid', full_name='context.Uuid.uuid', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1089,
+  serialized_end=1109,
+)
+
+
+_TERAFLOWCONTROLLER = _descriptor.Descriptor(
+  name='TeraFlowController',
+  full_name='context.TeraFlowController',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctl_id', full_name='context.TeraFlowController.ctl_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ipaddress', full_name='context.TeraFlowController.ipaddress', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1111,
+  serialized_end=1186,
+)
+
+
+_AUTHENTICATIONRESULT = _descriptor.Descriptor(
+  name='AuthenticationResult',
+  full_name='context.AuthenticationResult',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctl_id', full_name='context.AuthenticationResult.ctl_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='authenticated', full_name='context.AuthenticationResult.authenticated', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1188,
+  serialized_end=1269,
+)
+
+_CONTEXT.fields_by_name['contextId'].message_type = _CONTEXTID
+_CONTEXT.fields_by_name['topo'].message_type = _TOPOLOGY
+_CONTEXT.fields_by_name['ctl'].message_type = _TERAFLOWCONTROLLER
+_CONTEXTID.fields_by_name['contextUuid'].message_type = _UUID
+_TOPOLOGY.fields_by_name['topoId'].message_type = _TOPOLOGYID
+_TOPOLOGY.fields_by_name['device'].message_type = _DEVICE
+_TOPOLOGY.fields_by_name['link'].message_type = _LINK
+_LINK.fields_by_name['link_id'].message_type = _LINKID
+_LINK.fields_by_name['endpointList'].message_type = _ENDPOINTID
+_TOPOLOGYID.fields_by_name['contextId'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['topoId'].message_type = _UUID
+_DEVICE.fields_by_name['device_id'].message_type = _DEVICEID
+_DEVICE.fields_by_name['device_config'].message_type = _DEVICECONFIG
+_DEVICE.fields_by_name['devOperationalStatus'].enum_type = _DEVICEOPERATIONALSTATUS
+_DEVICE.fields_by_name['endpointList'].message_type = _ENDPOINT
+_ENDPOINT.fields_by_name['port_id'].message_type = _ENDPOINTID
+_ENDPOINTID.fields_by_name['topoId'].message_type = _TOPOLOGYID
+_ENDPOINTID.fields_by_name['dev_id'].message_type = _DEVICEID
+_ENDPOINTID.fields_by_name['port_id'].message_type = _UUID
+_DEVICEID.fields_by_name['device_id'].message_type = _UUID
+_LINKID.fields_by_name['link_id'].message_type = _UUID
+_TERAFLOWCONTROLLER.fields_by_name['ctl_id'].message_type = _CONTEXTID
+_AUTHENTICATIONRESULT.fields_by_name['ctl_id'].message_type = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
+DESCRIPTOR.message_types_by_name['Context'] = _CONTEXT
+DESCRIPTOR.message_types_by_name['ContextId'] = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Topology'] = _TOPOLOGY
+DESCRIPTOR.message_types_by_name['Link'] = _LINK
+DESCRIPTOR.message_types_by_name['TopologyId'] = _TOPOLOGYID
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
+DESCRIPTOR.message_types_by_name['Device'] = _DEVICE
+DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
+DESCRIPTOR.message_types_by_name['LinkId'] = _LINKID
+DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
+DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
+DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
+DESCRIPTOR.enum_types_by_name['DeviceOperationalStatus'] = _DEVICEOPERATIONALSTATUS
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
+  'DESCRIPTOR' : _EMPTY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Empty)
+  })
+_sym_db.RegisterMessage(Empty)
+
+Context = _reflection.GeneratedProtocolMessageType('Context', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Context)
+  })
+_sym_db.RegisterMessage(Context)
+
+ContextId = _reflection.GeneratedProtocolMessageType('ContextId', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextId)
+  })
+_sym_db.RegisterMessage(ContextId)
+
+Topology = _reflection.GeneratedProtocolMessageType('Topology', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Topology)
+  })
+_sym_db.RegisterMessage(Topology)
+
+Link = _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), {
+  'DESCRIPTOR' : _LINK,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Link)
+  })
+_sym_db.RegisterMessage(Link)
+
+TopologyId = _reflection.GeneratedProtocolMessageType('TopologyId', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyId)
+  })
+_sym_db.RegisterMessage(TopologyId)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
+Device = _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Device)
+  })
+_sym_db.RegisterMessage(Device)
+
+DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICECONFIG,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceConfig)
+  })
+_sym_db.RegisterMessage(DeviceConfig)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceId)
+  })
+_sym_db.RegisterMessage(DeviceId)
+
+LinkId = _reflection.GeneratedProtocolMessageType('LinkId', (_message.Message,), {
+  'DESCRIPTOR' : _LINKID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.LinkId)
+  })
+_sym_db.RegisterMessage(LinkId)
+
+Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
+  'DESCRIPTOR' : _UUID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Uuid)
+  })
+_sym_db.RegisterMessage(Uuid)
+
+TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
+  'DESCRIPTOR' : _TERAFLOWCONTROLLER,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TeraFlowController)
+  })
+_sym_db.RegisterMessage(TeraFlowController)
+
+AuthenticationResult = _reflection.GeneratedProtocolMessageType('AuthenticationResult', (_message.Message,), {
+  'DESCRIPTOR' : _AUTHENTICATIONRESULT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.AuthenticationResult)
+  })
+_sym_db.RegisterMessage(AuthenticationResult)
+
+
+
+_CONTEXTSERVICE = _descriptor.ServiceDescriptor(
+  name='ContextService',
+  full_name='context.ContextService',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=1352,
+  serialized_end=1514,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='GetTopology',
+    full_name='context.ContextService.GetTopology',
+    index=0,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_TOPOLOGY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='AddLink',
+    full_name='context.ContextService.AddLink',
+    index=1,
+    containing_service=None,
+    input_type=_LINK,
+    output_type=_LINKID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='DeleteLink',
+    full_name='context.ContextService.DeleteLink',
+    index=2,
+    containing_service=None,
+    input_type=_LINKID,
+    output_type=_EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
+
+DESCRIPTOR.services_by_name['ContextService'] = _CONTEXTSERVICE
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/service/proto/service_pb2.py b/src/service/proto/service_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed248a038c6f6550994ebb204cbb4f626292c65c
--- /dev/null
+++ b/src/service/proto/service_pb2.py
@@ -0,0 +1,617 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: service.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from . import context_pb2 as context__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='service.proto',
+  package='service',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto\"+\n\x0bServiceList\x12\x1c\n\x02\x63s\x18\x01 \x03(\x0b\x32\x10.service.Service\"\x87\x02\n\x07Service\x12!\n\x05\x63s_id\x18\x01 \x01(\x0b\x32\x12.service.ServiceId\x12)\n\x0bserviceType\x18\x02 \x01(\x0e\x32\x14.service.ServiceType\x12)\n\x0c\x65ndpointList\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\x12\'\n\nconstraint\x18\x04 \x03(\x0b\x32\x13.context.Constraint\x12+\n\x0cserviceState\x18\x05 \x01(\x0b\x32\x15.service.ServiceState\x12-\n\rserviceConfig\x18\x06 \x01(\x0b\x32\x16.service.ServiceConfig\"&\n\rServiceConfig\x12\x15\n\rserviceConfig\x18\x01 \x01(\t\"P\n\tServiceId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1c\n\x05\x63s_id\x18\x02 \x01(\x0b\x32\r.context.Uuid\":\n\rServiceIdList\x12)\n\rserviceIdList\x18\x01 \x03(\x0b\x32\x12.service.ServiceId\"?\n\x0cServiceState\x12/\n\x0cserviceState\x18\x01 \x01(\x0e\x32\x19.service.ServiceStateEnum\"=\n\x0e\x43onnectionList\x12+\n\x0e\x63onnectionList\x18\x01 \x03(\x0b\x32\x13.service.Connection\"\x84\x01\n\nConnection\x12%\n\x06\x63on_id\x18\x01 \x01(\x0b\x32\x15.service.ConnectionId\x12,\n\x10relatedServiceId\x18\x02 \x01(\x0b\x32\x12.service.ServiceId\x12!\n\x04path\x18\x03 \x03(\x0b\x32\x13.context.EndPointId\"-\n\x0c\x43onnectionId\x12\x1d\n\x06\x63on_id\x18\x01 
\x01(\x0b\x32\r.context.Uuid*M\n\x0bServiceType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04L3NM\x10\x01\x12\x08\n\x04L2NM\x10\x02\x12\x1d\n\x19TAPI_CONNECTIVITY_SERVICE\x10\x03*@\n\x10ServiceStateEnum\x12\x0b\n\x07PLANNED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x13\n\x0fPENDING_REMOVAL\x10\x02\x32\xed\x02\n\x0eServiceService\x12\x38\n\x0eGetServiceList\x12\x0e.context.Empty\x1a\x14.service.ServiceList\"\x00\x12\x37\n\rCreateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.service.Service\x1a\x12.service.ServiceId\"\x00\x12\x35\n\rDeleteService\x12\x12.service.ServiceId\x1a\x0e.context.Empty\"\x00\x12\x38\n\x0eGetServiceById\x12\x12.service.ServiceId\x1a\x10.service.Service\"\x00\x12>\n\x11GetConnectionList\x12\x0e.context.Empty\x1a\x17.service.ConnectionList\"\x00\x62\x06proto3'
+  ,
+  dependencies=[context__pb2.DESCRIPTOR,])
+
+_SERVICETYPE = _descriptor.EnumDescriptor(
+  name='ServiceType',
+  full_name='service.ServiceType',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='L3NM', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='L2NM', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='TAPI_CONNECTIVITY_SERVICE', index=3, number=3,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=844,
+  serialized_end=921,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICETYPE)
+
+ServiceType = enum_type_wrapper.EnumTypeWrapper(_SERVICETYPE)
+_SERVICESTATEENUM = _descriptor.EnumDescriptor(
+  name='ServiceStateEnum',
+  full_name='service.ServiceStateEnum',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='PLANNED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='ACTIVE', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='PENDING_REMOVAL', index=2, number=2,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=923,
+  serialized_end=987,
+)
+_sym_db.RegisterEnumDescriptor(_SERVICESTATEENUM)
+
+ServiceStateEnum = enum_type_wrapper.EnumTypeWrapper(_SERVICESTATEENUM)
+UNKNOWN = 0
+L3NM = 1
+L2NM = 2
+TAPI_CONNECTIVITY_SERVICE = 3
+PLANNED = 0
+ACTIVE = 1
+PENDING_REMOVAL = 2
+
+
+
+_SERVICELIST = _descriptor.Descriptor(
+  name='ServiceList',
+  full_name='service.ServiceList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='cs', full_name='service.ServiceList.cs', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=41,
+  serialized_end=84,
+)
+
+
+_SERVICE = _descriptor.Descriptor(
+  name='Service',
+  full_name='service.Service',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='cs_id', full_name='service.Service.cs_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='serviceType', full_name='service.Service.serviceType', index=1,
+      number=2, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='service.Service.endpointList', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint', full_name='service.Service.constraint', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='serviceState', full_name='service.Service.serviceState', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='serviceConfig', full_name='service.Service.serviceConfig', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=87,
+  serialized_end=350,
+)
+
+
+_SERVICECONFIG = _descriptor.Descriptor(
+  name='ServiceConfig',
+  full_name='service.ServiceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='serviceConfig', full_name='service.ServiceConfig.serviceConfig', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=352,
+  serialized_end=390,
+)
+
+
+_SERVICEID = _descriptor.Descriptor(
+  name='ServiceId',
+  full_name='service.ServiceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextId', full_name='service.ServiceId.contextId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='cs_id', full_name='service.ServiceId.cs_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=392,
+  serialized_end=472,
+)
+
+
+_SERVICEIDLIST = _descriptor.Descriptor(
+  name='ServiceIdList',
+  full_name='service.ServiceIdList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='serviceIdList', full_name='service.ServiceIdList.serviceIdList', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=474,
+  serialized_end=532,
+)
+
+
+_SERVICESTATE = _descriptor.Descriptor(
+  name='ServiceState',
+  full_name='service.ServiceState',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='serviceState', full_name='service.ServiceState.serviceState', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=534,
+  serialized_end=597,
+)
+
+
+_CONNECTIONLIST = _descriptor.Descriptor(
+  name='ConnectionList',
+  full_name='service.ConnectionList',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='connectionList', full_name='service.ConnectionList.connectionList', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=599,
+  serialized_end=660,
+)
+
+
+_CONNECTION = _descriptor.Descriptor(
+  name='Connection',
+  full_name='service.Connection',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='con_id', full_name='service.Connection.con_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='relatedServiceId', full_name='service.Connection.relatedServiceId', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='path', full_name='service.Connection.path', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=663,
+  serialized_end=795,
+)
+
+
+_CONNECTIONID = _descriptor.Descriptor(
+  name='ConnectionId',
+  full_name='service.ConnectionId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='con_id', full_name='service.ConnectionId.con_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=797,
+  serialized_end=842,
+)
+
+_SERVICELIST.fields_by_name['cs'].message_type = _SERVICE
+_SERVICE.fields_by_name['cs_id'].message_type = _SERVICEID
+_SERVICE.fields_by_name['serviceType'].enum_type = _SERVICETYPE
+_SERVICE.fields_by_name['endpointList'].message_type = context__pb2._ENDPOINTID
+_SERVICE.fields_by_name['constraint'].message_type = context__pb2._CONSTRAINT
+_SERVICE.fields_by_name['serviceState'].message_type = _SERVICESTATE
+_SERVICE.fields_by_name['serviceConfig'].message_type = _SERVICECONFIG
+_SERVICEID.fields_by_name['contextId'].message_type = context__pb2._CONTEXTID
+_SERVICEID.fields_by_name['cs_id'].message_type = context__pb2._UUID
+_SERVICEIDLIST.fields_by_name['serviceIdList'].message_type = _SERVICEID
+_SERVICESTATE.fields_by_name['serviceState'].enum_type = _SERVICESTATEENUM
+_CONNECTIONLIST.fields_by_name['connectionList'].message_type = _CONNECTION
+_CONNECTION.fields_by_name['con_id'].message_type = _CONNECTIONID
+_CONNECTION.fields_by_name['relatedServiceId'].message_type = _SERVICEID
+_CONNECTION.fields_by_name['path'].message_type = context__pb2._ENDPOINTID
+_CONNECTIONID.fields_by_name['con_id'].message_type = context__pb2._UUID
+DESCRIPTOR.message_types_by_name['ServiceList'] = _SERVICELIST
+DESCRIPTOR.message_types_by_name['Service'] = _SERVICE
+DESCRIPTOR.message_types_by_name['ServiceConfig'] = _SERVICECONFIG
+DESCRIPTOR.message_types_by_name['ServiceId'] = _SERVICEID
+DESCRIPTOR.message_types_by_name['ServiceIdList'] = _SERVICEIDLIST
+DESCRIPTOR.message_types_by_name['ServiceState'] = _SERVICESTATE
+DESCRIPTOR.message_types_by_name['ConnectionList'] = _CONNECTIONLIST
+DESCRIPTOR.message_types_by_name['Connection'] = _CONNECTION
+DESCRIPTOR.message_types_by_name['ConnectionId'] = _CONNECTIONID
+DESCRIPTOR.enum_types_by_name['ServiceType'] = _SERVICETYPE
+DESCRIPTOR.enum_types_by_name['ServiceStateEnum'] = _SERVICESTATEENUM
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+ServiceList = _reflection.GeneratedProtocolMessageType('ServiceList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICELIST,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ServiceList)
+  })
+_sym_db.RegisterMessage(ServiceList)
+
+Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICE,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.Service)
+  })
+_sym_db.RegisterMessage(Service)
+
+ServiceConfig = _reflection.GeneratedProtocolMessageType('ServiceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICECONFIG,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ServiceConfig)
+  })
+_sym_db.RegisterMessage(ServiceConfig)
+
+ServiceId = _reflection.GeneratedProtocolMessageType('ServiceId', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEID,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ServiceId)
+  })
+_sym_db.RegisterMessage(ServiceId)
+
+ServiceIdList = _reflection.GeneratedProtocolMessageType('ServiceIdList', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICEIDLIST,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ServiceIdList)
+  })
+_sym_db.RegisterMessage(ServiceIdList)
+
+ServiceState = _reflection.GeneratedProtocolMessageType('ServiceState', (_message.Message,), {
+  'DESCRIPTOR' : _SERVICESTATE,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ServiceState)
+  })
+_sym_db.RegisterMessage(ServiceState)
+
+ConnectionList = _reflection.GeneratedProtocolMessageType('ConnectionList', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONLIST,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ConnectionList)
+  })
+_sym_db.RegisterMessage(ConnectionList)
+
+Connection = _reflection.GeneratedProtocolMessageType('Connection', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTION,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.Connection)
+  })
+_sym_db.RegisterMessage(Connection)
+
+ConnectionId = _reflection.GeneratedProtocolMessageType('ConnectionId', (_message.Message,), {
+  'DESCRIPTOR' : _CONNECTIONID,
+  '__module__' : 'service_pb2'
+  # @@protoc_insertion_point(class_scope:service.ConnectionId)
+  })
+_sym_db.RegisterMessage(ConnectionId)
+
+
+
+_SERVICESERVICE = _descriptor.ServiceDescriptor(
+  name='ServiceService',
+  full_name='service.ServiceService',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=990,
+  serialized_end=1355,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='GetServiceList',
+    full_name='service.ServiceService.GetServiceList',
+    index=0,
+    containing_service=None,
+    input_type=context__pb2._EMPTY,
+    output_type=_SERVICELIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='CreateService',
+    full_name='service.ServiceService.CreateService',
+    index=1,
+    containing_service=None,
+    input_type=_SERVICE,
+    output_type=_SERVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='UpdateService',
+    full_name='service.ServiceService.UpdateService',
+    index=2,
+    containing_service=None,
+    input_type=_SERVICE,
+    output_type=_SERVICEID,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='DeleteService',
+    full_name='service.ServiceService.DeleteService',
+    index=3,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=context__pb2._EMPTY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetServiceById',
+    full_name='service.ServiceService.GetServiceById',
+    index=4,
+    containing_service=None,
+    input_type=_SERVICEID,
+    output_type=_SERVICE,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+  _descriptor.MethodDescriptor(
+    name='GetConnectionList',
+    full_name='service.ServiceService.GetConnectionList',
+    index=5,
+    containing_service=None,
+    input_type=context__pb2._EMPTY,
+    output_type=_CONNECTIONLIST,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_SERVICESERVICE)
+
+DESCRIPTOR.services_by_name['ServiceService'] = _SERVICESERVICE
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/service/proto/service_pb2_grpc.py b/src/service/proto/service_pb2_grpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..54d431fc21a22ceb3b0dd8614119b534a9de93ee
--- /dev/null
+++ b/src/service/proto/service_pb2_grpc.py
@@ -0,0 +1,232 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from . import context_pb2 as context__pb2
+from . import service_pb2 as service__pb2
+
+
+class ServiceServiceStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.GetServiceList = channel.unary_unary(
+                '/service.ServiceService/GetServiceList',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=service__pb2.ServiceList.FromString,
+                )
+        self.CreateService = channel.unary_unary(
+                '/service.ServiceService/CreateService',
+                request_serializer=service__pb2.Service.SerializeToString,
+                response_deserializer=service__pb2.ServiceId.FromString,
+                )
+        self.UpdateService = channel.unary_unary(
+                '/service.ServiceService/UpdateService',
+                request_serializer=service__pb2.Service.SerializeToString,
+                response_deserializer=service__pb2.ServiceId.FromString,
+                )
+        self.DeleteService = channel.unary_unary(
+                '/service.ServiceService/DeleteService',
+                request_serializer=service__pb2.ServiceId.SerializeToString,
+                response_deserializer=context__pb2.Empty.FromString,
+                )
+        self.GetServiceById = channel.unary_unary(
+                '/service.ServiceService/GetServiceById',
+                request_serializer=service__pb2.ServiceId.SerializeToString,
+                response_deserializer=service__pb2.Service.FromString,
+                )
+        self.GetConnectionList = channel.unary_unary(
+                '/service.ServiceService/GetConnectionList',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=service__pb2.ConnectionList.FromString,
+                )
+
+
+class ServiceServiceServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def GetServiceList(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def CreateService(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def UpdateService(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def DeleteService(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetServiceById(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetConnectionList(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_ServiceServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'GetServiceList': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetServiceList,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=service__pb2.ServiceList.SerializeToString,
+            ),
+            'CreateService': grpc.unary_unary_rpc_method_handler(
+                    servicer.CreateService,
+                    request_deserializer=service__pb2.Service.FromString,
+                    response_serializer=service__pb2.ServiceId.SerializeToString,
+            ),
+            'UpdateService': grpc.unary_unary_rpc_method_handler(
+                    servicer.UpdateService,
+                    request_deserializer=service__pb2.Service.FromString,
+                    response_serializer=service__pb2.ServiceId.SerializeToString,
+            ),
+            'DeleteService': grpc.unary_unary_rpc_method_handler(
+                    servicer.DeleteService,
+                    request_deserializer=service__pb2.ServiceId.FromString,
+                    response_serializer=context__pb2.Empty.SerializeToString,
+            ),
+            'GetServiceById': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetServiceById,
+                    request_deserializer=service__pb2.ServiceId.FromString,
+                    response_serializer=service__pb2.Service.SerializeToString,
+            ),
+            'GetConnectionList': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetConnectionList,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=service__pb2.ConnectionList.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'service.ServiceService', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ServiceService(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def GetServiceList(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/GetServiceList',
+            context__pb2.Empty.SerializeToString,
+            service__pb2.ServiceList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def CreateService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/CreateService',
+            service__pb2.Service.SerializeToString,
+            service__pb2.ServiceId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def UpdateService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/UpdateService',
+            service__pb2.Service.SerializeToString,
+            service__pb2.ServiceId.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def DeleteService(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/DeleteService',
+            service__pb2.ServiceId.SerializeToString,
+            context__pb2.Empty.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetServiceById(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/GetServiceById',
+            service__pb2.ServiceId.SerializeToString,
+            service__pb2.Service.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetConnectionList(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/service.ServiceService/GetConnectionList',
+            context__pb2.Empty.SerializeToString,
+            service__pb2.ConnectionList.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/integration_tester/requirements.in b/src/service/requirements.in
similarity index 100%
rename from src/integration_tester/requirements.in
rename to src/service/requirements.in
diff --git a/src/service/service/ServiceService.py b/src/service/service/ServiceService.py
new file mode 100644
index 0000000000000000000000000000000000000000..44573b0b92c7d4f2a6740e99eeb22cf1ff4d8a65
--- /dev/null
+++ b/src/service/service/ServiceService.py
@@ -0,0 +1,55 @@
+import grpc
+import logging
+from concurrent import futures
+from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
+from grpc_health.v1.health_pb2 import HealthCheckResponse
+from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server
+from service.service.ServiceServiceServicerImpl import ServiceServiceServicerImpl
+from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class ServiceService:
+    def __init__(self, database, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+                 grace_period=GRPC_GRACE_PERIOD):
+        self.database = database
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.service_servicer = None
+        self.health_servicer = None
+        self.pool = None
+        self.server = None
+
+    def start(self):
+        self.endpoint = '{}:{}'.format(self.address, self.port)
+        LOGGER.debug('Starting Service (tentative endpoint: {}, max_workers: {})...'.format(
+            self.endpoint, self.max_workers))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.service_servicer = ServiceServiceServicerImpl(self.database)
+        add_ServiceServiceServicer_to_server(self.service_servicer, self.server)
+
+        self.health_servicer = HealthServicer(
+            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
+        add_HealthServicer_to_server(self.health_servicer, self.server)
+
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{}:{}'.format(self.address, port)
+        LOGGER.info('Listening on {}...'.format(self.endpoint))
+        self.server.start()
+        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
+
+        LOGGER.debug('Service started')
+
+    def stop(self):
+        LOGGER.debug('Stopping service (grace period {} seconds)...'.format(self.grace_period))
+        self.health_servicer.enter_graceful_shutdown()
+        self.server.stop(self.grace_period)
+        LOGGER.debug('Service stopped')
diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1f370abc1d3eed1b3b756bf31b95c01209539fd
--- /dev/null
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -0,0 +1,277 @@
+from typing import Dict
+import grpc, logging
+from prometheus_client import Counter, Histogram
+from common.database.api.Database import Database
+from common.exceptions.ServiceException import ServiceException
+from service.proto.context_pb2 import Empty
+from service.proto.service_pb2 import ConnectionList, Service, ServiceId, ServiceList
+from service.proto.service_pb2_grpc import ServiceServiceServicer
+from service.service.Tools import check_service_id_request, check_service_request
+
+LOGGER = logging.getLogger(__name__)
+
+GETSERVICELIST_COUNTER_STARTED    = Counter  ('service_getservicelist_counter_started',
+                                              'Service:GetServiceList counter of requests started'  )
+GETSERVICELIST_COUNTER_COMPLETED  = Counter  ('service_getservicelist_counter_completed',
+                                              'Service:GetServiceList counter of requests completed')
+GETSERVICELIST_COUNTER_FAILED     = Counter  ('service_getservicelist_counter_failed',
+                                              'Service:GetServiceList counter of requests failed'   )
+GETSERVICELIST_HISTOGRAM_DURATION = Histogram('service_getservicelist_histogram_duration',
+                                              'Service:GetServiceList histogram of request duration')
+
+CREATESERVICE_COUNTER_STARTED    = Counter  ('service_createservice_counter_started',
+                                             'Service:CreateService counter of requests started'  )
+CREATESERVICE_COUNTER_COMPLETED  = Counter  ('service_createservice_counter_completed',
+                                             'Service:CreateService counter of requests completed')
+CREATESERVICE_COUNTER_FAILED     = Counter  ('service_createservice_counter_failed',
+                                             'Service:CreateService counter of requests failed'   )
+CREATESERVICE_HISTOGRAM_DURATION = Histogram('service_createservice_histogram_duration',
+                                             'Service:CreateService histogram of request duration')
+
+UPDATESERVICE_COUNTER_STARTED    = Counter  ('service_updateservice_counter_started',
+                                             'Service:UpdateService counter of requests started'  )
+UPDATESERVICE_COUNTER_COMPLETED  = Counter  ('service_updateservice_counter_completed',
+                                             'Service:UpdateService counter of requests completed')
+UPDATESERVICE_COUNTER_FAILED     = Counter  ('service_updateservice_counter_failed',
+                                             'Service:UpdateService counter of requests failed'   )
+UPDATESERVICE_HISTOGRAM_DURATION = Histogram('service_updateservice_histogram_duration',
+                                             'Service:UpdateService histogram of request duration')
+
+DELETESERVICE_COUNTER_STARTED    = Counter  ('service_deleteservice_counter_started',
+                                             'Service:DeleteService counter of requests started'  )
+DELETESERVICE_COUNTER_COMPLETED  = Counter  ('service_deleteservice_counter_completed',
+                                             'Service:DeleteService counter of requests completed')
+DELETESERVICE_COUNTER_FAILED     = Counter  ('service_deleteservice_counter_failed',
+                                             'Service:DeleteService counter of requests failed'   )
+DELETESERVICE_HISTOGRAM_DURATION = Histogram('service_deleteservice_histogram_duration',
+                                             'Service:DeleteService histogram of request duration')
+
+GETSERVICEBYID_COUNTER_STARTED    = Counter  ('service_getservicebyid_counter_started',
+                                              'Service:GetServiceById counter of requests started'  )
+GETSERVICEBYID_COUNTER_COMPLETED  = Counter  ('service_getservicebyid_counter_completed',
+                                              'Service:GetServiceById counter of requests completed')
+GETSERVICEBYID_COUNTER_FAILED     = Counter  ('service_getservicebyid_counter_failed',
+                                              'Service:GetServiceById counter of requests failed'   )
+GETSERVICEBYID_HISTOGRAM_DURATION = Histogram('service_getservicebyid_histogram_duration',
+                                              'Service:GetServiceById histogram of request duration')
+
+GETCONNECTIONLIST_COUNTER_STARTED    = Counter  ('service_getconnectionlist_counter_started',
+                                                 'Service:GetConnectionList counter of requests started'  )
+GETCONNECTIONLIST_COUNTER_COMPLETED  = Counter  ('service_getconnectionlist_counter_completed',
+                                                 'Service:GetConnectionList counter of requests completed')
+GETCONNECTIONLIST_COUNTER_FAILED     = Counter  ('service_getconnectionlist_counter_failed',
+                                                 'Service:GetConnectionList counter of requests failed'   )
+GETCONNECTIONLIST_HISTOGRAM_DURATION = Histogram('service_getconnectionlist_histogram_duration',
+                                                 'Service:GetConnectionList histogram of request duration')
+
+class ServiceServiceServicerImpl(ServiceServiceServicer):
+    def __init__(self, database : Database):
+        LOGGER.debug('Creating Servicer...')
+        self.database = database
+        LOGGER.debug('Servicer Created')
+
+    @GETSERVICELIST_HISTOGRAM_DURATION.time()
+    def GetServiceList(self, request : Empty, grpc_context : grpc.ServicerContext) -> ServiceList:
+        GETSERVICELIST_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('GetServiceList request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            db_context_uuids = self.database.contexts.get()
+            json_services = []
+            for db_context_uuid in db_context_uuids:
+                db_context = self.database.context(db_context_uuid)
+                json_services.extend(db_context.dump_services())
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = ServiceList(cs=json_services)
+            LOGGER.debug('GetServiceList reply: {}'.format(str(reply)))
+            GETSERVICELIST_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:                               # pragma: no cover (ServiceException not thrown)
+            LOGGER.exception('GetServiceList exception')
+            GETSERVICELIST_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('GetServiceList exception')
+            GETSERVICELIST_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+
+    @CREATESERVICE_HISTOGRAM_DURATION.time()
+    def CreateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId:
+        CREATESERVICE_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('CreateService request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
+                check_service_request('CreateService', request, self.database, LOGGER)
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+            db_service.create(service_type, service_config, service_state)
+
+            for db_endpoint in db_endpoints:
+                service_endpoint_id = '{}:{}/{}'.format(
+                    db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
+                db_service.endpoint(service_endpoint_id).create(db_endpoint)
+
+            for cons_type,cons_value in constraint_tuples: db_service.constraint(cons_type).create(cons_value)
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = ServiceId(**db_service.dump_id())
+            LOGGER.debug('CreateService reply: {}'.format(str(reply)))
+            CREATESERVICE_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            LOGGER.exception('CreateService exception')
+            CREATESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('CreateService exception')
+            CREATESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+
+    @UPDATESERVICE_HISTOGRAM_DURATION.time()
+    def UpdateService(self, request : Service, grpc_context : grpc.ServicerContext) -> ServiceId:
+        UPDATESERVICE_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('UpdateService request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
+                check_service_request('UpdateService', request, self.database, LOGGER)
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+
+            # Update service attributes
+            db_service.update(update_attributes={
+                'service_type'  : service_type,
+                'service_config': service_config,
+                'service_state' : service_state,
+            })
+
+            # Update service constraints; first add missing, then remove existing, but not added to Service
+            db_service_constraint_types = set(db_service.constraints.get())
+            for constraint_type,constraint_value in constraint_tuples:
+                if constraint_type in db_service_constraint_types:
+                    db_service.constraint(constraint_type).update(update_attributes={
+                        'constraint_value': constraint_value
+                    })
+                else:
+                    db_service.constraint(constraint_type).create(constraint_value)
+                db_service_constraint_types.discard(constraint_type)
+
+            for constraint_type in db_service_constraint_types:
+                db_service.constraint(constraint_type).delete()
+
+            # Update service endpoints; first add missing, then remove existing, but not added to Service
+            db_service_endpoint_uuids = set(db_service.endpoints.get())
+            for db_endpoint in db_endpoints:
+                service_endpoint_id = '{}:{}/{}'.format(
+                    db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
+                if service_endpoint_id not in db_service_endpoint_uuids:
+                    db_service.endpoint(service_endpoint_id).create(db_endpoint)
+                db_service_endpoint_uuids.discard(service_endpoint_id)
+
+            for db_service_endpoint_uuid in db_service_endpoint_uuids:
+                db_service.endpoint(db_service_endpoint_uuid).delete()
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = ServiceId(**db_service.dump_id())
+            LOGGER.debug('UpdateService reply: {}'.format(str(reply)))
+            UPDATESERVICE_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            LOGGER.exception('UpdateService exception')
+            UPDATESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('UpdateService exception')
+            UPDATESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+
+    @DELETESERVICE_HISTOGRAM_DURATION.time()
+    def DeleteService(self, request : ServiceId, grpc_context : grpc.ServicerContext) -> Empty:
+        DELETESERVICE_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('DeleteService request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id = check_service_id_request('DeleteService', request, self.database, LOGGER)
+
+            # ----- Implement changes in the database ------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+            db_service.delete()
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Empty()
+            LOGGER.debug('DeleteService reply: {}'.format(str(reply)))
+            DELETESERVICE_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            LOGGER.exception('DeleteService exception')
+            DELETESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('DeleteService exception')
+            DELETESERVICE_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+
+    @GETSERVICEBYID_HISTOGRAM_DURATION.time()
+    def GetServiceById(self, request : ServiceId, grpc_context : grpc.ServicerContext) -> Service:
+        GETSERVICEBYID_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('GetServiceById request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+            context_id, service_id = check_service_id_request('GetServiceById', request, self.database, LOGGER)
+
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            db_context = self.database.context(context_id)
+            db_service = db_context.service(service_id)
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            reply = Service(**db_service.dump())
+            LOGGER.debug('GetServiceById reply: {}'.format(str(reply)))
+            GETSERVICEBYID_COUNTER_COMPLETED.inc()
+            return reply
+        except ServiceException as e:
+            LOGGER.exception('GetServiceById exception')
+            GETSERVICEBYID_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('GetServiceById exception')
+            GETSERVICEBYID_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
+
+    @GETCONNECTIONLIST_HISTOGRAM_DURATION.time()
+    def GetConnectionList(self, request : Empty, grpc_context : grpc.ServicerContext) -> ConnectionList:
+        GETCONNECTIONLIST_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('GetConnectionList request: {}'.format(str(request)))
+
+            # ----- Validate request data and pre-conditions -----------------------------------------------------------
+
+            # ----- Retrieve data from the database --------------------------------------------------------------------
+            raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, 'RPC GetConnectionList() not implemented')
+
+            # ----- Compose reply --------------------------------------------------------------------------------------
+            #reply = ConnectionList()
+            #LOGGER.debug('GetConnectionList reply: {}'.format(str(reply)))
+            #GETCONNECTIONLIST_COUNTER_COMPLETED.inc()
+            #return reply
+        except ServiceException as e:
+            LOGGER.exception('GetConnectionList exception')
+            GETCONNECTIONLIST_COUNTER_FAILED.inc()
+            grpc_context.abort(e.code, e.details)
+        except Exception as e:                                      # pragma: no cover
+            LOGGER.exception('GetConnectionList exception')
+            GETCONNECTIONLIST_COUNTER_FAILED.inc()
+            grpc_context.abort(grpc.StatusCode.INTERNAL, str(e))
diff --git a/src/service/service/Tools.py b/src/service/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..62d602c058a55cdd229050cf3ba6301b4519fdb3
--- /dev/null
+++ b/src/service/service/Tools.py
@@ -0,0 +1,143 @@
+import grpc, logging
+from typing import Dict, List, Set, Tuple
+from common.Checkers import chk_options, chk_string
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_TOPOLOGY_ID
+from common.database.api.context.topology.device.Endpoint import Endpoint
+from common.database.api.context.service.ServiceState import ServiceState, servicestate_enum_values, \
+    to_servicestate_enum
+from common.database.api.context.service.ServiceType import ServiceType, servicetype_enum_values, to_servicetype_enum
+from common.exceptions.ServiceException import ServiceException
+from common.tools.service.DeviceCheckers import check_device_endpoint_exists
+from common.tools.service.EndpointIdCheckers import check_endpoint_id
+from common.tools.service.EnumCheckers import check_enum
+from common.tools.service.ServiceCheckers import check_service_exists, check_service_not_exists
+from service.proto.context_pb2 import Constraint
+from service.proto.service_pb2 import Service, ServiceId
+
+# For each method name, define acceptable service types. Empty set means accept all.
+ACCEPTED_SERVICE_TYPES : Dict[str, Set[ServiceType]] = {
+    'CreateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
+    'UpdateService': set([ServiceType.L2NM, ServiceType.L3NM, ServiceType.TAPI_CONNECTIVITY_SERVICE]),
+}
+
+# For each method name, define acceptable service states. Empty set means accept all.
+ACCEPTED_SERVICE_STATES : Dict[str, Set[ServiceState]] = {
+    'CreateService': set([ServiceState.PLANNED]),
+    'UpdateService': set([ServiceState.PLANNED, ServiceState.ACTIVE, ServiceState.PENDING_REMOVAL]),
+}
+
+def _check_service_exists(method_name : str, database : Database, context_id : str, service_id : str):
+    if method_name in ['CreateService']:
+        check_service_not_exists(database, context_id, service_id)
+    elif method_name in ['UpdateService', 'DeleteService', 'GetServiceById']:
+        check_service_exists(database, context_id, service_id)
+    else:                                       # pragma: no cover (test requires malforming the code)
+        msg = 'Unexpected condition [_check_service_exists(method_name={}, context_id={}, service_id={})]'
+        msg = msg.format(str(method_name), str(context_id), str(service_id))
+        raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, msg)
+
+def check_service_type(method_name : str, value : str) -> ServiceType:
+    return check_enum('ServiceType', method_name, value, to_servicetype_enum, ACCEPTED_SERVICE_TYPES)
+
+def check_service_state(method_name : str, value : str) -> ServiceState:
+    return check_enum('ServiceState', method_name, value, to_servicestate_enum, ACCEPTED_SERVICE_STATES)
+
+def check_service_constraint(
+    logger : logging.Logger, constraint_number : int, parent_name : str, constraint : Constraint,
+    add_constraints : Dict[str, str]) -> Tuple[str, str]:  # maps constraint_type -> constraint_value (see caller)
+
+    try:
+        constraint_type  = chk_string('constraint[#{}].constraint_type'.format(constraint_number),
+                                      constraint.constraint_type,
+                                      allow_empty=False)
+        constraint_value = chk_string('constraint[#{}].constraint_value'.format(constraint_number),
+                                      constraint.constraint_value,
+                                      allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    if constraint_type in add_constraints:
+        msg = 'Duplicated ConstraintType({}) in {}.'
+        msg = msg.format(constraint_type, parent_name)
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, msg)
+
+    add_constraints[constraint_type] = constraint_value
+    return constraint_type, constraint_value
+
+def check_service_request(
+    method_name : str, request : Service, database : Database, logger : logging.Logger
+    ) -> Tuple[str, str, ServiceType, str, ServiceState, List[Endpoint], List[Tuple[str, str]]]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        context_id     = chk_string ('service.cs_id.contextId.contextUuid.uuid',
+                                    request.cs_id.contextId.contextUuid.uuid,
+                                    allow_empty=False)
+        service_id     = chk_string ('service.cs_id.cs_id.uuid',
+                                    request.cs_id.cs_id.uuid,
+                                    allow_empty=False)
+        service_type   = chk_options('service.serviceType',
+                                    request.serviceType,
+                                    servicetype_enum_values())
+        service_config = chk_string ('service.serviceConfig.serviceConfig',
+                                    request.serviceConfig.serviceConfig,
+                                    allow_empty=True)
+        service_state  = chk_options('service.serviceState.serviceState',
+                                    request.serviceState.serviceState,
+                                    servicestate_enum_values())
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    service_type = check_service_type(method_name, service_type)
+    service_state = check_service_state(method_name, service_state)
+
+    # ----- Check if service exists in database ------------------------------------------------------------------------
+    _check_service_exists(method_name, database, context_id, service_id)
+
+    # ----- Parse constraints ------------------------------------------------------------------------------------------
+    add_constraints : Dict[str, str] = {}
+    constraint_tuples : List[Tuple[str, str]] = []
+    for constraint_number,constraint in enumerate(request.constraint):
+        parent_name = 'Constraint(#{}) of Context({})/Service({})'.format(constraint_number, context_id, service_id)
+        constraint_type, constraint_value = check_service_constraint(
+            logger, constraint_number, parent_name, constraint, add_constraints)
+        constraint_tuples.append((constraint_type, constraint_value))
+
+    # ----- Parse endpoints and check if they exist in the database as device endpoints --------------------------------
+    add_topology_devices_endpoints : Dict[str, Dict[str, Set[str]]] = {}
+    db_endpoints : List[Endpoint] = []
+    for endpoint_number,endpoint_id in enumerate(request.endpointList):
+        parent_name = 'Endpoint(#{}) of Context({})/Service({})'.format(endpoint_number, context_id, service_id)
+
+        ep_topology_id, ep_device_id, ep_port_id = check_endpoint_id(
+            logger, endpoint_number, parent_name, endpoint_id, add_topology_devices_endpoints,
+            predefined_context_id=context_id, acceptable_context_ids=set([context_id]))
+
+        db_endpoint = check_device_endpoint_exists(
+            database, parent_name, context_id, ep_topology_id, ep_device_id, ep_port_id)
+        db_endpoints.append(db_endpoint)
+
+    return context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples
+
+def check_service_id_request(
+    method_name : str, request : ServiceId, database : Database, logger : logging.Logger) -> Tuple[str, str]:
+
+    # ----- Parse attributes -------------------------------------------------------------------------------------------
+    try:
+        context_id     = chk_string ('service_id.contextId.contextUuid.uuid',
+                                    request.contextId.contextUuid.uuid,
+                                    allow_empty=False)
+        service_id     = chk_string ('service_id.cs_id.uuid',
+                                    request.cs_id.uuid,
+                                    allow_empty=False)
+    except Exception as e:
+        logger.exception('Invalid arguments:')
+        raise ServiceException(grpc.StatusCode.INVALID_ARGUMENT, str(e))
+
+    # ----- Check if service exists in database ------------------------------------------------------------------------
+    _check_service_exists(method_name, database, context_id, service_id)
+
+    return context_id, service_id
diff --git a/src/service/service/__init__.py b/src/service/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/service/__main__.py b/src/service/service/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f492d9096f4237280e03edfb594138c092099cd5
--- /dev/null
+++ b/src/service/service/__main__.py
@@ -0,0 +1,52 @@
+import logging, os, signal, sys, threading
+from prometheus_client import start_http_server
+from common.database.Factory import get_database
+from service.service.ServiceService import ServiceService
+from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
+
+terminate = threading.Event()
+logger = None
+
+def signal_handler(signal, frame):
+    global terminate, logger
+    logger.warning('Terminate signal received')
+    terminate.set()
+
+def main():
+    global terminate, logger
+
+    service_port = int(os.environ.get('SERVICESERVICE_SERVICE_PORT_GRPC', GRPC_SERVICE_PORT))  # env values are str; cast
+    max_workers  = int(os.environ.get('MAX_WORKERS',                      GRPC_MAX_WORKERS ))  # ThreadPoolExecutor needs int
+    grace_period = int(os.environ.get('GRACE_PERIOD',                     GRPC_GRACE_PERIOD))  # server.stop needs number
+    log_level    = os.environ.get('LOG_LEVEL',                            LOG_LEVEL        )
+    metrics_port = int(os.environ.get('METRICS_PORT',                     METRICS_PORT     ))  # start_http_server needs int
+
+    logging.basicConfig(level=log_level)
+    logger = logging.getLogger(__name__)
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    logger.info('Starting...')
+
+    # Start metrics server
+    start_http_server(metrics_port)
+
+    # Get database instance
+    database = get_database()
+
+    # Starting service service
+    grpc_service = ServiceService(database, port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service.start()
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(timeout=0.1): pass
+
+    logger.info('Terminating...')
+    grpc_service.stop()
+
+    logger.info('Bye')
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/service/tests/__init__.py b/src/service/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/tests/test_unitary.py b/src/service/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..48d56af038b226caa2ffe6a73b71509f4c011ad1
--- /dev/null
+++ b/src/service/tests/test_unitary.py
@@ -0,0 +1,367 @@
+import copy, grpc, logging, pytest
+from google.protobuf.json_format import MessageToDict
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.api.Database import Database
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.tests.script import populate_example
+from common.tests.Assertions import validate_empty, validate_service, validate_service_id, \
+    validate_service_list_is_empty, validate_service_list_is_not_empty
+from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from service.client.ServiceClient import ServiceClient
+from service.proto.context_pb2 import Empty
+from service.proto.service_pb2 import Service, ServiceId, ServiceStateEnum, ServiceType
+from service.service.ServiceService import ServiceService
+
+port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+# use "copy.deepcopy" to prevent propagating forced changes during tests
+CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
+TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
+SERVICE_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'cs_id': {'uuid': 'DEV1'}}
+SERVICE = {
+    'cs_id': copy.deepcopy(SERVICE_ID),
+    'serviceType': ServiceType.L3NM,
+    'serviceConfig': {'serviceConfig': '<config/>'},
+    'serviceState': {'serviceState': ServiceStateEnum.PLANNED},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP5'}},
+        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP5'}},
+        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV3'}}, 'port_id': {'uuid' : 'EP5'}},
+    ]
+}
+
+@pytest.fixture(scope='session')
+def database():
+    _database = get_database(engine=DatabaseEngineEnum.INMEMORY)
+    populate_example(_database, add_services=False)
+    return _database
+
+@pytest.fixture(scope='session')
+def service_service(database):
+    _service = ServiceService(
+        database, port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def service_client(service_service):
+    _client = ServiceClient(address='127.0.0.1', port=port)
+    yield _client
+    _client.close()
+
+def test_get_services_empty(service_client : ServiceClient):
+    # should work
+    validate_service_list_is_empty(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_create_service_wrong_service_attributes(service_client : ServiceClient):
+    # should fail with wrong service context
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'service.cs_id.contextId.contextUuid.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with service context does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = 'wrong-context'
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(wrong-context) does not exist in the database.'
+    assert e.value.details() == msg
+
+    # should fail with wrong service id
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['cs_id']['cs_id']['uuid'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'service.cs_id.cs_id.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with wrong service type
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['serviceType'] = ServiceType.UNKNOWN
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Method(CreateService) does not accept ServiceType(UNKNOWN).',
+        'Permitted values for Method(CreateService) are',
+        'ServiceType([\'L2NM\', \'L3NM\', \'TAPI_CONNECTIVITY_SERVICE\']).',
+    ])
+    assert e.value.details() == msg
+
+    # should fail with wrong service state
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['serviceState']['serviceState'] = ServiceStateEnum.PENDING_REMOVAL
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Method(CreateService) does not accept ServiceState(PENDING_REMOVAL).',
+        'Permitted values for Method(CreateService) are',
+        'ServiceState([\'PLANNED\']).',
+    ])
+    assert e.value.details() == msg
+
+def test_create_service_wrong_constraint(service_client : ServiceClient):
+    # should fail with wrong constraint type
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['constraint'][0]['constraint_type'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'constraint[#0].constraint_type() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with wrong constraint value
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['constraint'][0]['constraint_value'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'constraint[#0].constraint_value() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with duplicated constraint type
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['constraint'][1] = copy_service['constraint'][0]
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'Duplicated ConstraintType(latency_ms) in Constraint(#1) of Context(admin)/Service(DEV1).'
+    assert e.value.details() == msg
+
+def test_create_service_wrong_endpoint(service_client : ServiceClient, database : Database):
+    # should fail with wrong endpoint context
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
+        print(copy_service)
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Context(wrong-context) in Endpoint(#0) of Context(admin)/Service(DEV1)',
+        'mismatches acceptable Contexts({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Context(admin).'
+    ])
+    assert e.value.details() == msg
+
+    # should fail with wrong endpoint topology
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = ' '.join([
+        'Context(admin)/Topology(wrong-topo) in Endpoint(#0) of Context(admin)/Service(DEV1)',
+        'mismatches acceptable Topologies({\'admin\'}).',
+        'Optionally, leave field empty to use predefined Topology(admin).',
+    ])
+    assert e.value.details() == msg
+
+    # should fail with endpoint device is empty
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'endpoint_id[#0].dev_id.device_id.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with endpoint device not found
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0)',
+        'of Context(admin)/Service(DEV1) does not exist in the database.',
+    ])
+    assert e.value.details() == msg
+
+    # should fail with endpoint device duplicated
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][1] = copy_service['endpointList'][0]
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'Duplicated Context(admin)/Topology(admin)/Device(DEV1) in Endpoint(#1) of Context(admin)/Service(DEV1).'
+    assert e.value.details() == msg
+
+    # should fail with endpoint port is empty
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['port_id']['uuid'] = ''
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'endpoint_id[#0].port_id.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with endpoint port not found
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service = copy.deepcopy(SERVICE)
+        copy_service['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
+        service_client.CreateService(Service(**copy_service))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = ' '.join([
+        'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0)',
+        'of Context(admin)/Service(DEV1) does not exist in the database.',
+    ])
+    assert e.value.details() == msg
+
+def test_get_service_does_not_exist(service_client : ServiceClient):
+    # should fail with service context does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service_id = copy.deepcopy(SERVICE_ID)
+        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
+        service_client.GetServiceById(ServiceId(**copy_service_id))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(wrong-context) does not exist in the database.'
+    assert e.value.details() == msg
+
+    # should fail with service does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        service_client.GetServiceById(ServiceId(**SERVICE_ID))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
+    assert e.value.details() == msg
+
+def test_update_service_does_not_exist(service_client : ServiceClient):
+    # should fail with service does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        service_client.UpdateService(Service(**SERVICE))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
+    assert e.value.details() == msg
+
+def test_create_service(service_client : ServiceClient):
+    # should work
+    validate_service_id(MessageToDict(
+        service_client.CreateService(Service(**SERVICE)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_create_service_already_exists(service_client : ServiceClient):
+    # should fail with service already exists
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        service_client.CreateService(Service(**SERVICE))
+    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
+    msg = 'Context(admin)/Service(DEV1) already exists in the database.'
+    assert e.value.details() == msg
+
+def test_get_service(service_client : ServiceClient):
+    # should work
+    validate_service(MessageToDict(
+        service_client.GetServiceById(ServiceId(**SERVICE_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_update_service(service_client : ServiceClient):
+    # should work
+    copy_service = copy.deepcopy(SERVICE)
+    copy_service['serviceConfig']['serviceConfig'] = '<newconfig/>'
+    copy_service['serviceState']['serviceState'] = ServiceStateEnum.ACTIVE
+    copy_service['constraint'] = [
+        {'constraint_type': 'latency_ms', 'constraint_value': '200'},
+        {'constraint_type': 'bandwidth_gbps', 'constraint_value': '100'},
+    ]
+    copy_service['endpointList'] = [
+        {
+            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
+            'dev_id': {'device_id': {'uuid': 'DEV1'}},
+            'port_id': {'uuid' : 'EP5'}
+        },
+        {
+            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
+            'dev_id': {'device_id': {'uuid': 'DEV2'}},
+            'port_id': {'uuid' : 'EP6'}
+        },
+    ]
+    validate_service_id(MessageToDict(
+        service_client.UpdateService(Service(**copy_service)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_delete_service_wrong_service_id(service_client : ServiceClient):
+    # should fail with service context is empty
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service_id = copy.deepcopy(SERVICE_ID)
+        copy_service_id['contextId']['contextUuid']['uuid'] = ''
+        service_client.DeleteService(ServiceId(**copy_service_id))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'service_id.contextId.contextUuid.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with service context does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service_id = copy.deepcopy(SERVICE_ID)
+        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
+        service_client.DeleteService(ServiceId(**copy_service_id))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(wrong-context) does not exist in the database.'
+    assert e.value.details() == msg
+
+    # should fail with service id is empty
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service_id = copy.deepcopy(SERVICE_ID)
+        copy_service_id['cs_id']['uuid'] = ''
+        service_client.DeleteService(ServiceId(**copy_service_id))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg = 'service_id.cs_id.uuid() string is empty.'
+    assert e.value.details() == msg
+
+    # should fail with service does not exist
+    with pytest.raises(grpc._channel._InactiveRpcError) as e:
+        copy_service_id = copy.deepcopy(SERVICE_ID)
+        copy_service_id['cs_id']['uuid'] = 'wrong-service'
+        service_client.DeleteService(ServiceId(**copy_service_id))
+    assert e.value.code() == grpc.StatusCode.NOT_FOUND
+    msg = 'Context(admin)/Service(wrong-service) does not exist in the database.'
+    assert e.value.details() == msg
+
+def test_delete_service(service_client : ServiceClient):
+    # should work
+    validate_empty(MessageToDict(
+        service_client.DeleteService(ServiceId(**SERVICE_ID)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_get_services_empty_2(service_client : ServiceClient):
+    # should work
+    validate_service_list_is_empty(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_create_service_empty_endpoints(service_client : ServiceClient):
+    # should work
+    copy_service = copy.deepcopy(SERVICE)
+    copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
+    copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = ''
+    validate_service_id(MessageToDict(
+        service_client.CreateService(Service(**copy_service)),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_get_services_full(service_client : ServiceClient):
+    # should work
+    validate_service_list_is_not_empty(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
diff --git a/src/integration_tester/.gitlab-ci.yml b/src/tester_functional/.gitlab-ci.yml
similarity index 79%
rename from src/integration_tester/.gitlab-ci.yml
rename to src/tester_functional/.gitlab-ci.yml
index d090e73a20e3eea4c0fb19cac579fd3aa251f79e..15f2cd434174031ee8e461ec33c39a1cf5ff58e9 100644
--- a/src/integration_tester/.gitlab-ci.yml
+++ b/src/tester_functional/.gitlab-ci.yml
@@ -1,7 +1,7 @@
 # Build, tag, and push the Docker images to the GitLab Docker registry
-build integration_tester:
+build funct_test:
   variables:
-    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_NAME: 'tester_functional' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: build
   before_script:
@@ -15,17 +15,18 @@ build integration_tester:
       - src/common/**
       - src/context/**
       - src/device/**
+      - src/service/**
       - src/$IMAGE_NAME/**
       - .gitlab-ci.yml
 
 # Pull, execute, and run unitary tests for the Docker image from the GitLab registry
-test integration_tester:
+unit_test funct_test:
   variables:
-    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_NAME: 'tester_functional' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
   stage: test
   needs:
-    - build integration_tester
+    - build funct_test
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
   script:
@@ -35,21 +36,24 @@ test integration_tester:
       - src/common/**
       - src/context/**
       - src/device/**
+      - src/service/**
       - src/$IMAGE_NAME/**
       - .gitlab-ci.yml
 
-# Run integration tests in Kubernetes Cluster
-integration_test integration_tester:
+# Run functional tests in Kubernetes Cluster
+funct_test execute:
   variables:
-    IMAGE_NAME: 'integration_tester' # name of the microservice
+    IMAGE_NAME: 'tester_functional' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: integration_test
+  stage: funct_test
   needs:
-    - build integration_tester
-    - test integration_tester
+    - build funct_test
+    - unit_test funct_test
     - deploy context
     - deploy device
+    - deploy service
     - dependencies all
+    - integ_test execute
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
   script:
diff --git a/src/tester_functional/Dockerfile b/src/tester_functional/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..36883f30bf76fbee7094ee9c5dbe626c6fdec071
--- /dev/null
+++ b/src/tester_functional/Dockerfile
@@ -0,0 +1,35 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=0
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/tester_functional
+
+# Get Python packages per module: compile pinned requirements.txt from
+# requirements.in, then install the pinned file (not the unpinned .in)
+COPY tester_functional/requirements.in tester_functional/requirements.in
+RUN pip-compile --output-file=tester_functional/requirements.txt tester_functional/requirements.in
+RUN python3 -m pip install -r tester_functional/requirements.txt
+
+# Add files into working directory
+COPY common/. common
+COPY context/. context
+COPY device/. device
+COPY service/. service
+COPY tester_functional/. tester_functional
+
+# Run functional tester
+ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", \
+            "tester_functional/test_context_device_service.py" \
+]
diff --git a/src/tester_functional/__init__.py b/src/tester_functional/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/tester_functional/definitions.py b/src/tester_functional/definitions.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b38e74bc9d25199e4f192dbb3f093ec033fe80b
--- /dev/null
+++ b/src/tester_functional/definitions.py
@@ -0,0 +1,165 @@
+import copy
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.service.ServiceState import ServiceState
+from common.database.api.context.service.ServiceType import ServiceType
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
+
+# use "copy.deepcopy" to prevent propagating forced changes during tests
+dc = copy.deepcopy
+
+CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
+
+TOPOLOGY_ID = {'contextId': dc(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
+
+DEVICE_ID_DEV1 = {'device_id': {'uuid': 'dev1'}}
+DEVICE_DEV1 = {
+    'device_id': dc(DEVICE_ID_DEV1), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+DEVICE_ID_DEV2 = {'device_id': {'uuid': 'dev2'}}
+DEVICE_DEV2 = {
+    'device_id': dc(DEVICE_ID_DEV2), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+DEVICE_ID_DEV3 = {'device_id': {'uuid': 'dev3'}}
+DEVICE_DEV3 = {
+    'device_id': dc(DEVICE_ID_DEV3), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+LINK_ID_DEV1_DEV2 = {'link_id': {'uuid': 'dev1/port2 ==> dev2/port1'}}
+LINK_DEV1_DEV2 = {
+    'link_id': dc(LINK_ID_DEV1_DEV2),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+    ]
+}
+
+LINK_ID_DEV1_DEV3 = {'link_id': {'uuid': 'dev1/port3 ==> dev3/port1'}}
+LINK_DEV1_DEV3 = {
+    'link_id': dc(LINK_ID_DEV1_DEV3),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV1 = {'link_id': {'uuid': 'dev2/port1 ==> dev1/port2'}}
+LINK_DEV2_DEV1 = {
+    'link_id': dc(LINK_ID_DEV2_DEV1),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV3 = {'link_id': {'uuid': 'dev2/port3 ==> dev3/port2'}}
+LINK_DEV2_DEV3 = {
+    'link_id': dc(LINK_ID_DEV2_DEV3),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV1 = {'link_id': {'uuid': 'dev3/port1 ==> dev1/port3'}}
+LINK_DEV3_DEV1 = {
+    'link_id': dc(LINK_ID_DEV3_DEV1),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV2 = {'link_id': {'uuid': 'dev3/port2 ==> dev2/port3'}}
+LINK_DEV3_DEV2 = {
+    'link_id': dc(LINK_ID_DEV3_DEV2),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+    ]
+}
+
+SERVICE_ID_SVC1 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc1'}}
+SERVICE_SVC1 = {
+    'cs_id': dc(SERVICE_ID_SVC1), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port101'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port101'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port101'}},
+    ]
+}
+
+SERVICE_ID_SVC2 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc2'}}
+SERVICE_SVC2 = {
+    'cs_id': dc(SERVICE_ID_SVC2), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port102'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port102'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port102'}},
+    ]
+}
+
+SERVICE_ID_SVC3 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc3'}}
+SERVICE_SVC3 = {
+    'cs_id': dc(SERVICE_ID_SVC3), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port103'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port103'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port103'}},
+    ]
+}
diff --git a/src/tester_functional/requirements.in b/src/tester_functional/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..25abdad1b5767117956a88b816399635348884c7
--- /dev/null
+++ b/src/tester_functional/requirements.in
@@ -0,0 +1,6 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
diff --git a/src/integration_tester/test_context_device.py b/src/tester_functional/test_context_device_service.py
similarity index 67%
rename from src/integration_tester/test_context_device.py
rename to src/tester_functional/test_context_device_service.py
index 7c101f17dea8847de4e5579ffce28b3318586f9f..d1b228b0dffc40b681314ebe66879534b85ed699 100644
--- a/src/integration_tester/test_context_device.py
+++ b/src/tester_functional/test_context_device_service.py
@@ -2,13 +2,18 @@ import logging, os, pytest
 from google.protobuf.json_format import MessageToDict
 from common.database.Factory import get_database, DatabaseEngineEnum
 from common.database.api.Database import Database
-from common.tests.Assertions import validate_device_id, validate_link_id, validate_topology_has_devices, \
-    validate_topology_has_links, validate_topology_is_empty
+from common.tests.Assertions import validate_device_id, validate_link_id, validate_service_id, \
+    validate_service_list_is_not_empty, validate_topology_has_devices, validate_topology_has_links, \
+    validate_topology_is_empty
 from context.client.ContextClient import ContextClient
 from context.proto.context_pb2 import Device, Empty, Link
 from device.client.DeviceClient import DeviceClient
-from .definitions import DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3, LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, \
-    LINK_DEV2_DEV3, LINK_DEV3_DEV1, LINK_DEV3_DEV2
+from tester_functional.definitions import DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3
+from tester_functional.definitions import LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, LINK_DEV2_DEV3, \
+    LINK_DEV3_DEV1, LINK_DEV3_DEV2
+from tester_functional.definitions import SERVICE_SVC1, SERVICE_SVC2, SERVICE_SVC3
+from service.client.ServiceClient import ServiceClient
+from service.proto.service_pb2 import Service
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
@@ -39,6 +44,14 @@ def device_client():
     yield _client
     _client.close()
 
+@pytest.fixture(scope='session')
+def service_client():
+    service_host = get_setting('SERVICESERVICE_SERVICE_HOST')
+    service_port = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')
+    _client = ServiceClient(address=service_host, port=service_port)
+    yield _client
+    _client.close()
+
 def test_clean_database(redis_database : Database):
     # should work
     redis_database.clear_all()
@@ -77,3 +90,17 @@ def test_add_links(context_client : ContextClient):
         context_client.GetTopology(Empty()),
         including_default_value_fields=True, preserving_proto_field_name=True,
         use_integers_for_enums=False))
+
+def test_add_services(service_client : ServiceClient):
+    # should work
+    for service in [SERVICE_SVC1, SERVICE_SVC2, SERVICE_SVC3]:
+        validate_service_id(MessageToDict(
+            service_client.CreateService(Service(**service)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_service_list_is_not_empty(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
diff --git a/src/tester_integration/.gitlab-ci.yml b/src/tester_integration/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..cf868b1a40c583481b292dfe2445b3e0fcfe4501
--- /dev/null
+++ b/src/tester_integration/.gitlab-ci.yml
@@ -0,0 +1,77 @@
+# Build, tag, and push the Docker images to the GitLab Docker registry
+build integ_test:
+  variables:
+    IMAGE_NAME: 'tester_integration' # name of the microservice
+    IMAGE_NAME_TEST: 'tester_integration-test' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  rules:
+    - changes:
+      - src/common/**
+      - src/context/**
+      - src/device/**
+      - src/service/**
+      - src/tester_integration/**
+      - .gitlab-ci.yml
+
+# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
+unit_test integ_test:
+  variables:
+    IMAGE_NAME: 'tester_integration' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: test
+  needs:
+    - build integ_test
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+  rules:
+    - changes:
+      - src/common/**
+      - src/context/**
+      - src/device/**
+      - src/service/**
+      - src/tester_integration/**
+      - .gitlab-ci.yml
+
+# Pull, execute, and run integration tests for the Docker image from the GitLab registry
+integ_test execute:
+  variables:
+    IMAGE_NAME: 'tester_integration' # name of the microservice
+    IMAGE_NAME_TEST: 'tester_integration-test' # name of the microservice
+    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+  stage: integ_test
+  needs:
+    - build integ_test
+    - unit_test context
+    - unit_test device
+    - unit_test service
+    - unit_test integ_test
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
+  script:
+    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
+    - docker run -d -p 6379:6379 --name redis --network=teraflowbridge "redis:6.2"
+    - docker ps -a
+    - sleep 5
+    - docker ps -a
+    - docker run -d --name $IMAGE_NAME --network=teraflowbridge --env "DB_ENGINE=redis" --env "REDIS_SERVICE_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' redis)" --env "REDIS_SERVICE_PORT=6379" --env "REDIS_DATABASE_ID=0" --entrypoint bash "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" -c "pytest --log-level=INFO --verbose common/database/tests/test_engine_redis.py tester_integration/test_context_device_service.py"
+  after_script:
+    - docker stop $IMAGE_NAME redis
+    - docker rm $IMAGE_NAME redis
+  rules:
+    - changes:
+      - src/common/**
+      - src/context/**
+      - src/device/**
+      - src/service/**
+      - src/tester_integration/**
+      - .gitlab-ci.yml
diff --git a/src/integration_tester/Dockerfile b/src/tester_integration/Dockerfile
similarity index 61%
rename from src/integration_tester/Dockerfile
rename to src/tester_integration/Dockerfile
index 5aabc8bc89115a45b40095aa325808eaf3c4cbaf..31ab8884b62814d43fbadaf624754e10681c8e7c 100644
--- a/src/integration_tester/Dockerfile
+++ b/src/tester_integration/Dockerfile
@@ -15,20 +15,21 @@ RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
 WORKDIR /var/teraflow
 
 # Create module sub-folders
-RUN mkdir -p /var/teraflow/integration_tester
+RUN mkdir -p /var/teraflow/tester_integration
 
 # Get Python packages per module
-COPY integration_tester/requirements.in integration_tester/requirements.in
-RUN pip-compile --output-file=integration_tester/requirements.txt integration_tester/requirements.in
-RUN python3 -m pip install -r integration_tester/requirements.in
+COPY tester_integration/requirements.in tester_integration/requirements.in
+RUN pip-compile --output-file=tester_integration/requirements.txt tester_integration/requirements.in
+RUN python3 -m pip install -r tester_integration/requirements.txt
 
 # Add files into working directory
 COPY common/. common
 COPY context/. context
 COPY device/. device
-COPY integration_tester/. integration_tester
+COPY service/. service
+COPY tester_integration/. tester_integration
 
 # Run integration tester
 ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", \
-            "integration_tester/test_context_device.py" \
+            "tester_integration/test_context_device_service.py" \
 ]
diff --git a/src/tester_integration/__init__.py b/src/tester_integration/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/tester_integration/definitions.py b/src/tester_integration/definitions.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b38e74bc9d25199e4f192dbb3f093ec033fe80b
--- /dev/null
+++ b/src/tester_integration/definitions.py
@@ -0,0 +1,165 @@
+import copy
+from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
+from common.database.api.context.service.ServiceState import ServiceState
+from common.database.api.context.service.ServiceType import ServiceType
+from common.database.api.context.topology.device.OperationalStatus import OperationalStatus
+
+# use "copy.deepcopy" to prevent propagating forced changes during tests
+dc = copy.deepcopy
+
+CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
+
+TOPOLOGY_ID = {'contextId': dc(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
+
+DEVICE_ID_DEV1 = {'device_id': {'uuid': 'dev1'}}
+DEVICE_DEV1 = {
+    'device_id': dc(DEVICE_ID_DEV1), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+DEVICE_ID_DEV2 = {'device_id': {'uuid': 'dev2'}}
+DEVICE_DEV2 = {
+    'device_id': dc(DEVICE_ID_DEV2), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+DEVICE_ID_DEV3 = {'device_id': {'uuid': 'dev3'}}
+DEVICE_DEV3 = {
+    'device_id': dc(DEVICE_ID_DEV3), 'device_type': 'ROADM', 'device_config': {'device_config': '<config/>'},
+    'devOperationalStatus': OperationalStatus.ENABLED.value,
+    'endpointList' : [
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+         'port_type': 'WDM'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port101'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port102'}},
+         'port_type': 'OCH'},
+        {'port_id': {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port103'}},
+         'port_type': 'OCH'},
+    ]
+}
+
+LINK_ID_DEV1_DEV2 = {'link_id': {'uuid': 'dev1/port2 ==> dev2/port1'}}
+LINK_DEV1_DEV2 = {
+    'link_id': dc(LINK_ID_DEV1_DEV2),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+    ]
+}
+
+LINK_ID_DEV1_DEV3 = {'link_id': {'uuid': 'dev1/port3 ==> dev3/port1'}}
+LINK_DEV1_DEV3 = {
+    'link_id': dc(LINK_ID_DEV1_DEV3),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV1 = {'link_id': {'uuid': 'dev2/port1 ==> dev1/port2'}}
+LINK_DEV2_DEV1 = {
+    'link_id': dc(LINK_ID_DEV2_DEV1),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port1'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port2'}},
+    ]
+}
+
+LINK_ID_DEV2_DEV3 = {'link_id': {'uuid': 'dev2/port3 ==> dev3/port2'}}
+LINK_DEV2_DEV3 = {
+    'link_id': dc(LINK_ID_DEV2_DEV3),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV1 = {'link_id': {'uuid': 'dev3/port1 ==> dev1/port3'}}
+LINK_DEV3_DEV1 = {
+    'link_id': dc(LINK_ID_DEV3_DEV1),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port1'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port3'}},
+    ]
+}
+
+LINK_ID_DEV3_DEV2 = {'link_id': {'uuid': 'dev3/port2 ==> dev2/port3'}}
+LINK_DEV3_DEV2 = {
+    'link_id': dc(LINK_ID_DEV3_DEV2),
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port2'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port3'}},
+    ]
+}
+
+SERVICE_ID_SVC1 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc1'}}
+SERVICE_SVC1 = {
+    'cs_id': dc(SERVICE_ID_SVC1), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port101'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port101'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port101'}},
+    ]
+}
+
+SERVICE_ID_SVC2 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc2'}}
+SERVICE_SVC2 = {
+    'cs_id': dc(SERVICE_ID_SVC2), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port102'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port102'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port102'}},
+    ]
+}
+
+SERVICE_ID_SVC3 = {'contextId': dc(CONTEXT_ID), 'cs_id': {'uuid': 'svc3'}}
+SERVICE_SVC3 = {
+    'cs_id': dc(SERVICE_ID_SVC3), 'serviceType': ServiceType.L3NM.value,
+    'serviceConfig': {'serviceConfig': '<config/>'}, 'serviceState': {'serviceState': ServiceState.PLANNED.value},
+    'constraint': [
+        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
+        {'constraint_type': 'hops', 'constraint_value': '5'},
+    ],
+    'endpointList' : [
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV1), 'port_id': {'uuid' : 'port103'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV2), 'port_id': {'uuid' : 'port103'}},
+        {'topoId': dc(TOPOLOGY_ID), 'dev_id': dc(DEVICE_ID_DEV3), 'port_id': {'uuid' : 'port103'}},
+    ]
+}
diff --git a/src/tester_integration/requirements.in b/src/tester_integration/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..25abdad1b5767117956a88b816399635348884c7
--- /dev/null
+++ b/src/tester_integration/requirements.in
@@ -0,0 +1,6 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
diff --git a/src/tester_integration/test_context_device_service.py b/src/tester_integration/test_context_device_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d128800c2e714de242de328c2d00a6134c378d7
--- /dev/null
+++ b/src/tester_integration/test_context_device_service.py
@@ -0,0 +1,138 @@
+import logging, pytest
+from google.protobuf.json_format import MessageToDict
+from common.database.Factory import get_database, DatabaseEngineEnum
+from common.database.api.Database import Database
+from common.tests.Assertions import validate_device_id, validate_link_id, validate_service_id, \
+    validate_service_list_is_not_empty, validate_topology_has_devices, validate_topology_has_links, \
+    validate_topology_is_empty
+from context.Config import GRPC_SERVICE_PORT as GRPC_CONTEXT_SERVICE_PORT, \
+    GRPC_MAX_WORKERS as GRPC_CONTEXT_MAX_WORKERS, GRPC_GRACE_PERIOD as GRPC_CONTEXT_GRACE_PERIOD
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Device, Empty, Link
+from context.service.ContextService import ContextService
+from device.Config import GRPC_SERVICE_PORT as GRPC_DEVICE_SERVICE_PORT, \
+    GRPC_MAX_WORKERS as GRPC_DEVICE_MAX_WORKERS, GRPC_GRACE_PERIOD as GRPC_DEVICE_GRACE_PERIOD
+from device.client.DeviceClient import DeviceClient
+from device.service.DeviceService import DeviceService
+from service.service.ServiceService import ServiceService
+from tester_integration.definitions import DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3
+from tester_integration.definitions import LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, LINK_DEV2_DEV3, \
+    LINK_DEV3_DEV1, LINK_DEV3_DEV2
+from tester_integration.definitions import SERVICE_SVC1, SERVICE_SVC2, SERVICE_SVC3
+from service.Config import GRPC_SERVICE_PORT as GRPC_SERVICE_SERVICE_PORT, \
+    GRPC_MAX_WORKERS as GRPC_SERVICE_MAX_WORKERS, GRPC_GRACE_PERIOD as GRPC_SERVICE_GRACE_PERIOD
+from service.client.ServiceClient import ServiceClient
+from service.proto.service_pb2 import Service
+
+PORT_CONTEXT = 10000 + GRPC_CONTEXT_SERVICE_PORT # avoid privileged ports
+PORT_DEVICE  = 10000 + GRPC_DEVICE_SERVICE_PORT  # avoid privileged ports
+PORT_SERVICE = 10000 + GRPC_SERVICE_SERVICE_PORT # avoid privileged ports
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+@pytest.fixture(scope='session')
+def redis_database():
+    _database = get_database(engine=DatabaseEngineEnum.REDIS, REDIS_DATABASE_ID=0)
+    return _database
+
+@pytest.fixture(scope='session')
+def context_service(redis_database : Database):
+    context_database = get_database(engine=DatabaseEngineEnum.REDIS, REDIS_DATABASE_ID=0)
+    _service = ContextService(
+        context_database, port=PORT_CONTEXT, max_workers=GRPC_CONTEXT_MAX_WORKERS,
+        grace_period=GRPC_CONTEXT_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def context_client(context_service):
+    _client = ContextClient(address='127.0.0.1', port=PORT_CONTEXT)
+    yield _client
+    _client.close()
+
+@pytest.fixture(scope='session')
+def device_service(redis_database : Database):
+    device_database = get_database(engine=DatabaseEngineEnum.REDIS, REDIS_DATABASE_ID=0)
+    _service = DeviceService(
+        device_database, port=PORT_DEVICE, max_workers=GRPC_DEVICE_MAX_WORKERS,
+        grace_period=GRPC_DEVICE_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def device_client(device_service):
+    _client = DeviceClient(address='127.0.0.1', port=PORT_DEVICE)
+    yield _client
+    _client.close()
+
+@pytest.fixture(scope='session')
+def service_service(redis_database : Database):
+    service_database = get_database(engine=DatabaseEngineEnum.REDIS, REDIS_DATABASE_ID=0)
+    _service = ServiceService(
+        service_database, port=PORT_SERVICE, max_workers=GRPC_SERVICE_MAX_WORKERS,
+        grace_period=GRPC_SERVICE_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def service_client(service_service):
+    _client = ServiceClient(address='127.0.0.1', port=PORT_SERVICE)
+    yield _client
+    _client.close()
+
+def test_clean_database(redis_database : Database):
+    # should work
+    redis_database.clear_all()
+
+def test_get_topology_empty(context_client : ContextClient):
+    # should work
+    validate_topology_is_empty(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_devices(context_client : ContextClient, device_client : DeviceClient):
+    # should work
+    for device in [DEVICE_DEV1, DEVICE_DEV2, DEVICE_DEV3]:
+        validate_device_id(MessageToDict(
+            device_client.AddDevice(Device(**device)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_topology_has_devices(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_links(context_client : ContextClient):
+    # should work
+    for link in [LINK_DEV1_DEV2, LINK_DEV1_DEV3, LINK_DEV2_DEV1, LINK_DEV2_DEV3, LINK_DEV3_DEV1, LINK_DEV3_DEV2]:
+        validate_link_id(MessageToDict(
+            context_client.AddLink(Link(**link)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_topology_has_links(MessageToDict(
+        context_client.GetTopology(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))
+
+def test_add_services(service_client : ServiceClient):
+    # should work
+    for service in [SERVICE_SVC1, SERVICE_SVC2, SERVICE_SVC3]:
+        validate_service_id(MessageToDict(
+            service_client.CreateService(Service(**service)),
+            including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False))
+
+    # should work
+    validate_service_list_is_not_empty(MessageToDict(
+        service_client.GetServiceList(Empty()),
+        including_default_value_fields=True, preserving_proto_field_name=True,
+        use_integers_for_enums=False))