diff --git a/manifests/.gitignore b/manifests/.gitignore
index 7ca2c51d3ca0ade4160bd94025769882bce3fb08..74cb209abec0711bfa467772708b137606e98b9a 100644
--- a/manifests/.gitignore
+++ b/manifests/.gitignore
@@ -1,4 +1,4 @@
 # Internal manifest used for local testings.
 
 # CTTC section:
-cttc-ols.yaml
+cttc-ols/
diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml
index 72fd1c61564831f61bdf78aa494092829f0dd676..e9337807d9bbcec32fee2fe6f4ab04e4b4dba1aa 100644
--- a/manifests/serviceservice.yaml
+++ b/manifests/serviceservice.yaml
@@ -19,10 +19,6 @@ spec:
         ports:
         - containerPort: 3030
         env:
-        - name: DB_ENGINE
-          value: "redis"
-        - name: REDIS_DATABASE_ID
-          value: "0"
         - name: LOG_LEVEL
           value: "DEBUG"
         readinessProbe:
diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index e72b4b610b8a718fcc726e38db09a7b0d8ced089..485cf0a27994b4943df6d8aadf3d54d3e63bfdcf 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -73,13 +73,15 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
                               'with "_connect/" tag. Others should be configured after adding the device.')
 
         if len(request.device_endpoints) > 0:
-            unexpected_endpoints = MessageToDict(
-                request.device_endpoints, including_default_value_fields=True, preserving_proto_field_name=True,
-                use_integers_for_enums=True)
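+            # MessageToDict serializes a single protobuf Message; convert each EndPoint separately and collect them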
+            unexpected_endpoints = []
+            for device_endpoint in request.device_endpoints:
+                unexpected_endpoints.append(MessageToDict(
+                    device_endpoint, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
             str_unexpected_endpoints = json.dumps(unexpected_endpoints, sort_keys=True)
             raise InvalidArgumentException(
                 'device.device_endpoints', str_unexpected_endpoints,
-                extra_details='RPC method AddDevice does not accept endpoints. Endpoints are discovered through '\
+                extra_details='RPC method AddDevice does not accept Endpoints. Endpoints are discovered through '\
                               'interrogation of the physical device.')
 
         # Remove device configuration
@@ -204,7 +206,7 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
             for config_rule in running_config_rules
         ]
         #for running_config_rule in running_config_rules:
-        #    LOGGER.info('[AddDevice] running_config_rule: {:s}'.format(str(running_config_rule)))
+        #    LOGGER.info('[ConfigureDevice] running_config_rule: {:s}'.format(str(running_config_rule)))
         update_config(self.database, device_uuid, 'running', running_config_rules)
 
         sync_device_to_context(db_device, self.context_client)
diff --git a/src/device/service/__main__.py b/src/device/service/__main__.py
index 6832faf70b1d10f4796e50269ab018f0c6be147e..86f9b3ec1091883df068d3a7d2d4409dcffc9e90 100644
--- a/src/device/service/__main__.py
+++ b/src/device/service/__main__.py
@@ -7,7 +7,6 @@ from device.Config import (
     METRICS_PORT, MONITORING_SERVICE_HOST, MONITORING_SERVICE_PORT)
 from monitoring.client.monitoring_client import MonitoringClient
 from .DeviceService import DeviceService
-from .MonitoringLoops import MonitoringLoops
 from .driver_api.DriverFactory import DriverFactory
 from .driver_api.DriverInstanceCache import DriverInstanceCache
 from .drivers import DRIVERS
diff --git a/src/device/service/driver_api/AnyTreeTools.py b/src/device/service/driver_api/AnyTreeTools.py
index 47e80e6c71cadd1fb24a6aecb0309f8194a06756..3b247eca6af25d1c34ad46fd824f8303c12c74d9 100644
--- a/src/device/service/driver_api/AnyTreeTools.py
+++ b/src/device/service/driver_api/AnyTreeTools.py
@@ -1,5 +1,5 @@
 import anytree
-from typing import Any, List, Optional
+from typing import Any, List, Optional, Union
 from apscheduler.job import Job
 
 class TreeNode(anytree.node.Node):
@@ -28,20 +28,24 @@ class RawStyle(anytree.render.AbstractStyle):
         Node('/root/sub0/sub0A')
         Node('/root/sub1')
         """
-        super(RawStyle, self).__init__(u'', u'', u'')
+        super(RawStyle, self).__init__('', '', '')
 
-def get_subnode(resolver : anytree.Resolver, root : TreeNode, path : List[str], default : Optional[Any] = None):
+def get_subnode(
+    resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], default : Optional[Any] = None):
+
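+    # Accept either a pre-split path (list of keys) or a single '/'-separated key string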
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
             return default
     return node
 
-def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, path : List[str], value : Any):
+def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], value : Any):
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
diff --git a/src/device/service/driver_api/QueryFields.py b/src/device/service/driver_api/QueryFields.py
deleted file mode 100644
index 15b3f5b7582283083c9ea1080dc4f3d6f5390501..0000000000000000000000000000000000000000
--- a/src/device/service/driver_api/QueryFields.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from enum import Enum
-
-class DeviceTypeQueryFieldEnum(Enum):
-    OPTICAL_ROADM       = 'optical-roadm'
-    OPTICAL_TRANDPONDER = 'optical-trandponder'
-    PACKET_ROUTER       = 'packet-router'
-    PACKET_SWITCH       = 'packet-switch'
-
-class ProtocolQueryFieldEnum(Enum):
-    SOFTWARE = 'software'
-    GRPC     = 'grpc'
-    RESTAPI  = 'restapi'
-    NETCONF  = 'netconf'
-    GNMI     = 'gnmi'
-    RESTCONF = 'restconf'
-
-class DataModelQueryFieldEnum(Enum):
-    EMULATED              = 'emu'
-    OPENCONFIG            = 'oc'
-    P4                    = 'p4'
-    TRANSPORT_API         = 'tapi'
-    IETF_NETWORK_TOPOLOGY = 'ietf-netw-topo'
-    ONF_TR_352            = 'onf-tr-352'
-
-# Map allowed query fields to allowed values per query field. If no restriction (free text) None is specified
-QUERY_FIELDS = {
-    'device_type'  : {i.value for i in DeviceTypeQueryFieldEnum},
-    'protocol'     : {i.value for i in ProtocolQueryFieldEnum},
-    'data_model'   : {i.value for i in DataModelQueryFieldEnum},
-    'vendor'       : None,
-    'model'        : None,
-    'serial_number': None,
-}
diff --git a/src/device/service/drivers/emulated/QueryFields.py b/src/device/service/drivers/emulated/QueryFields.py
deleted file mode 100644
index 6db43e5b5d4ffe1bbcc652d305981757bd960c3e..0000000000000000000000000000000000000000
--- a/src/device/service/drivers/emulated/QueryFields.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from enum import Enum
-
-VENDOR_CTTC = 'cttc'
-
-DEVICE_MODEL_EMULATED_OPTICAL_ROADM       = 'cttc_emu_opt_rdm'
-DEVICE_MODEL_EMULATED_OPTICAL_TRANDPONDER = 'cttc_emu_opt_tp'
-DEVICE_MODEL_EMULATED_PACKET_ROUTER       = 'cttc_emu_pkt_rtr'
-DEVICE_MODEL_EMULATED_PACKET_SWITCH       = 'cttc_emu_pkt_swt'
diff --git a/src/device/tests/test_unitary.py b/src/device/tests/test_unitary.py
index 7c2ad83d52b80e0a9d41cd8d0cecf9a4539618ea..046131810a6b00cb8e345111b017450493b6fcba 100644
--- a/src/device/tests/test_unitary.py
+++ b/src/device/tests/test_unitary.py
@@ -26,7 +26,7 @@ from device.service.driver_api.DriverFactory import DriverFactory
 from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
 from device.service.drivers import DRIVERS
 from device.tests.MockMonitoringService import MockMonitoringService
-from device.tests.Tools import endpoint_id
+from device.tests.Tools import endpoint, endpoint_id
 from monitoring.Config import (
     GRPC_SERVICE_PORT as MONITORING_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as MONITORING_GRPC_MAX_WORKERS,
     GRPC_GRACE_PERIOD as MONITORING_GRPC_GRACE_PERIOD)
@@ -156,6 +156,17 @@ def test_device_emulated_add_error_cases(
     device_client : DeviceClient,       # pylint: disable=redefined-outer-name
     device_service : DeviceService):    # pylint: disable=redefined-outer-name
 
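+    # AddDevice must reject requests carrying endpoints; endpoints are discovered from the device itself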
+    with pytest.raises(grpc.RpcError) as e:
+        DEVICE_EMU_WITH_ENDPOINTS = copy.deepcopy(DEVICE_EMU)
+        DEVICE_EMU_WITH_ENDPOINTS['device_endpoints'].append(endpoint(DEVICE_EMU_ID, 'ep-id', 'ep-type'))
+        device_client.AddDevice(Device(**DEVICE_EMU_WITH_ENDPOINTS))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'device.device_endpoints(['
+    msg_tail = ']) is invalid; RPC method AddDevice does not accept Endpoints. '\
+               'Endpoints are discovered through interrogation of the physical device.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
     with pytest.raises(grpc.RpcError) as e:
         DEVICE_EMU_WITH_EXTRA_RULES = copy.deepcopy(DEVICE_EMU)
         DEVICE_EMU_WITH_EXTRA_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONNECT_RULES)
@@ -187,10 +198,10 @@ def test_device_emulated_get(
     device_service : DeviceService):    # pylint: disable=redefined-outer-name
 
     initial_config = device_client.GetInitialConfig(DeviceId(**DEVICE_EMU_ID))
-    #LOGGER.info('initial_config = {:s}'.format(grpc_message_to_json_string(initial_config)))
+    LOGGER.info('initial_config = {:s}'.format(grpc_message_to_json_string(initial_config)))
 
     device_data = context_client.GetDevice(DeviceId(**DEVICE_EMU_ID))
-    #LOGGER.info('device_data = {:s}'.format(grpc_message_to_json_string(device_data)))
+    LOGGER.info('device_data = {:s}'.format(grpc_message_to_json_string(device_data)))
 
 
 def test_device_emulated_configure(
@@ -383,7 +394,7 @@ def test_device_emulated_monitor(
     for kpi_uuid in KPI_UUIDS__TO__NUM_SAMPLES_RECEIVED.keys():
         MONITORING_SETTINGS_UNSUBSCRIBE = {
             'kpi_id'             : {'kpi_id': {'uuid': kpi_uuid}},
-            'sampling_duration_s': -1, # negative value in sampling_duration_s or in sampling_interval_s means unsibscribe
+            'sampling_duration_s': -1, # negative value in sampling_duration_s or sampling_interval_s means unsubscribe
             'sampling_interval_s': -1, # kpi_id is mandatory to unsibscribe
         }
         device_client.MonitorDeviceKpi(MonitoringSettings(**MONITORING_SETTINGS_UNSUBSCRIBE))
diff --git a/src/service/.gitlab-ci.yml b/src/service/.gitlab-ci.yml
index 9b579ac863f972fa02d18bcc16218555f5a78259..d47177171a768a776368561c20aae1658bdc614d 100644
--- a/src/service/.gitlab-ci.yml
+++ b/src/service/.gitlab-ci.yml
@@ -36,9 +36,10 @@ unit test service:
     - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run --name $IMAGE_NAME -d -p 3030:3030 --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - docker run --name $IMAGE_NAME -d -p 3030:3030 --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
+    - docker logs $IMAGE_NAME
     - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
   after_script:
     - docker rm -f $IMAGE_NAME
diff --git a/src/service/Config.py b/src/service/Config.py
index 592392a1bde5757f83fd67589a7b7b7d6cc3e6c0..79ca2232c1ee273dbda9d3c6642ded229b32ab6f 100644
--- a/src/service/Config.py
+++ b/src/service/Config.py
@@ -10,3 +10,10 @@ GRPC_GRACE_PERIOD = 60
 
 # Prometheus settings
 METRICS_PORT = 9192
+
+# Dependency micro-service connection settings
+CONTEXT_SERVICE_HOST = '127.0.0.1'
+CONTEXT_SERVICE_PORT = 1010
+
+DEVICE_SERVICE_HOST = '127.0.0.1'
+DEVICE_SERVICE_PORT = 2020
diff --git a/src/service/Dockerfile b/src/service/Dockerfile
index 306379a06d33c67c082540e19f3b1ca349becff2..3e8dcaa31acdc2c97f71fbd12a3ca730e93a7677 100644
--- a/src/service/Dockerfile
+++ b/src/service/Dockerfile
@@ -29,6 +29,9 @@ RUN python3 -m pip install -r service/requirements.in
 
 # Add files into working directory
 COPY common/. common
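+# Copy sources of dependent components required by the Service component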
+COPY context/. context
+COPY device/. device
+COPY monitoring/. monitoring
 COPY service/. service
 
 # Start service service
diff --git a/src/service/client/ServiceClient.py b/src/service/client/ServiceClient.py
index 958e066238446d9642ba38c8a907f32df1dcf030..b9d123b88b1c004192098caccedf4b67dc92ac2b 100644
--- a/src/service/client/ServiceClient.py
+++ b/src/service/client/ServiceClient.py
@@ -21,7 +21,7 @@ class ServiceClient:
         self.stub = ServiceServiceStub(self.channel)
 
     def close(self):
-        if(self.channel is not None): self.channel.close()
+        if self.channel is not None: self.channel.close()
         self.channel = None
         self.stub = None
 
diff --git a/src/service/genproto.sh b/src/service/genproto.sh
index 4a53ab9eec365b74a4d76356605fe97961b4279e..7ea496d6fc49bccbd57acfea9c2ac4dce6ae1fa1 100755
--- a/src/service/genproto.sh
+++ b/src/service/genproto.sh
@@ -33,3 +33,4 @@ rm proto/kpi_sample_types_pb2_grpc.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
 sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2_grpc.py
+sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/kpi_sample_types_pb2.py
diff --git a/src/service/requirements.in b/src/service/requirements.in
index 25abdad1b5767117956a88b816399635348884c7..55d2eb25b8619d86d0a80f3682ba9c9524b8d38d 100644
--- a/src/service/requirements.in
+++ b/src/service/requirements.in
@@ -1,6 +1,16 @@
+anytree
+apscheduler
+fastcache
+flask-restful
 grpcio-health-checking
 grpcio
+Jinja2
+netconf-client #1.7.3
 prometheus-client
 pytest
 pytest-benchmark
+python-json-logger
+pytz
 redis
+requests
+xmltodict
diff --git a/src/service/service/ServiceService.py b/src/service/service/ServiceService.py
index 6280af0b70eb37b8cf9c27954bf4db4690743198..bff0d7b2f32bf0498adaa834a49226efb373b786 100644
--- a/src/service/service/ServiceService.py
+++ b/src/service/service/ServiceService.py
@@ -1,21 +1,31 @@
-import grpc
-import logging
+import grpc, logging
 from concurrent import futures
 from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
 from grpc_health.v1.health_pb2 import HealthCheckResponse
 from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from common.orm.backend.BackendEnum import BackendEnum
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
 from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
 from service.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server
 from .ServiceServiceServicerImpl import ServiceServiceServicerImpl
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
 
 BIND_ADDRESS = '0.0.0.0'
 LOGGER = logging.getLogger(__name__)
 
 class ServiceService:
     def __init__(
-        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
+        self, context_client : ContextClient, device_client : DeviceClient,
+        service_handler_factory : ServiceHandlerFactory,
+        address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
         grace_period=GRPC_GRACE_PERIOD):
 
+        self.context_client = context_client
+        self.device_client = device_client
+        self.service_handler_factory = service_handler_factory
         self.address = address
         self.port = port
         self.endpoint = None
@@ -26,6 +36,8 @@ class ServiceService:
         self.pool = None
         self.server = None
 
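+        # In-memory database used to keep a local copy of Context data while processing requests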
+        self.database = Database(get_database_backend(backend=BackendEnum.INMEMORY))
+
     def start(self):
         self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
         LOGGER.info('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
@@ -34,7 +46,8 @@ class ServiceService:
         self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
         self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
 
-        self.service_servicer = ServiceServiceServicerImpl()
+        self.service_servicer = ServiceServiceServicerImpl(
+            self.context_client, self.device_client, self.database, self.service_handler_factory)
         add_ServiceServiceServicer_to_server(self.service_servicer, self.server)
 
         self.health_servicer = HealthServicer(
diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index e400d0b6e4e6741d387eef7c156eedd65ffb76da..ca93c098010031823fcd38059a624e7b0c62a5bb 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -1,10 +1,30 @@
-#from typing import Dict
-import grpc, logging
-#from common.exceptions.ServiceException import ServiceException
+import grpc, json, logging
+from typing import Any, List, Optional, Tuple
+from google.protobuf.json_format import MessageToDict
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
 from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
-from service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId #, ServiceList
+from common.rpc_method_wrapper.ServiceExceptions import (
+    InvalidArgumentException, NotFoundException, OperationFailedException)
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.proto.context_pb2 import ConnectionList, Empty, Service, ServiceId
 from service.proto.service_pb2_grpc import ServiceServiceServicer
-#from service.service.Tools import check_service_id_request, check_service_request
+from .database.ConfigModel import ConfigModel, ConfigRuleModel
+from .database.ConstraintModel import ConstraintModel, ConstraintsModel
+from .database.DatabaseServiceTools import (
+    delete_service_from_context, sync_service_from_context, sync_service_to_context, update_service_in_local_database)
+from .database.RelationModels import ServiceEndPointModel
+from .database.ServiceModel import ServiceModel
+from .service_handler_api._ServiceHandler import _ServiceHandler
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from .service_handler_api.Tools import (
+    check_errors_deleteconfig, check_errors_deleteconstraint, check_errors_deleteendpoint, check_errors_setconfig,
+    check_errors_setconstraint, check_errors_setendpoint)
+from .Tools import (
+    classify_config_rules, classify_constraints, classify_endpointids, get_service_handler_class,
+    sync_devices_from_context)
 
 LOGGER = logging.getLogger(__name__)
 
@@ -13,86 +33,173 @@ METHOD_NAMES = ['CreateService', 'UpdateService', 'DeleteService',  'GetConnecti
 METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
 
 class ServiceServiceServicerImpl(ServiceServiceServicer):
-    def __init__(self):
+    def __init__(
+        self, context_client : ContextClient, device_client : DeviceClient, database : Database,
+        service_handler_factory : ServiceHandlerFactory):
+
         LOGGER.debug('Creating Servicer...')
+        self.context_client = context_client
+        self.device_client = device_client
+        self.database = database
+        self.service_handler_factory = service_handler_factory
         LOGGER.debug('Servicer Created')
 
-    #@safe_and_metered_rpc_method(METRICS, LOGGER)
-    #def GetServiceList(self, request : Empty, context : grpc.ServicerContext) -> ServiceList:
-    #    db_context_uuids = self.database.contexts.get()
-    #    json_services = []
-    #    for db_context_uuid in db_context_uuids:
-    #        db_context = self.database.context(db_context_uuid)
-    #        json_services.extend(db_context.dump_services())
-    #    return ServiceList(cs=json_services)
-
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
-        #context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-        #    check_service_request('CreateService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.create(service_type, service_config, service_state)
-        #for db_endpoint in db_endpoints:
-        #    service_endpoint_id = '{}:{}/{}'.format(
-        #        db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-        #    db_service.endpoint(service_endpoint_id).create(db_endpoint)
-        #for cons_type,cons_value in constraint_tuples: db_service.constraint(cons_type).create(cons_value)
-        #return ServiceId(**db_service.dump_id())
-        return ServiceId()
+        service_id = request.service_id
+        service_uuid = service_id.service_uuid.uuid
+        service_context_uuid = service_id.context_id.context_uuid.uuid
+
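+        # The request must carry only the service identity and basic attributes; endpoints, constraints and
+        # config rules are rejected here and must be configured after the service has been created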
+        if len(request.service_endpoint_ids) > 0:
+            unexpected_endpoints = []
+            for service_endpoint_id in request.service_endpoint_ids:
+                unexpected_endpoints.append(MessageToDict(
+                    service_endpoint_id, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
+            str_unexpected_endpoints = json.dumps(unexpected_endpoints, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_endpoint_ids', str_unexpected_endpoints,
+                extra_details='RPC method CreateService does not accept Endpoints. '\
+                              'Endpoints should be configured after creating the service.')
+
+        if len(request.service_constraints) > 0:
+            unexpected_constraints = []
+            for service_constraint in request.service_constraints:
+                unexpected_constraints.append(MessageToDict(
+                    service_constraint, including_default_value_fields=True, preserving_proto_field_name=True,
+                    use_integers_for_enums=True))
+            str_unexpected_constraints = json.dumps(unexpected_constraints, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_constraints', str_unexpected_constraints,
+                extra_details='RPC method CreateService does not accept Constraints. '\
+                              'Constraints should be configured after creating the service.')
+
+        if len(request.service_config.config_rules) > 0:
+            unexpected_config_rules = MessageToDict(
+                request.service_config, including_default_value_fields=True,
+                preserving_proto_field_name=True, use_integers_for_enums=True)
+            unexpected_config_rules = unexpected_config_rules['config_rules']
+            str_unexpected_config_rules = json.dumps(unexpected_config_rules, sort_keys=True)
+            raise InvalidArgumentException(
+                'service.service_config.config_rules', str_unexpected_config_rules,
+                extra_details='RPC method CreateService does not accept Config Rules. '\
+                              'Config Rules should be configured after creating the service.')
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        db_service,_ = update_service_in_local_database(self.database, request)
+
+        LOGGER.info('[CreateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        sync_service_to_context(db_service, self.context_client)
+        return ServiceId(**db_service.dump_id())
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def UpdateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
-        #context_id, service_id, service_type, service_config, service_state, db_endpoints, constraint_tuples = \
-        #    check_service_request('UpdateService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.update(update_attributes={
-        #    'service_type'  : service_type,
-        #    'service_config': service_config,
-        #    'service_state' : service_state,
-        #})
-        ## Update service constraints; first add missing, then remove existing, but not added to Service
-        #db_service_constraint_types = set(db_service.constraints.get())
-        #for constraint_type,constraint_value in constraint_tuples:
-        #    if constraint_type in db_service_constraint_types:
-        #        db_service.constraint(constraint_type).update(update_attributes={
-        #            'constraint_value': constraint_value
-        #        })
-        #    else:
-        #        db_service.constraint(constraint_type).create(constraint_value)
-        #    db_service_constraint_types.discard(constraint_type)
-        #for constraint_type in db_service_constraint_types:
-        #    db_service.constraint(constraint_type).delete()
-        ## Update service endpoints; first add missing, then remove existing, but not added to Service
-        #db_service_endpoint_uuids = set(db_service.endpoints.get())
-        #for db_endpoint in db_endpoints:
-        #    service_endpoint_id = '{}:{}/{}'.format(
-        #        db_endpoint.topology_uuid, db_endpoint.device_uuid, db_endpoint.endpoint_uuid)
-        #    if service_endpoint_id not in db_service_endpoint_uuids:
-        #        db_service.endpoint(service_endpoint_id).create(db_endpoint)
-        #    db_service_endpoint_uuids.discard(service_endpoint_id)
-        #for db_service_endpoint_uuid in db_service_endpoint_uuids:
-        #    db_service.endpoint(db_service_endpoint_uuid).delete()
-        #return ServiceId(**db_service.dump_id())
-        return ServiceId()
+        service_id = request.service_id
+        service_uuid = service_id.service_uuid.uuid
+        service_context_uuid = service_id.context_id.context_uuid.uuid
+        str_service_key = key_to_str([service_context_uuid, service_uuid])
+
+        # Sync before updating service to ensure we have devices, endpoints, constraints, and config rules to be
+        # set/deleted before actually modifying them in the local in-memory database.
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        db_service = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False)
+        if db_service is None: raise NotFoundException('Service', str_service_key)
+        LOGGER.info('[UpdateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        db_devices = sync_devices_from_context(self.context_client, db_service, request.service_endpoint_ids)
+
+        resources_to_set    : List[Tuple[str, Any]] = [] # resource_key, resource_value
+        resources_to_delete : List[Tuple[str, Any]] = [] # resource_key, resource_value
+        classify_config_rules(db_service, request.service_config.config_rules, resources_to_set, resources_to_delete)
+
+        constraints_to_set    : List[Tuple[str, str]] = [] # constraint_type, constraint_value
+        constraints_to_delete : List[Tuple[str, str]] = [] # constraint_type, constraint_value
+        classify_constraints(db_service, request.service_constraints, constraints_to_set, constraints_to_delete)
+
+        endpointids_to_set    : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid
+        endpointids_to_delete : List[Tuple[str, str, Optional[str]]] = [] # device_uuid, endpoint_uuid, topology_uuid
+        classify_endpointids(db_service, request.service_endpoint_ids, endpointids_to_set, endpointids_to_delete)
+
+        service_handler_class = get_service_handler_class(self.service_handler_factory, db_service, db_devices)
+        service_handler_settings = {}
+        service_handler : _ServiceHandler = service_handler_class(
+            db_service, self.database, self.context_client, self.device_client, **service_handler_settings)
+
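+        # Changes are applied by the service handler in a fixed order (delete endpoints/constraints/config,
+        # then set config/constraints/endpoints); each step runs only while no errors have been collected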
+        errors = []
+
+        if len(errors) == 0:
+            results_deleteendpoint = service_handler.DeleteEndpoint(endpointids_to_delete)
+            errors.extend(check_errors_deleteendpoint(endpointids_to_delete, results_deleteendpoint))
+
+        if len(errors) == 0:
+            results_deleteconstraint = service_handler.DeleteConstraint(constraints_to_delete)
+            errors.extend(check_errors_deleteconstraint(constraints_to_delete, results_deleteconstraint))
+
+        if len(errors) == 0:
+            results_deleteconfig = service_handler.DeleteConfig(resources_to_delete)
+            errors.extend(check_errors_deleteconfig(resources_to_delete, results_deleteconfig))
+
+        if len(errors) == 0:
+            results_setconfig = service_handler.SetConfig(resources_to_set)
+            errors.extend(check_errors_setconfig(resources_to_set, results_setconfig))
+
+        if len(errors) == 0:
+            results_setconstraint = service_handler.SetConstraint(constraints_to_set)
+            errors.extend(check_errors_setconstraint(constraints_to_set, results_setconstraint))
+
+        if len(errors) == 0:
+            results_setendpoint = service_handler.SetEndpoint(endpointids_to_set)
+            errors.extend(check_errors_setendpoint(endpointids_to_set, results_setendpoint))
+
+        if len(errors) > 0:
+            raise OperationFailedException('UpdateService', extra_details=errors)
+
+        db_service,_ = update_service_in_local_database(self.database, request)
+        LOGGER.info('[UpdateService] db_service = {:s}'.format(str(db_service.dump(
+            include_endpoint_ids=True, include_constraints=True, include_config_rules=True))))
+
+        #db_entries = self.database.dump()
+        #LOGGER.info('----- Database Dump [{:3d} entries] -------------------------'.format(len(db_entries)))
+        #for db_entry in db_entries:
+        #    LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
+        #LOGGER.info('-----------------------------------------------------------')
+
+        sync_service_to_context(db_service, self.context_client)
+        return ServiceId(**db_service.dump_id())
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
-        #context_id, service_id = check_service_id_request('DeleteService', request, self.database, LOGGER)
-        #db_context = self.database.context(context_id)
-        #db_service = db_context.service(service_id)
-        #db_service.delete()
-        return Empty()
+        service_uuid = request.service_uuid.uuid
+        service_context_uuid = request.context_id.context_uuid.uuid
+
+        sync_service_from_context(service_context_uuid, service_uuid, self.context_client, self.database)
+        str_service_key = key_to_str([service_context_uuid, service_uuid])
+        db_service : ServiceModel = get_object(self.database, ServiceModel, str_service_key, raise_if_not_found=False)
+        if db_service is None: return Empty()
+
+        delete_service_from_context(db_service, self.context_client)
 
-    #@safe_and_metered_rpc_method(METRICS, LOGGER)
-    #def GetServiceById(self, request : ServiceId, context : grpc.ServicerContext) -> Service:
-    #    context_id, service_id = check_service_id_request('GetServiceById', request, self.database, LOGGER)
-    #    db_context = self.database.context(context_id)
-    #    db_service = db_context.service(service_id)
-    #    return Service(**db_service.dump())
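+        # Remove dependent records (service endpoints, config rules, constraints) before deleting the service itself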
+        for db_service_endpoint_pk,_ in db_service.references(ServiceEndPointModel):
+            ServiceEndPointModel(self.database, db_service_endpoint_pk).delete()
+
+        db_running_config = ConfigModel(self.database, db_service.service_config_fk)
+        for db_config_rule_pk,_ in db_running_config.references(ConfigRuleModel):
+            ConfigRuleModel(self.database, db_config_rule_pk).delete()
+
+        db_running_constraints = ConstraintsModel(self.database, db_service.service_constraints_fk)
+        for db_constraint_pk,_ in db_running_constraints.references(ConstraintModel):
+            ConstraintModel(self.database, db_constraint_pk).delete()
+
+        db_service.delete()
+        db_running_config.delete()
+        db_running_constraints.delete()
+        return Empty()
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
-    def GetConnectionList(self, request : Empty, context : grpc.ServicerContext) -> ConnectionList:
+    def GetConnectionList(self, request : ServiceId, context : grpc.ServicerContext) -> ConnectionList:
         #raise ServiceException(grpc.StatusCode.UNIMPLEMENTED, 'RPC GetConnectionList() not implemented')
         return ConnectionList()
diff --git a/src/service/service/Tools.py b/src/service/service/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..80084f363231ee18c1fe75559b5633f0b5b02e68
--- /dev/null
+++ b/src/service/service/Tools.py
@@ -0,0 +1,160 @@
+import logging
+from typing import Any, Dict, List, Optional, Tuple
+from common.orm.HighLevel import get_object, get_related_objects
+from common.rpc_method_wrapper.ServiceExceptions import NotFoundException
+from context.client.ContextClient import ContextClient
+from service.proto.context_pb2 import ConfigRule, Constraint, EndPointId
+from service.service.database.ConstraintModel import get_constraints, grpc_constraints_to_raw
+from service.service.database.DatabaseDeviceTools import sync_device_from_context
+from service.service.database.EndPointModel import EndPointModel, grpc_endpointids_to_raw
+from .database.ConfigModel import ORM_ConfigActionEnum, get_config_rules, grpc_config_rules_to_raw
+from .database.DeviceModel import DeviceModel, DriverModel
+from .database.RelationModels import ServiceEndPointModel
+from .database.ServiceModel import ServiceModel
+from .service_handler_api._ServiceHandler import _ServiceHandler
+from .service_handler_api.FilterFields import FilterFieldEnum
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+
+LOGGER = logging.getLogger(__name__)
+
+def sync_devices_from_context(
+    context_client : ContextClient, db_service : ServiceModel, service_endpoint_ids : List[EndPointId]
+    ) -> Dict[str, DeviceModel]:
+
+    database = db_service.database
+
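+    # Gather devices referenced by existing service endpoints and by the requested endpoint ids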
+    required_device_uuids = set()
+    db_endpoints = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk')
+    for db_endpoint in db_endpoints:
+        db_device = DeviceModel(database, db_endpoint.device_fk)
+        required_device_uuids.add(db_device.device_uuid)
+
+    for endpoint_id in service_endpoint_ids:
+        required_device_uuids.add(endpoint_id.device_id.device_uuid.uuid)
+
+    db_devices = {}
+    devices_not_found = set()
+    for device_uuid in required_device_uuids:
+        sync_device_from_context(device_uuid, context_client, database)
+        db_device = get_object(database, DeviceModel, device_uuid, raise_if_not_found=False)
+        if db_device is None:
+            devices_not_found.add(device_uuid)
+        else:
+            db_devices[device_uuid] = db_device
+
+    if len(devices_not_found) > 0:
+        extra_details = ['Devices({:s}) cannot be retrieved from Context'.format(str(devices_not_found))]
+        raise NotFoundException('Device', '...', extra_details=extra_details)
+
+    return db_devices
+
+def classify_config_rules(
+    db_service : ServiceModel, service_config_rules : List[ConfigRule],
+    resources_to_set: List[Tuple[str, Any]], resources_to_delete : List[Tuple[str, Any]]):
+
+    context_config_rules = get_config_rules(db_service.database, db_service.pk, 'running')
+    context_config_rules = {config_rule[1]: config_rule[2] for config_rule in context_config_rules}
+    #LOGGER.info('[classify_config_rules] context_config_rules = {:s}'.format(str(context_config_rules)))
+
+    request_config_rules = grpc_config_rules_to_raw(service_config_rules)
+    #LOGGER.info('[classify_config_rules] request_config_rules = {:s}'.format(str(request_config_rules)))
+
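+    # SET rules apply only when the key is new or its value changed; DELETE rules only when the key exists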
+    for config_rule in request_config_rules:
+        action, key, value = config_rule
+        if action == ORM_ConfigActionEnum.SET:
+            if (key not in context_config_rules) or (context_config_rules[key] != value):
+                resources_to_set.append((key, value))
+        elif action == ORM_ConfigActionEnum.DELETE:
+            if key in context_config_rules:
+                resources_to_delete.append((key, value))
+
+    #LOGGER.info('[classify_config_rules] resources_to_set = {:s}'.format(str(resources_to_set)))
+    #LOGGER.info('[classify_config_rules] resources_to_delete = {:s}'.format(str(resources_to_delete)))
+
+def classify_constraints(
+    db_service : ServiceModel, service_constraints : List[Constraint],
+    constraints_to_set: List[Tuple[str, str]], constraints_to_delete : List[Tuple[str, str]]):
+
+    context_constraints = get_constraints(db_service.database, db_service.pk, 'running')
+    context_constraints = {constraint[0]: constraint[1] for constraint in context_constraints}
+    #LOGGER.info('[classify_constraints] context_constraints = {:s}'.format(str(context_constraints)))
+
+    request_constraints = grpc_constraints_to_raw(service_constraints)
+    #LOGGER.info('[classify_constraints] request_constraints = {:s}'.format(str(request_constraints)))
+
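+    # Requested constraints are (re)set when new or changed; constraints present only in Context are deleted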
+    for constraint in request_constraints:
+        constraint_type, constraint_value = constraint
+        if constraint_type in context_constraints:
+            if context_constraints[constraint_type] != constraint_value:
+                constraints_to_set.append(constraint)
+        else:
+            constraints_to_set.append(constraint)
+        context_constraints.pop(constraint_type, None)
+
+    for constraint_type, constraint_value in context_constraints.items():
+        constraints_to_delete.append((constraint_type, constraint_value))
+
+    #LOGGER.info('[classify_constraints] constraints_to_set = {:s}'.format(str(constraints_to_set)))
+    #LOGGER.info('[classify_constraints] constraints_to_delete = {:s}'.format(str(constraints_to_delete)))
+
+def get_service_endpointids(db_service : ServiceModel) -> List[Tuple[str, str, Optional[str]]]:
+    db_endpoints : List[EndPointModel] = get_related_objects(db_service, ServiceEndPointModel, 'endpoint_fk')
+    endpoint_ids = [db_endpoint.dump_id() for db_endpoint in db_endpoints]
+    return [
+        (endpoint_id['device_id']['device_uuid']['uuid'], endpoint_id['endpoint_uuid']['uuid'],
+            endpoint_id.get('topology_id', {}).get('topology_uuid', {}).get('uuid', None))
+        for endpoint_id in endpoint_ids
+    ]
+
+def classify_endpointids(
+    db_service : ServiceModel, service_endpoint_ids : List[EndPointId],
+    endpointids_to_set: List[Tuple[str, str, Optional[str]]],
+    endpointids_to_delete : List[Tuple[str, str, Optional[str]]]):
+
+    context_endpoint_ids = get_service_endpointids(db_service)
+    #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids)))
+    context_endpoint_ids = set(context_endpoint_ids)
+    #LOGGER.info('[classify_endpointids] context_endpoint_ids = {:s}'.format(str(context_endpoint_ids)))
+
+    request_endpoint_ids = grpc_endpointids_to_raw(service_endpoint_ids)
+    #LOGGER.info('[classify_endpointids] request_endpoint_ids = {:s}'.format(str(request_endpoint_ids)))
+
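+    # Endpoint ids in the request but not in Context are set; those present only in Context are deleted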
+    for endpoint_id in request_endpoint_ids:
+        if endpoint_id not in context_endpoint_ids:
+            endpointids_to_set.append(endpoint_id)
+        context_endpoint_ids.discard(endpoint_id)
+
+    for endpoint_id in context_endpoint_ids:
+        endpointids_to_delete.append(endpoint_id)
+
+    #LOGGER.info('[classify_endpointids] endpointids_to_set = {:s}'.format(str(endpointids_to_set)))
+    #LOGGER.info('[classify_endpointids] endpointids_to_delete = {:s}'.format(str(endpointids_to_delete)))
+
+def get_service_handler_class(
+    service_handler_factory : ServiceHandlerFactory, db_service : ServiceModel, db_devices : Dict[str, DeviceModel]
+    ) -> Optional[_ServiceHandler]:
+
+    str_service_key = db_service.pk
+    database = db_service.database
+
+    # Assume all devices involved in the service must support at least one driver in common
+    device_drivers = None
+    for _,db_device in db_devices.items():
+        db_driver_pks = db_device.references(DriverModel)
+        db_driver_names = [DriverModel(database, pk).driver.value for pk,_ in db_driver_pks]
+        if device_drivers is None:
+            device_drivers = set(db_driver_names)
+        else:
+            device_drivers.intersection_update(db_driver_names)
+
+    filter_fields = {
+        FilterFieldEnum.SERVICE_TYPE.value  : db_service.service_type.value,    # must be supported
+        FilterFieldEnum.DEVICE_DRIVER.value : device_drivers,                   # at least one must be supported
+    }
+
+    msg = 'Selecting service handler for service({:s}) with filter_fields({:s})...'
+    LOGGER.info(msg.format(str(str_service_key), str(filter_fields)))
+    service_handler_class = service_handler_factory.get_service_handler_class(**filter_fields)
+    msg = 'ServiceHandler({:s}) selected for service({:s}) with filter_fields({:s})...'
+    LOGGER.info(msg.format(str(service_handler_class.__name__), str(str_service_key), str(filter_fields)))
+    return service_handler_class
diff --git a/src/service/service/__main__.py b/src/service/service/__main__.py
index 7de072b007d16fbd1c3274ee6a1ba04a5e0e56e5..d6a0e9fd32c4e707dd731e5185aa7751e8bd65ee 100644
--- a/src/service/service/__main__.py
+++ b/src/service/service/__main__.py
@@ -1,8 +1,14 @@
 import logging, signal, sys, threading
 from prometheus_client import start_http_server
 from common.Settings import get_setting
-from service.service.ServiceService import ServiceService
-from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.Config import (
+    CONTEXT_SERVICE_HOST, CONTEXT_SERVICE_PORT, DEVICE_SERVICE_HOST, DEVICE_SERVICE_PORT, GRPC_SERVICE_PORT,
+    GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT)
+from .ServiceService import ServiceService
+from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from .service_handlers import SERVICE_HANDLERS
 
 terminate = threading.Event()
 LOGGER = None
@@ -14,11 +20,15 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
 def main():
     global LOGGER # pylint: disable=global-statement
 
-    service_port = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
-    max_workers  = get_setting('MAX_WORKERS',                      default=GRPC_MAX_WORKERS )
-    grace_period = get_setting('GRACE_PERIOD',                     default=GRPC_GRACE_PERIOD)
-    log_level    = get_setting('LOG_LEVEL',                        default=LOG_LEVEL        )
-    metrics_port = get_setting('METRICS_PORT',                     default=METRICS_PORT     )
+    grpc_service_port    = get_setting('SERVICESERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT   )
+    max_workers          = get_setting('MAX_WORKERS',                      default=GRPC_MAX_WORKERS    )
+    grace_period         = get_setting('GRACE_PERIOD',                     default=GRPC_GRACE_PERIOD   )
+    log_level            = get_setting('LOG_LEVEL',                        default=LOG_LEVEL           )
+    metrics_port         = get_setting('METRICS_PORT',                     default=METRICS_PORT        )
+    context_service_host = get_setting('CONTEXTSERVICE_SERVICE_HOST',      default=CONTEXT_SERVICE_HOST)
+    context_service_port = get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC', default=CONTEXT_SERVICE_PORT)
+    device_service_host  = get_setting('DEVICESERVICE_SERVICE_HOST',       default=DEVICE_SERVICE_HOST )
+    device_service_port  = get_setting('DEVICESERVICE_SERVICE_PORT_GRPC',  default=DEVICE_SERVICE_PORT )
 
     logging.basicConfig(level=log_level)
     LOGGER = logging.getLogger(__name__)
@@ -31,8 +41,24 @@ def main():
     # Start metrics server
     start_http_server(metrics_port)
 
+    # Initialize Context Client
+    if context_service_host is None or context_service_port is None:
+        raise Exception('Wrong address({:s}):port({:s}) of Context component'.format(
+            str(context_service_host), str(context_service_port)))
+    context_client = ContextClient(context_service_host, context_service_port)
+
+    # Initialize Device Client
+    if device_service_host is None or device_service_port is None:
+        raise Exception('Wrong address({:s}):port({:s}) of Device component'.format(
+            str(device_service_host), str(device_service_port)))
+    device_client = DeviceClient(device_service_host, device_service_port)
+
+    service_handler_factory = ServiceHandlerFactory(SERVICE_HANDLERS)
+
     # Starting service service
-    grpc_service = ServiceService(port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service = ServiceService(
+        context_client, device_client, service_handler_factory, port=grpc_service_port, max_workers=max_workers,
+        grace_period=grace_period)
     grpc_service.start()
 
     # Wait for Ctrl+C or termination signal
diff --git a/src/service/service/database/ConfigModel.py b/src/service/service/database/ConfigModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..39a2b5a794fa88d2700603975f36a3fa4ef90450
--- /dev/null
+++ b/src/service/service/database/ConfigModel.py
@@ -0,0 +1,98 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.proto.context_pb2 import ConfigActionEnum
+from .Tools import fast_hasher, grpc_to_enum, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ConfigActionEnum(Enum):
+    UNDEFINED = ConfigActionEnum.CONFIGACTION_UNDEFINED
+    SET       = ConfigActionEnum.CONFIGACTION_SET
+    DELETE    = ConfigActionEnum.CONFIGACTION_DELETE
+
+grpc_to_enum__config_action = functools.partial(
+    grpc_to_enum, ConfigActionEnum, ORM_ConfigActionEnum)
+
+class ConfigModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_config_rule_pks = self.references(ConfigRuleModel)
+        config_rules = [ConfigRuleModel(self.database, pk).dump(include_position=True) for pk,_ in db_config_rule_pks]
+        config_rules = sorted(config_rules, key=operator.itemgetter('position'))
+        return [remove_dict_key(config_rule, 'position') for config_rule in config_rules]
+
+class ConfigRuleModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    config_fk = ForeignKeyField(ConfigModel)
+    position = IntegerField(min_value=0, required=True)
+    action = EnumeratedField(ORM_ConfigActionEnum, required=True)
+    key = StringField(required=True, allow_empty=False)
+    value = StringField(required=False, allow_empty=True)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'action': self.action.value,
+            'resource_key': self.key,
+            'resource_value': self.value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def delete_all_config_rules(database : Database, db_parent_pk : str, config_name : str) -> None:
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    db_config : ConfigModel = get_object(database, ConfigModel, str_config_key, raise_if_not_found=False)
+    if db_config is None: return
+    db_config_rule_pks = db_config.references(ConfigRuleModel)
+    for pk,_ in db_config_rule_pks: ConfigRuleModel(database, pk).delete()
+
+def grpc_config_rules_to_raw(grpc_config_rules) -> List[Tuple[ORM_ConfigActionEnum, str, str]]:
+    def translate(grpc_config_rule):
+        action = grpc_to_enum__config_action(grpc_config_rule.action)
+        return action, grpc_config_rule.resource_key, grpc_config_rule.resource_value
+    return [translate(grpc_config_rule) for grpc_config_rule in grpc_config_rules]
+
+def get_config_rules(
+    database : Database, db_parent_pk : str, config_name : str
+    ) -> List[Tuple[ORM_ConfigActionEnum, str, str]]:
+
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    db_config = get_object(database, ConfigModel, str_config_key, raise_if_not_found=False)
+    return [] if db_config is None else [
+        (ORM_ConfigActionEnum._value2member_map_.get(config_rule['action']),
+            config_rule['resource_key'], config_rule['resource_value'])
+        for config_rule in db_config.dump()
+    ]
+
+def update_config(
+    database : Database, db_parent_pk : str, config_name : str,
+    raw_config_rules : List[Tuple[ORM_ConfigActionEnum, str, str]]
+    ) -> List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]]:
+
+    str_config_key = key_to_str([db_parent_pk, config_name], separator=':')
+    result : Tuple[ConfigModel, bool] = get_or_create_object(database, ConfigModel, str_config_key)
+    db_config, created = result
+
+    db_objects : List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]] = [(db_config, created)]
+
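+    # Rules are keyed by a hash of resource_key, so updating the same key overwrites the previous entry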
+    for position,(action, resource_key, resource_value) in enumerate(raw_config_rules):
+        str_rule_key_hash = fast_hasher(resource_key)
+        str_config_rule_key = key_to_str([db_config.pk, str_rule_key_hash], separator=':')
+        result : Tuple[ConfigRuleModel, bool] = update_or_create_object(
+            database, ConfigRuleModel, str_config_rule_key, {
+                'config_fk': db_config, 'position': position, 'action': action, 'key': resource_key,
+                'value': resource_value})
+        db_config_rule, updated = result
+        db_objects.append((db_config_rule, updated))
+
+    return db_objects
diff --git a/src/service/service/database/ConstraintModel.py b/src/service/service/database/ConstraintModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2a4933e4caa9697a41947e4a9266b1a0d6ee5a7
--- /dev/null
+++ b/src/service/service/database/ConstraintModel.py
@@ -0,0 +1,82 @@
+import logging, operator
+from typing import Dict, List, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.IntegerField import IntegerField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.service.database.Tools import fast_hasher, remove_dict_key
+
+LOGGER = logging.getLogger(__name__)
+
+class ConstraintsModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+
+    def dump(self) -> List[Dict]:
+        db_constraint_pks = self.references(ConstraintModel)
+        constraints = [ConstraintModel(self.database, pk).dump(include_position=True) for pk,_ in db_constraint_pks]
+        constraints = sorted(constraints, key=operator.itemgetter('position'))
+        return [remove_dict_key(constraint, 'position') for constraint in constraints]
+
+class ConstraintModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    constraints_fk = ForeignKeyField(ConstraintsModel)
+    position = IntegerField(min_value=0, required=True)
+    constraint_type = StringField(required=True, allow_empty=False)
+    constraint_value = StringField(required=True, allow_empty=False)
+
+    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
+        result = {
+            'constraint_type': self.constraint_type,
+            'constraint_value': self.constraint_value,
+        }
+        if include_position: result['position'] = self.position
+        return result
+
+def delete_all_constraints(database : Database, db_parent_pk : str, constraints_name : str) -> None:
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    db_constraints : ConstraintsModel = get_object(
+        database, ConstraintsModel, str_constraints_key, raise_if_not_found=False)
+    if db_constraints is None: return
+    db_constraint_pks = db_constraints.references(ConstraintModel)
+    for pk,_ in db_constraint_pks: ConstraintModel(database, pk).delete()
+
+def grpc_constraints_to_raw(grpc_constraints) -> List[Tuple[str, str]]:
+    return [
+        (grpc_constraint.constraint_type, grpc_constraint.constraint_value)
+        for grpc_constraint in grpc_constraints
+    ]
+
+def get_constraints(database : Database, db_parent_pk : str, constraints_name : str) -> List[Tuple[str, str]]:
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    db_constraints : ConstraintsModel = get_object(
+        database, ConstraintsModel, str_constraints_key, raise_if_not_found=False)
+    return [] if db_constraints is None else [
+        (constraint['constraint_type'], constraint['constraint_value'])
+        for constraint in db_constraints.dump()
+    ]
+
+def update_constraints(
+    database : Database, db_parent_pk : str, constraints_name : str, raw_constraints : List[Tuple[str, str]]
+    ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
+
+    str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
+    result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
+    db_constraints, created = result
+
+    db_objects : List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]] = [(db_constraints, created)]
+
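+    # Constraints are keyed by a hash of constraint_type, so re-setting the same type overwrites the previous entry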
+    for position,(constraint_type, constraint_value) in enumerate(raw_constraints):
+        str_constraint_key_hash = fast_hasher(constraint_type)
+        str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
+        result : Tuple[ConstraintModel, bool] = update_or_create_object(
+            database, ConstraintModel, str_constraint_key, {
+                'constraints_fk': db_constraints, 'position': position, 'constraint_type': constraint_type,
+                'constraint_value': constraint_value})
+        db_constraints_rule, updated = result
+        db_objects.append((db_constraints_rule, updated))
+
+    return db_objects
diff --git a/src/service/service/database/ContextModel.py b/src/service/service/database/ContextModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..74e577e5ba5ab366cd7c3ca07c8730d21d5e8ec9
--- /dev/null
+++ b/src/service/service/database/ContextModel.py
@@ -0,0 +1,30 @@
+import logging
+from typing import Dict #, List
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+
+LOGGER = logging.getLogger(__name__)
+
+class ContextModel(Model):
+    pk = PrimaryKeyField()
+    context_uuid = StringField(required=True, allow_empty=False)
+
+    def dump_id(self) -> Dict:
+        return {'context_uuid': {'uuid': self.context_uuid}}
+
+#    def dump_service_ids(self) -> List[Dict]:
+#        from .ServiceModel import ServiceModel # pylint: disable=import-outside-toplevel
+#        db_service_pks = self.references(ServiceModel)
+#        return [ServiceModel(self.database, pk).dump_id() for pk,_ in db_service_pks]
+#
+#    def dump_topology_ids(self) -> List[Dict]:
+#        from .TopologyModel import TopologyModel # pylint: disable=import-outside-toplevel
+#        db_topology_pks = self.references(TopologyModel)
+#        return [TopologyModel(self.database, pk).dump_id() for pk,_ in db_topology_pks]
+#
+#    def dump(self, include_services=True, include_topologies=True) -> Dict: # pylint: disable=arguments-differ
+#        result = {'context_id': self.dump_id()}
+#        if include_services: result['service_ids'] = self.dump_service_ids()
+#        if include_topologies: result['topology_ids'] = self.dump_topology_ids()
+#        return result
diff --git a/src/service/service/database/DatabaseDeviceTools.py b/src/service/service/database/DatabaseDeviceTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cf0252831921b07abeba629bf154dc2c8b475da
--- /dev/null
+++ b/src/service/service/database/DatabaseDeviceTools.py
@@ -0,0 +1,87 @@
+import grpc
+from typing import Optional, Tuple
+from common.orm.Database import Database
+from common.orm.HighLevel import get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
+from context.client.ContextClient import ContextClient
+from device.proto.context_pb2 import Device, DeviceId
+from .ConfigModel import delete_all_config_rules, grpc_config_rules_to_raw, update_config
+from .ContextModel import ContextModel
+from .DeviceModel import DeviceModel, grpc_to_enum__device_operational_status, set_drivers
+from .EndPointModel import EndPointModel
+from .TopologyModel import TopologyModel
+
+def update_device_in_local_database(database : Database, device : Device) -> Tuple[DeviceModel, bool]:
+    device_uuid = device.device_id.device_uuid.uuid
+
+    for i,endpoint in enumerate(device.device_endpoints):
+        endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+        if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+        if device_uuid != endpoint_device_uuid:
+            raise InvalidArgumentException(
+                'request.device_endpoints[{:d}].device_id.device_uuid.uuid'.format(i), endpoint_device_uuid,
+                ['should be == {:s}({:s})'.format('request.device_id.device_uuid.uuid', device_uuid)])
+
+    config_rules = grpc_config_rules_to_raw(device.device_config.config_rules)
+    delete_all_config_rules(database, device_uuid, 'running')
+    running_config_result = update_config(database, device_uuid, 'running', config_rules)
+
+    result : Tuple[DeviceModel, bool] = update_or_create_object(database, DeviceModel, device_uuid, {
+        'device_uuid'              : device_uuid,
+        'device_type'              : device.device_type,
+        'device_operational_status': grpc_to_enum__device_operational_status(device.device_operational_status),
+        'device_config_fk'         : running_config_result[0][0],
+    })
+    db_device, updated = result
+    set_drivers(database, db_device, device.device_drivers)
+
+    for i,endpoint in enumerate(device.device_endpoints):
+        endpoint_uuid = endpoint.endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid = endpoint.endpoint_id.device_id.device_uuid.uuid
+        if len(endpoint_device_uuid) == 0: endpoint_device_uuid = device_uuid
+
+        str_endpoint_key = key_to_str([device_uuid, endpoint_uuid])
+        endpoint_attributes = {
+            'device_fk'    : db_device,
+            'endpoint_uuid': endpoint_uuid,
+            'endpoint_type': endpoint.endpoint_type,
+        }
+
+        endpoint_topology_context_uuid = endpoint.endpoint_id.topology_id.context_id.context_uuid.uuid
+        endpoint_topology_uuid = endpoint.endpoint_id.topology_id.topology_uuid.uuid
+        if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+            result : Tuple[ContextModel, bool] = get_or_create_object(
+                database, ContextModel, endpoint_topology_context_uuid, defaults={
+                    'context_uuid': endpoint_topology_context_uuid,
+                })
+            db_context, _ = result
+
+            str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+            result : Tuple[TopologyModel, bool] = get_or_create_object(
+                database, TopologyModel, str_topology_key, defaults={
+                    'context_fk': db_context,
+                    'topology_uuid': endpoint_topology_uuid,
+                })
+            db_topology, _ = result
+
+            str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+            endpoint_attributes['topology_fk'] = db_topology
+
+        result : Tuple[EndPointModel, bool] = update_or_create_object(
+            database, EndPointModel, str_endpoint_key, endpoint_attributes)
+        _, db_endpoint_updated = result
+        updated = updated or db_endpoint_updated
+
+    return db_device, updated
+
+def sync_device_from_context(
+    device_uuid : str, context_client : ContextClient, database : Database
+    ) -> Optional[Tuple[DeviceModel, bool]]:
+
+    try:
+        device : Device = context_client.GetDevice(DeviceId(device_uuid={'uuid': device_uuid}))
+    except grpc.RpcError as e:
+        if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member
+        return None
+    return update_device_in_local_database(database, device)
diff --git a/src/service/service/database/DatabaseServiceTools.py b/src/service/service/database/DatabaseServiceTools.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b538f82a62b73aaa194628b986810204dcbc46b
--- /dev/null
+++ b/src/service/service/database/DatabaseServiceTools.py
@@ -0,0 +1,129 @@
+import grpc
+from typing import Optional, Tuple
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
+from common.orm.backend.Tools import key_to_str
+from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
+from context.client.ContextClient import ContextClient
+from service.proto.context_pb2 import Service, ServiceId
+from .ConfigModel import delete_all_config_rules, grpc_config_rules_to_raw, update_config
+from .ConstraintModel import delete_all_constraints, grpc_constraints_to_raw, update_constraints
+from .ContextModel import ContextModel
+from .EndPointModel import EndPointModel
+from .RelationModels import ServiceEndPointModel
+from .ServiceModel import ServiceModel, grpc_to_enum__service_status, grpc_to_enum__service_type
+from .TopologyModel import TopologyModel
+
+def update_service_in_local_database(database : Database, service : Service) -> Tuple[ServiceModel, bool]:
+    service_uuid = service.service_id.service_uuid.uuid
+    service_context_uuid = service.service_id.context_id.context_uuid.uuid
+    if len(service_context_uuid) == 0: service_context_uuid = DEFAULT_CONTEXT_UUID
+
+    topology_uuids = {}
+    for i,endpoint_id in enumerate(service.service_endpoint_ids):
+        endpoint_uuid                  = endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid           = endpoint_id.device_id.device_uuid.uuid
+        endpoint_topology_uuid         = endpoint_id.topology_id.topology_uuid.uuid
+        endpoint_topology_context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+        if len(endpoint_device_uuid) == 0:
+            raise InvalidArgumentException(
+                'request.service_endpoint_ids[{:d}].device_id.device_uuid.uuid'.format(i), endpoint_device_uuid,
+                ['not set'])
+
+        if len(endpoint_topology_context_uuid) == 0: endpoint_topology_context_uuid = service_context_uuid
+        if service_context_uuid != endpoint_topology_context_uuid:
+            raise InvalidArgumentException(
+                'request.service_endpoint_ids[{:d}].topology_id.context_id.context_uuid.uuid'.format(i),
+                endpoint_topology_context_uuid,
+                ['should be == {:s}({:s})'.format('service_id.context_id.context_uuid.uuid', service_context_uuid)])
+
+        topology_uuids.setdefault(endpoint_topology_uuid, set()).add(
+            'request.service_endpoint_ids[{:d}].topology_id.topology_uuid.uuid'.format(i))
+
+    if len(topology_uuids) > 1:
+        raise InvalidArgumentException(
+            'request.service_endpoint_ids', '...',
+            ['Multiple different topology_uuid values specified: {:s}'.format(str(topology_uuids))])
+    if len(topology_uuids) == 1:
+        topology_uuid = topology_uuids.popitem()[0]
+    else:
+        topology_uuid = DEFAULT_TOPOLOGY_UUID
+
+    result : Tuple[ContextModel, bool] = get_or_create_object(
+        database, ContextModel, service_context_uuid, defaults={'context_uuid': service_context_uuid})
+    db_context, _ = result
+
+    str_topology_key = None
+    if len(topology_uuid) > 0:
+        str_topology_key = key_to_str([service_context_uuid, topology_uuid])
+        result : Tuple[TopologyModel, bool] = get_or_create_object(
+            database, TopologyModel, str_topology_key, defaults={'context_fk': db_context, 'topology_uuid': topology_uuid})
+        #db_topology, _ = result
+
+    str_service_key = key_to_str([service_context_uuid, service_uuid])
+
+    config_rules = grpc_config_rules_to_raw(service.service_config.config_rules)
+    delete_all_config_rules(database, str_service_key, 'running')
+    running_config_result = update_config(database, str_service_key, 'running', config_rules)
+
+    constraints = grpc_constraints_to_raw(service.service_constraints)
+    delete_all_constraints(database, str_service_key, 'running')
+    running_constraints_result = update_constraints(database, str_service_key, 'running', constraints)
+
+    result : Tuple[ServiceModel, bool] = update_or_create_object(database, ServiceModel, str_service_key, {
+        'context_fk'            : db_context,
+        'service_uuid'          : service_uuid,
+        'service_type'          : grpc_to_enum__service_type(service.service_type),
+        'service_status'        : grpc_to_enum__service_status(service.service_status.service_status),
+        'service_constraints_fk': running_constraints_result[0][0],
+        'service_config_fk'     : running_config_result[0][0],
+    })
+    db_service, updated = result
+
+    for i,endpoint_id in enumerate(service.service_endpoint_ids):
+        endpoint_uuid        = endpoint_id.endpoint_uuid.uuid
+        endpoint_device_uuid = endpoint_id.device_id.device_uuid.uuid
+
+        str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+        if str_topology_key is not None:
+            str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+        db_endpoint : EndPointModel = get_object(database, EndPointModel, str_endpoint_key)
+
+        str_service_endpoint_key = key_to_str([str_service_key, str_endpoint_key], separator='--')
+        result : Tuple[ServiceEndPointModel, bool] = get_or_create_object(
+            database, ServiceEndPointModel, str_service_endpoint_key, {
+                'service_fk': db_service, 'endpoint_fk': db_endpoint})
+        _, service_endpoint_created = result
+        updated = updated or service_endpoint_created
+
+    return db_service, updated
+
+def sync_service_from_context(
+    context_uuid : str, service_uuid : str, context_client : ContextClient, database : Database
+    ) -> Optional[Tuple[ServiceModel, bool]]:
+
+    try:
+        service : Service = context_client.GetService(ServiceId(
+            context_id={'context_uuid': {'uuid': context_uuid}},
+            service_uuid={'uuid': service_uuid}))
+    except grpc.RpcError as e:
+        if e.code() != grpc.StatusCode.NOT_FOUND: raise # pylint: disable=no-member
+        return None
+    return update_service_in_local_database(database, service)
+
+def sync_service_to_context(db_service : ServiceModel, context_client : ContextClient) -> None:
+    if db_service is None: return
+    context_client.SetService(Service(**db_service.dump(
+        include_endpoint_ids=True, include_constraints=True, include_config_rules=True)))
+
+def delete_service_from_context(db_service : ServiceModel, context_client : ContextClient) -> None:
+    if db_service is None: return
+    context_client.RemoveService(ServiceId(**db_service.dump_id()))
diff --git a/src/service/service/database/DeviceModel.py b/src/service/service/database/DeviceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..d005292acfd4cf20619548dbfa5b48a08780ec2f
--- /dev/null
+++ b/src/service/service/database/DeviceModel.py
@@ -0,0 +1,87 @@
+import functools, logging
+from enum import Enum
+from typing import Dict, List
+from common.orm.Database import Database
+from common.orm.backend.Tools import key_to_str
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from context.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from .ConfigModel import ConfigModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_DeviceDriverEnum(Enum):
+    UNDEFINED             = DeviceDriverEnum.DEVICEDRIVER_UNDEFINED
+    OPENCONFIG            = DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG
+    TRANSPORT_API         = DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API
+    P4                    = DeviceDriverEnum.DEVICEDRIVER_P4
+    IETF_NETWORK_TOPOLOGY = DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY
+    ONF_TR_352            = DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352
+
+grpc_to_enum__device_driver = functools.partial(
+    grpc_to_enum, DeviceDriverEnum, ORM_DeviceDriverEnum)
+
+class ORM_DeviceOperationalStatusEnum(Enum):
+    UNDEFINED = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_UNDEFINED
+    DISABLED  = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED
+    ENABLED   = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
+grpc_to_enum__device_operational_status = functools.partial(
+    grpc_to_enum, DeviceOperationalStatusEnum, ORM_DeviceOperationalStatusEnum)
+
+class DeviceModel(Model):
+    pk = PrimaryKeyField()
+    device_uuid = StringField(required=True, allow_empty=False)
+    device_type = StringField()
+    device_config_fk = ForeignKeyField(ConfigModel)
+    device_operational_status = EnumeratedField(ORM_DeviceOperationalStatusEnum, required=True)
+
+    def dump_id(self) -> Dict:
+        return {'device_uuid': {'uuid': self.device_uuid}}
+
+    def dump_config(self) -> Dict:
+        return ConfigModel(self.database, self.device_config_fk).dump()
+
+    def dump_drivers(self) -> List[int]:
+        db_driver_pks = self.references(DriverModel)
+        return [DriverModel(self.database, pk).dump() for pk,_ in db_driver_pks]
+
+    def dump_endpoints(self) -> List[Dict]:
+        from .EndPointModel import EndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints_pks = self.references(EndPointModel)
+        return [EndPointModel(self.database, pk).dump() for pk,_ in db_endpoints_pks]
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_config_rules=True, include_drivers=True, include_endpoints=True
+        ) -> Dict:
+        result = {
+            'device_id': self.dump_id(),
+            'device_type': self.device_type,
+            'device_operational_status': self.device_operational_status.value,
+        }
+        if include_config_rules: result.setdefault('device_config', {})['config_rules'] = self.dump_config()
+        if include_drivers: result['device_drivers'] = self.dump_drivers()
+        if include_endpoints: result['device_endpoints'] = self.dump_endpoints()
+        return result
+
+class DriverModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    device_fk = ForeignKeyField(DeviceModel)
+    driver = EnumeratedField(ORM_DeviceDriverEnum, required=True)
+
+    def dump(self) -> int:
+        return self.driver.value
+
+def set_drivers(database : Database, db_device : DeviceModel, grpc_device_drivers):
+    db_device_pk = db_device.pk
+    for driver in grpc_device_drivers:
+        orm_driver = grpc_to_enum__device_driver(driver)
+        str_device_driver_key = key_to_str([db_device_pk, orm_driver.name])
+        db_device_driver = DriverModel(database, str_device_driver_key)
+        db_device_driver.device_fk = db_device
+        db_device_driver.driver = orm_driver
+        db_device_driver.save()
diff --git a/src/service/service/database/EndPointModel.py b/src/service/service/database/EndPointModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..78b00db8628a841c8deae89e3db8ca1056fa1db5
--- /dev/null
+++ b/src/service/service/database/EndPointModel.py
@@ -0,0 +1,43 @@
+import logging
+from typing import Dict, List, Optional, Tuple
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from service.proto.context_pb2 import EndPointId
+from .DeviceModel import DeviceModel
+from .TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+class EndPointModel(Model):
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel, required=False)
+    device_fk = ForeignKeyField(DeviceModel)
+    endpoint_uuid = StringField(required=True, allow_empty=False)
+    endpoint_type = StringField()
+
+    def dump_id(self) -> Dict:
+        device_id = DeviceModel(self.database, self.device_fk).dump_id()
+        result = {
+            'device_id': device_id,
+            'endpoint_uuid': {'uuid': self.endpoint_uuid},
+        }
+        if self.topology_fk is not None:
+            result['topology_id'] = TopologyModel(self.database, self.topology_fk).dump_id()
+        return result
+
+    def dump(self) -> Dict:
+        return {
+            'endpoint_id': self.dump_id(),
+            'endpoint_type': self.endpoint_type,
+        }
+
+def grpc_endpointids_to_raw(grpc_endpointids : List[EndPointId]) -> List[Tuple[str, str, Optional[str]]]:
+    def translate(grpc_endpointid : EndPointId) -> Tuple[str, str, Optional[str]]:
+        device_uuid   = grpc_endpointid.device_id.device_uuid.uuid
+        endpoint_uuid = grpc_endpointid.endpoint_uuid.uuid
+        topology_uuid = grpc_endpointid.topology_id.topology_uuid.uuid
+        if len(topology_uuid) == 0: topology_uuid = None
+        return device_uuid, endpoint_uuid, topology_uuid
+    return [translate(grpc_endpointid) for grpc_endpointid in grpc_endpointids]
diff --git a/src/service/service/database/RelationModels.py b/src/service/service/database/RelationModels.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbf93feff84a99aa20acb43a31b9abb30ae14a20
--- /dev/null
+++ b/src/service/service/database/RelationModels.py
@@ -0,0 +1,31 @@
+import logging
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.model.Model import Model
+from .DeviceModel import DeviceModel
+from .EndPointModel import EndPointModel
+#from .LinkModel import LinkModel
+from .ServiceModel import ServiceModel
+from .TopologyModel import TopologyModel
+
+LOGGER = logging.getLogger(__name__)
+
+#class LinkEndPointModel(Model): # pylint: disable=abstract-method
+#    pk = PrimaryKeyField()
+#    link_fk = ForeignKeyField(LinkModel)
+#    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class ServiceEndPointModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    service_fk = ForeignKeyField(ServiceModel)
+    endpoint_fk = ForeignKeyField(EndPointModel)
+
+class TopologyDeviceModel(Model): # pylint: disable=abstract-method
+    pk = PrimaryKeyField()
+    topology_fk = ForeignKeyField(TopologyModel)
+    device_fk = ForeignKeyField(DeviceModel)
+
+#class TopologyLinkModel(Model): # pylint: disable=abstract-method
+#    pk = PrimaryKeyField()
+#    topology_fk = ForeignKeyField(TopologyModel)
+#    link_fk = ForeignKeyField(LinkModel)
diff --git a/src/service/service/database/ServiceModel.py b/src/service/service/database/ServiceModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6bd2e9c47f77538fac6d65aa0e02ca720126abb
--- /dev/null
+++ b/src/service/service/database/ServiceModel.py
@@ -0,0 +1,74 @@
+import functools, logging, operator
+from enum import Enum
+from typing import Dict, List
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+from common.orm.HighLevel import get_related_objects
+from context.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
+from .ConfigModel import ConfigModel
+from .ConstraintModel import ConstraintsModel
+from .ContextModel import ContextModel
+from .Tools import grpc_to_enum
+
+LOGGER = logging.getLogger(__name__)
+
+class ORM_ServiceTypeEnum(Enum):
+    UNKNOWN                   = ServiceTypeEnum.SERVICETYPE_UNKNOWN
+    L3NM                      = ServiceTypeEnum.SERVICETYPE_L3NM
+    L2NM                      = ServiceTypeEnum.SERVICETYPE_L2NM
+    TAPI_CONNECTIVITY_SERVICE = ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE
+
+grpc_to_enum__service_type = functools.partial(
+    grpc_to_enum, ServiceTypeEnum, ORM_ServiceTypeEnum)
+
+class ORM_ServiceStatusEnum(Enum):
+    UNDEFINED       = ServiceStatusEnum.SERVICESTATUS_UNDEFINED
+    PLANNED         = ServiceStatusEnum.SERVICESTATUS_PLANNED
+    ACTIVE          = ServiceStatusEnum.SERVICESTATUS_ACTIVE
+    PENDING_REMOVAL = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL
+
+grpc_to_enum__service_status = functools.partial(
+    grpc_to_enum, ServiceStatusEnum, ORM_ServiceStatusEnum)
+
+class ServiceModel(Model):
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    service_uuid = StringField(required=True, allow_empty=False)
+    service_type = EnumeratedField(ORM_ServiceTypeEnum, required=True)
+    service_constraints_fk = ForeignKeyField(ConstraintsModel)
+    service_status = EnumeratedField(ORM_ServiceStatusEnum, required=True)
+    service_config_fk = ForeignKeyField(ConfigModel)
+
+    def dump_id(self) -> Dict:
+        context_id = ContextModel(self.database, self.context_fk).dump_id()
+        return {
+            'context_id': context_id,
+            'service_uuid': {'uuid': self.service_uuid},
+        }
+
+    def dump_endpoint_ids(self) -> List[Dict]:
+        from .RelationModels import ServiceEndPointModel # pylint: disable=import-outside-toplevel
+        db_endpoints = get_related_objects(self, ServiceEndPointModel, 'endpoint_fk')
+        return [db_endpoint.dump_id() for db_endpoint in sorted(db_endpoints, key=operator.attrgetter('pk'))]
+
+    def dump_constraints(self) -> List[Dict]:
+        return ConstraintsModel(self.database, self.service_constraints_fk).dump()
+
+    def dump_config(self) -> Dict:
+        return ConfigModel(self.database, self.service_config_fk).dump()
+
+    def dump(   # pylint: disable=arguments-differ
+            self, include_endpoint_ids=True, include_constraints=True, include_config_rules=True
+        ) -> Dict:
+        result = {
+            'service_id': self.dump_id(),
+            'service_type': self.service_type.value,
+            'service_status': {'service_status': self.service_status.value},
+        }
+        if include_endpoint_ids: result['service_endpoint_ids'] = self.dump_endpoint_ids()
+        if include_constraints: result['service_constraints'] = self.dump_constraints()
+        if include_config_rules: result.setdefault('service_config', {})['config_rules'] = self.dump_config()
+        return result
diff --git a/src/service/service/database/Tools.py b/src/service/service/database/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..36ffbcd46fcf686371b0799445ce4f9ce5b75838
--- /dev/null
+++ b/src/service/service/database/Tools.py
@@ -0,0 +1,58 @@
+import hashlib, re
+from enum import Enum
+from typing import Dict, List, Tuple, Union
+
+# Convenience helper to remove a dictionary item while building dict/list/set comprehensions.
+
+def remove_dict_key(dictionary : Dict, key : str):
+    dictionary.pop(key, None)
+    return dictionary
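+
+# Illustrative example (hypothetical names, for documentation only): remove_dict_key mutates and returns the same
+# dictionary, so it can be chained inside comprehensions, e.g.:
+#   cleaned = [remove_dict_key(dict(d), 'position') for d in dumped_rules]
+# would drop the 'position' key from every dictionary in 'dumped_rules' while keeping the rest of each entry intact.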
+
+# Enumeration classes are redundant with the gRPC-generated ones, but gRPC does not provide a programmatic method to
+# retrieve the values it expects from strings containing the desired value symbol or its integer value, so an explicit
+# mapping is required. Besides, ORM Models expect Enum classes in EnumeratedFields; we define specific, conveniently
+# named Enum classes to serve both purposes.
+
+def grpc_to_enum(grpc_enum_class, orm_enum_class : Enum, grpc_enum_value):
+    grpc_enum_name = grpc_enum_class.Name(grpc_enum_value)
+    grpc_enum_prefix = orm_enum_class.__name__.upper()
+    grpc_enum_prefix = re.sub(r'^ORM_(.+)$', r'\1', grpc_enum_prefix)
+    grpc_enum_prefix = re.sub(r'^(.+)ENUM$', r'\1', grpc_enum_prefix)
+    grpc_enum_prefix = grpc_enum_prefix + '_'
+    orm_enum_name = grpc_enum_name.replace(grpc_enum_prefix, '')
+    orm_enum_value = orm_enum_class._member_map_.get(orm_enum_name) # pylint: disable=protected-access
+    return orm_enum_value
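+
+# Illustrative example (based on DeviceModel.py in this package): given an ORM enum mirroring a gRPC enum, a partial
+# such as
+#   grpc_to_enum__device_driver = functools.partial(grpc_to_enum, DeviceDriverEnum, ORM_DeviceDriverEnum)
+# maps DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG to ORM_DeviceDriverEnum.OPENCONFIG by stripping the 'DEVICEDRIVER_'
+# prefix derived from the ORM class name.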
+
+# For some models, it is convenient to produce a string hash for fast existence/modification comparisons. Method
+# fast_hasher computes configurable-length hashes (between 1 and 64 bytes) and returns them in hexadecimal notation.
+
+FASTHASHER_ITEM_ACCEPTED_FORMAT = 'Union[bytes, str]'
+FASTHASHER_DATA_ACCEPTED_FORMAT = 'Union[{fmt:s}, List[{fmt:s}], Tuple[{fmt:s}]]'.format(
+    fmt=FASTHASHER_ITEM_ACCEPTED_FORMAT)
+
+def fast_hasher(data : Union[bytes, str, List[Union[bytes, str]], Tuple[Union[bytes, str]]], digest_size : int = 8):
+    hasher = hashlib.blake2b(digest_size=digest_size)
+    # Do not accept sets, dicts, or other unordered data structures since their iteration order is arbitrary, which
+    # would produce different hashes for the same content. Support for sets or dicts could be added by sorting their
+    # items by key beforehand.
+
+    if isinstance(data, bytes):
+        data = [data]
+    elif isinstance(data, str):
+        data = [data.encode('UTF-8')]
+    elif isinstance(data, (list, tuple)):
+        pass
+    else:
+        msg = 'data({:s}) must be {:s}, found {:s}'
+        raise TypeError(msg.format(str(data), FASTHASHER_DATA_ACCEPTED_FORMAT, str(type(data))))
+
+    for i,item in enumerate(data):
+        if isinstance(item, str):
+            item = item.encode('UTF-8')
+        elif isinstance(item, bytes):
+            pass
+        else:
+            msg = 'data[{:d}]({:s}) must be {:s}, found {:s}'
+            raise TypeError(msg.format(i, str(item), FASTHASHER_ITEM_ACCEPTED_FORMAT, str(type(item))))
+        hasher.update(item)
+    return hasher.hexdigest()
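+
+# Illustrative example (for documentation only): with the default digest_size of 8 bytes,
+#   fast_hasher('some-constraint-type')        # returns a 16-character hexadecimal string
+#   fast_hasher(['device-uuid', 'ep-uuid'])    # hashes the concatenation of the items, in order
+# Strings are UTF-8 encoded before hashing; ConstraintModel.update_constraints() in this package uses
+# fast_hasher(constraint_type) to build per-constraint keys.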
diff --git a/src/service/service/database/TopologyModel.py b/src/service/service/database/TopologyModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..54a7e75e5059e0f1f605fe6235740c0094ab73c5
--- /dev/null
+++ b/src/service/service/database/TopologyModel.py
@@ -0,0 +1,40 @@
+import logging #, operator
+#from typing import Dict, List
+from common.orm.fields.ForeignKeyField import ForeignKeyField
+from common.orm.fields.PrimaryKeyField import PrimaryKeyField
+from common.orm.fields.StringField import StringField
+from common.orm.model.Model import Model
+#from common.orm.HighLevel import get_related_objects
+from .ContextModel import ContextModel
+
+LOGGER = logging.getLogger(__name__)
+
+class TopologyModel(Model):
+    pk = PrimaryKeyField()
+    context_fk = ForeignKeyField(ContextModel)
+    topology_uuid = StringField(required=True, allow_empty=False)
+
+#    def dump_id(self) -> Dict:
+#        context_id = ContextModel(self.database, self.context_fk).dump_id()
+#        return {
+#            'context_id': context_id,
+#            'topology_uuid': {'uuid': self.topology_uuid},
+#        }
+#
+#    def dump_device_ids(self) -> List[Dict]:
+#        from .RelationModels import TopologyDeviceModel # pylint: disable=import-outside-toplevel
+#        db_devices = get_related_objects(self, TopologyDeviceModel, 'device_fk')
+#        return [db_device.dump_id() for db_device in sorted(db_devices, key=operator.attrgetter('pk'))]
+#
+#    def dump_link_ids(self) -> List[Dict]:
+#        from .RelationModels import TopologyLinkModel # pylint: disable=import-outside-toplevel
+#        db_links = get_related_objects(self, TopologyLinkModel, 'link_fk')
+#        return [db_link.dump_id() for db_link in sorted(db_links, key=operator.attrgetter('pk'))]
+#
+#    def dump(   # pylint: disable=arguments-differ
+#            self, include_devices=True, include_links=True
+#        ) -> Dict:
+#        result = {'topology_id': self.dump_id()}
+#        if include_devices: result['device_ids'] = self.dump_device_ids()
+#        if include_links: result['link_ids'] = self.dump_link_ids()
+#        return result
diff --git a/src/service/service/database/__init__.py b/src/service/service/database/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d2c65947e2775c4c9dc5dda8e7737bb04edb8cd
--- /dev/null
+++ b/src/service/service/database/__init__.py
@@ -0,0 +1,2 @@
+# In-memory database with a simplified representation of the Context database, focused on the Service model.
+# Used as an internal data cache and for message validation and formatting purposes.
diff --git a/src/device/service/drivers/emulated/AnyTreeTools.py b/src/service/service/service_handler_api/AnyTreeTools.py
similarity index 59%
rename from src/device/service/drivers/emulated/AnyTreeTools.py
rename to src/service/service/service_handler_api/AnyTreeTools.py
index e7817b78921e3b91fb84ca660f022c869ac88220..435fccf56b5166000fde2b6f4e555af29c550531 100644
--- a/src/device/service/drivers/emulated/AnyTreeTools.py
+++ b/src/service/service/service_handler_api/AnyTreeTools.py
@@ -1,7 +1,5 @@
 import anytree
-from typing import Any, List, Optional
-
-from anytree.render import Row
+from typing import Any, List, Optional, Union
 
 class TreeNode(anytree.node.Node):
     def __init__(self, name, parent=None, children=None, **kwargs) -> None:
@@ -29,25 +27,43 @@ class RawStyle(anytree.render.AbstractStyle):
         Node('/root/sub0/sub0A')
         Node('/root/sub1')
         """
-        super(RawStyle, self).__init__(u'', u'', u'')
+        super(RawStyle, self).__init__('', '', '')
+
+def get_subnode(
+    resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], default : Optional[Any] = None):
 
-def get_subnode(resolver : anytree.Resolver, root : TreeNode, path : List[str], default : Optional[Any] = None):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
             return default
     return node
 
-def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, path : List[str], value : Any):
+def set_subnode_value(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]], value : Any):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    if isinstance(key_or_path, str): key_or_path = key_or_path.split('/')
     node = root
-    for path_item in path:
+    for path_item in key_or_path:
         try:
             node = resolver.get(node, path_item)
         except anytree.ChildResolverError:
             node = TreeNode(path_item, parent=node)
-    node.value = value
+    if isinstance(getattr(node, 'value', None), dict) and isinstance(value, dict):
+        node.value.update(value)
+    else:
+        node.value = value
+
+def delete_subnode(resolver : anytree.Resolver, root : TreeNode, key_or_path : Union[str, List[str]]):
+    if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
+    node = get_subnode(resolver, root, key_or_path, default=None)
+    if node is None: return
+    parent : TreeNode = node.parent
+    children = list(parent.children)
+    children.remove(node)
+    parent.children = tuple(children)
 
 def dump_subtree(root : TreeNode):
     if not isinstance(root, TreeNode): raise Exception('root must be a TreeNode')
diff --git a/src/service/service/service_handler_api/Exceptions.py b/src/service/service/service_handler_api/Exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..d52df91786ddd8a08068be792a6e6f36bb1b3ebb
--- /dev/null
+++ b/src/service/service/service_handler_api/Exceptions.py
@@ -0,0 +1,52 @@
+class UnsatisfiedFilterException(Exception):
+    def __init__(self, filter_fields):
+        msg = 'No ServiceHandler satisfies FilterFields({:s})'
+        super().__init__(msg.format(str(filter_fields)))
+
+class UnsupportedServiceHandlerClassException(Exception):
+    def __init__(self, service_handler_class_name):
+        msg = 'Class({:s}) is not a subclass of _ServiceHandler'
+        super().__init__(msg.format(str(service_handler_class_name)))
+
+class UnsupportedFilterFieldException(Exception):
+    def __init__(self, unsupported_filter_fields, service_handler_class_name=None):
+        if service_handler_class_name:
+            msg = 'FilterFields({:s}) specified by ServiceHandler({:s}) are not supported'
+            msg = msg.format(str(unsupported_filter_fields), str(service_handler_class_name))
+        else:
+            msg = 'FilterFields({:s}) specified in Filter are not supported'
+            msg = msg.format(str(unsupported_filter_fields))
+        super().__init__(msg)
+
+class UnsupportedFilterFieldValueException(Exception):
+    def __init__(
+        self, filter_field_name, filter_field_value, allowed_filter_field_values, service_handler_class_name=None):
+
+        if service_handler_class_name:
+            msg = 'FilterField({:s}={:s}) specified by ServiceHandler({:s}) is not supported. Allowed values are {:s}'
+            msg = msg.format(
+                str(filter_field_name), str(filter_field_value), str(service_handler_class_name),
+                str(allowed_filter_field_values))
+        else:
+            msg = 'FilterField({:s}={:s}) specified in Filter is not supported. Allowed values are {:s}'
+            msg = msg.format(str(filter_field_name), str(filter_field_value), str(allowed_filter_field_values))
+        super().__init__(msg)
+
+#class UnsupportedResourceKeyException(Exception):
+#    def __init__(self, resource_key):
+#        msg = 'ResourceKey({:s}) not supported'
+#        msg = msg.format(str(resource_key))
+#        super().__init__(msg)
+#
+#class ConfigFieldNotFoundException(Exception):
+#    def __init__(self, config_field_name):
+#        msg = 'ConfigField({:s}) not specified in resource'
+#        msg = msg.format(str(config_field_name))
+#        super().__init__(msg)
+#
+#class ConfigFieldsNotSupportedException(Exception):
+#    def __init__(self, config_fields):
+#        msg = 'ConfigFields({:s}) not supported in resource'
+#        msg = msg.format(str(config_fields))
+#        super().__init__(msg)
+#
\ No newline at end of file
diff --git a/src/service/service/service_handler_api/FilterFields.py b/src/service/service/service_handler_api/FilterFields.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc1a3d5afee4468bca344e18a356728807367bbf
--- /dev/null
+++ b/src/service/service/service_handler_api/FilterFields.py
@@ -0,0 +1,13 @@
+from enum import Enum
+from service.service.database.ServiceModel import ORM_ServiceTypeEnum
+from service.service.database.DeviceModel import ORM_DeviceDriverEnum
+
+class FilterFieldEnum(Enum):
+    SERVICE_TYPE  = 'service_type'
+    DEVICE_DRIVER = 'device_driver'
+
+# Map each allowed filter field to its set of allowed values; use None when the field has no restriction (free text).
+FILTER_FIELD_ALLOWED_VALUES = {
+    FilterFieldEnum.SERVICE_TYPE.value  : {i.value for i in ORM_ServiceTypeEnum},
+    FilterFieldEnum.DEVICE_DRIVER.value : {i.value for i in ORM_DeviceDriverEnum},
+}
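+
+# Illustrative note (for documentation only): with the imports above, the resulting mapping is
+#   {'service_type' : {gRPC integer values of ORM_ServiceTypeEnum},
+#    'device_driver': {gRPC integer values of ORM_DeviceDriverEnum}}
+# ServiceHandlerFactory uses these sets to validate the filter field values supplied at registration and lookup time.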
diff --git a/src/service/service/service_handler_api/ServiceHandlerFactory.py b/src/service/service/service_handler_api/ServiceHandlerFactory.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f04b449de84b4fd566b4e10cec0b46839183dfb
--- /dev/null
+++ b/src/service/service/service_handler_api/ServiceHandlerFactory.py
@@ -0,0 +1,79 @@
+import logging, operator
+from enum import Enum
+from typing import Any, Dict, Iterable, List, Set, Tuple
+from service.service.service_handler_api._ServiceHandler import _ServiceHandler
+from .Exceptions import (
+    UnsatisfiedFilterException, UnsupportedServiceHandlerClassException, UnsupportedFilterFieldException,
+    UnsupportedFilterFieldValueException)
+from .FilterFields import FILTER_FIELD_ALLOWED_VALUES, FilterFieldEnum
+
+LOGGER = logging.getLogger(__name__)
+
+class ServiceHandlerFactory:
+    def __init__(self, service_handlers : List[Tuple[type, List[Dict[FilterFieldEnum, Any]]]]) -> None:
+        # Dict{field_name => Dict{field_value => Set{ServiceHandler}}}
+        self.__indices : Dict[str, Dict[str, Set[_ServiceHandler]]] = {}
+
+        for service_handler_class,filter_field_sets in service_handlers:
+            for filter_fields in filter_field_sets:
+                filter_fields = {k.value:v for k,v in filter_fields.items()}
+                self.register_service_handler_class(service_handler_class, **filter_fields)
+
+    def register_service_handler_class(self, service_handler_class, **filter_fields):
+        if not issubclass(service_handler_class, _ServiceHandler):
+            raise UnsupportedServiceHandlerClassException(str(service_handler_class))
+
+        service_handler_name = service_handler_class.__name__
+        supported_filter_fields = set(FILTER_FIELD_ALLOWED_VALUES.keys())
+        unsupported_filter_fields = set(filter_fields.keys()).difference(supported_filter_fields)
+        if len(unsupported_filter_fields) > 0:
+            raise UnsupportedFilterFieldException(
+                unsupported_filter_fields, service_handler_class_name=service_handler_name)
+
+        for field_name, field_values in filter_fields.items():
+            field_indice = self.__indices.setdefault(field_name, dict())
+            field_enum_values = FILTER_FIELD_ALLOWED_VALUES.get(field_name)
+            if not isinstance(field_values, Iterable) or isinstance(field_values, str):
+                field_values = [field_values]
+            for field_value in field_values:
+                if isinstance(field_value, Enum): field_value = field_value.value
+                if field_enum_values is not None and field_value not in field_enum_values:
+                    raise UnsupportedFilterFieldValueException(
+                        field_name, field_value, field_enum_values, service_handler_class_name=service_handler_name)
+                field_indice_service_handlers = field_indice.setdefault(field_value, set())
+                field_indice_service_handlers.add(service_handler_class)
+
+    def get_service_handler_class(self, **filter_fields) -> _ServiceHandler:
+        supported_filter_fields = set(FILTER_FIELD_ALLOWED_VALUES.keys())
+        unsupported_filter_fields = set(filter_fields.keys()).difference(supported_filter_fields)
+        if len(unsupported_filter_fields) > 0: raise UnsupportedFilterFieldException(unsupported_filter_fields)
+
+        candidate_service_handler_classes : Dict[_ServiceHandler, int] = None # num. filter hits per service_handler
+        for field_name, field_values in filter_fields.items():
+            field_indice = self.__indices.get(field_name)
+            if field_indice is None: continue
+            if not isinstance(field_values, Iterable) or isinstance(field_values, str):
+                field_values = [field_values]
+
+            field_enum_values = FILTER_FIELD_ALLOWED_VALUES.get(field_name)
+
+            field_candidate_service_handler_classes = set()
+            for field_value in field_values:
+                if field_enum_values is not None and field_value not in field_enum_values:
+                    raise UnsupportedFilterFieldValueException(field_name, field_value, field_enum_values)
+                field_indice_service_handlers = field_indice.get(field_value)
+                if field_indice_service_handlers is None: continue
+                field_candidate_service_handler_classes = field_candidate_service_handler_classes.union(
+                    field_indice_service_handlers)
+
+            if candidate_service_handler_classes is None:
+                candidate_service_handler_classes = {k:1 for k in field_candidate_service_handler_classes}
+            else:
+                for candidate_service_handler_class in candidate_service_handler_classes:
+                    if candidate_service_handler_class not in field_candidate_service_handler_classes: continue
+                    candidate_service_handler_classes[candidate_service_handler_class] += 1
+
+        if candidate_service_handler_classes is None or len(candidate_service_handler_classes) == 0:
+            raise UnsatisfiedFilterException(filter_fields)
+        candidate_service_handler_classes = sorted(
+            candidate_service_handler_classes.items(), key=operator.itemgetter(1), reverse=True)
+        return candidate_service_handler_classes[0][0]
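+
+# Illustrative usage (hypothetical variable names, for documentation only):
+#   factory = ServiceHandlerFactory(SERVICE_HANDLERS)   # SERVICE_HANDLERS as declared in service_handlers/__init__.py
+#   cls = factory.get_service_handler_class(
+#       service_type=ORM_ServiceTypeEnum.L3NM.value, device_driver=[ORM_DeviceDriverEnum.UNDEFINED.value])
+# 'cls' is the registered class matching the most filter fields, e.g. L3NMEmulatedServiceHandler.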
diff --git a/src/service/service/service_handler_api/Tools.py b/src/service/service/service_handler_api/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..af900690ed5bffec77434bb73c320d1df5a02ed8
--- /dev/null
+++ b/src/service/service/service_handler_api/Tools.py
@@ -0,0 +1,28 @@
+import functools
+from typing import Any, List, Union
+
+ACTION_MSG_SET_ENDPOINT      = 'Set EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
+ACTION_MSG_DELETE_ENDPOINT   = 'Delete EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
+
+ACTION_MSG_SET_CONSTRAINT    = 'Set Constraint(constraint_type={:s}, constraint_value={:s})'
+ACTION_MSG_DELETE_CONSTRAINT = 'Delete Constraint(constraint_type={:s}, constraint_value={:s})'
+
+ACTION_MSG_SET_CONFIG        = 'Set Resource(key={:s}, value={:s})'
+ACTION_MSG_DELETE_CONFIG     = 'Delete Resource(key={:s}, value={:s})'
+
+def _check_errors(
+        message : str, parameters_list : List[Any], results_list : List[Union[bool, Exception]]
+    ) -> List[str]:
+    errors = []
+    for parameters, results in zip(parameters_list, results_list):
+        if not isinstance(results, Exception): continue
+        message = message.format(*tuple(map(str, parameters)))
+        errors.append('Unable to {:s}; error({:s})'.format(message, str(results)))
+    return errors
+
+check_errors_setendpoint      = functools.partial(_check_errors, ACTION_MSG_SET_ENDPOINT     )
+check_errors_deleteendpoint   = functools.partial(_check_errors, ACTION_MSG_DELETE_ENDPOINT  )
+check_errors_setconstraint    = functools.partial(_check_errors, ACTION_MSG_SET_CONSTRAINT   )
+check_errors_deleteconstraint = functools.partial(_check_errors, ACTION_MSG_DELETE_CONSTRAINT)
+check_errors_setconfig        = functools.partial(_check_errors, ACTION_MSG_SET_CONFIG       )
+check_errors_deleteconfig     = functools.partial(_check_errors, ACTION_MSG_DELETE_CONFIG    )
diff --git a/src/service/service/service_handler_api/_ServiceHandler.py b/src/service/service/service_handler_api/_ServiceHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..55de4ce7a470c0b5bf87e3883f42d60f5c2d3ece
--- /dev/null
+++ b/src/service/service/service_handler_api/_ServiceHandler.py
@@ -0,0 +1,109 @@
+from typing import Any, List, Optional, Tuple, Union
+from common.orm.Database import Database
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.service.database.ServiceModel import ServiceModel
+
+class _ServiceHandler:
+    def __init__(
+        self, db_service : ServiceModel, database : Database, context_client : ContextClient,
+        device_client : DeviceClient, **settings
+    ) -> None:
+        """ Initialize Driver.
+            Parameters:
+                db_service
+                    The service instance from the local in-memory database.
+                database
+                    The instance of the local in-memory database.
+                context_client
+                    An instance of context client used to retrieve information about the service and the devices.
+                device_client
+                    An instance of device client to be used to configure the devices.
+                **settings
+                    Extra settings required by the service handler.
+        """
+        raise NotImplementedError()
+
+    def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        """ Set endpoints from a list.
+            Parameters:
+                endpoints : List[Tuple[str, str, Optional[str]]]
+                    List of tuples, each containing a device_uuid, endpoint_uuid and, optionally, the topology_uuid
+                    of the endpoint to be added.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the endpoint changes requested. Return values must be in the same order as the
+                    endpoints requested. If an endpoint is properly added, True must be returned; otherwise, the
+                    Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        """ Delete endpoints form a list.
+            Parameters:
+                endpoints : List[Tuple[str, str, Optional[str]]]
+                    List of tuples, each containing a device_uuid, endpoint_uuid, and the topology_uuid of the endpoint
+                    to be removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the endpoint deletions requested. Return values must be in the same order as
+                    the endpoints requested. If an endpoint is properly deleted, True must be returned; otherwise, the
+                    Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Create/Update constraints.
+            Parameters:
+                constraints : List[Tuple[str, Any]]
+                    List of tuples, each containing a constraint_type and the new constraint_value to be set.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the constraint changes requested. Return values must be in the same order as
+                    the constraints requested. If a constraint is properly set, True must be returned; otherwise, the
+                    Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Delete constraints.
+            Parameters:
+                constraints : List[Tuple[str, Any]]
+                    List of tuples, each containing a constraint_type pointing to the constraint to be deleted, and a
+                    constraint_value containing possible additional values required to locate the constraint to be
+                    removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the constraint deletions requested. Return values must be in the same order as
+                    the constraints requested. If a constraint is properly deleted, True must be returned; otherwise,
+                    the Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Create/Update configuration for a list of resources.
+            Parameters:
+                resources : List[Tuple[str, Any]]
+                    List of tuples, each containing a resource_key pointing to the resource to be modified, and a
+                    resource_value containing the new value to be set.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the resource key changes requested. Return values must be in the same order as
+                    the resource keys requested. If a resource is properly set, True must be returned; otherwise, the
+                    Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        """ Delete configuration for a list of resources.
+            Parameters:
+                resources : List[Tuple[str, Any]]
+                    List of tuples, each containing a resource_key pointing to the resource to be deleted, and a
+                    resource_value containing possible additional values required to locate the value to be removed.
+            Returns:
+                results : List[Union[bool, Exception]]
+                    List of results for the resource key deletions requested. Return values must be in the same order
+                    as the resource keys requested. If a resource is properly deleted, True must be returned;
+                    otherwise, the Exception raised during its processing must be returned.
+        """
+        raise NotImplementedError()
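+
+# Illustrative sketch (not part of the API contract): concrete handlers typically process each requested item
+# independently and collect per-item outcomes, e.g.:
+#   results = []
+#   for endpoint in endpoints:
+#       try:
+#           # ...apply the change for this endpoint...
+#           results.append(True)
+#       except Exception as e:             # pylint: disable=broad-except
+#           results.append(e)
+#   return results
+# so that one failing item does not prevent the remaining items from being processed.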
diff --git a/src/service/service/service_handler_api/__init__.py b/src/service/service/service_handler_api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/service/service_handlers/Tools.py b/src/service/service/service_handlers/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..4186abf25869f69884ddba0862739713d45bba38
--- /dev/null
+++ b/src/service/service/service_handlers/Tools.py
@@ -0,0 +1,28 @@
+import json
+from copy import deepcopy
+from typing import Any, Dict, Union
+from service.proto.context_pb2 import ConfigActionEnum
+
+def config_rule(action : ConfigActionEnum, resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    if not isinstance(resource_value, str): resource_value = json.dumps(resource_value, sort_keys=True)
+    return {'action': action, 'resource_key': resource_key, 'resource_value': resource_value}
+
+def config_rule_set(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_SET, resource_key, resource_value)
+
+def config_rule_delete(resource_key : str, resource_value : Union[str, Dict[str, Any]]):
+    return config_rule(ConfigActionEnum.CONFIGACTION_DELETE, resource_key, resource_value)
+
+def constraint(constraint_type, constraint_value):
+    return {'constraint_type': str(constraint_type), 'constraint_value': str(constraint_value)}
+
+def endpoint_id(device_id, endpoint_uuid, topology_id=None):
+    result = {'device_id': deepcopy(device_id), 'endpoint_uuid': {'uuid': endpoint_uuid}}
+    if topology_id is not None: result['topology_id'] = deepcopy(topology_id)
+    return result
+
+def endpoint(device_id, endpoint_uuid, endpoint_type, topology_id=None):
+    return {
+        'endpoint_id': endpoint_id(device_id, endpoint_uuid, topology_id=topology_id),
+        'endpoint_type': endpoint_type,
+    }
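+
+# Illustrative example (for documentation only):
+#   config_rule_set('/network_instance[svc-NetInst]', {'type': 'L3VRF'})
+# produces
+#   {'action': ConfigActionEnum.CONFIGACTION_SET, 'resource_key': '/network_instance[svc-NetInst]',
+#    'resource_value': '{"type": "L3VRF"}'}
+# Non-string resource_value objects are serialized to JSON with sorted keys so equal values compare equal as strings.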
diff --git a/src/service/service/service_handlers/__init__.py b/src/service/service/service_handlers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4359f1fd15b8979a80a3532500546c9099cddef
--- /dev/null
+++ b/src/service/service/service_handlers/__init__.py
@@ -0,0 +1,11 @@
+from ..service_handler_api.FilterFields import FilterFieldEnum, ORM_DeviceDriverEnum, ORM_ServiceTypeEnum
+from .l3nm_emulated.L3NMEmulatedServiceHandler import L3NMEmulatedServiceHandler
+
+SERVICE_HANDLERS = [
+    (L3NMEmulatedServiceHandler, [
+        {
+            FilterFieldEnum.SERVICE_TYPE  : ORM_ServiceTypeEnum.L3NM,
+            FilterFieldEnum.DEVICE_DRIVER : ORM_DeviceDriverEnum.UNDEFINED,
+        }
+    ]),
+]
diff --git a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b5636c6b1284189c65f1c8589e0966ac6d57860
--- /dev/null
+++ b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
@@ -0,0 +1,209 @@
+import anytree, json, logging
+from typing import Any, Dict, List, Optional, Tuple, Union
+from common.orm.Database import Database
+from common.orm.HighLevel import get_object
+from common.orm.backend.Tools import key_to_str
+from common.type_checkers.Checkers import chk_length, chk_type
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from device.proto.context_pb2 import Device
+from service.service.database.ConfigModel import ORM_ConfigActionEnum, get_config_rules
+from service.service.database.ContextModel import ContextModel
+from service.service.database.DeviceModel import DeviceModel
+from service.service.database.ServiceModel import ServiceModel
+from service.service.service_handler_api._ServiceHandler import _ServiceHandler
+from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handlers.Tools import config_rule_set, config_rule_delete
+
+LOGGER = logging.getLogger(__name__)
+
+class L3NMEmulatedServiceHandler(_ServiceHandler):
+    def __init__(   # pylint: disable=super-init-not-called
+        self, db_service : ServiceModel, database : Database, context_client : ContextClient,
+        device_client : DeviceClient, **settings
+    ) -> None:
+        self.__db_service = db_service
+        self.__database = database
+        self.__context_client = context_client # pylint: disable=unused-private-member
+        self.__device_client = device_client
+
+        self.__db_context : ContextModel = get_object(self.__database, ContextModel, self.__db_service.context_fk)
+        str_service_key = key_to_str([self.__db_context.context_uuid, self.__db_service.service_uuid])
+        db_config = get_config_rules(self.__database, str_service_key, 'running')
+        self.__resolver = anytree.Resolver(pathattr='name')
+        self.__config = TreeNode('.')
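+        # Rebuild the service running configuration as a tree by replaying the stored config rules: SET rules
+        # create/merge nodes (JSON values are decoded when possible), DELETE rules prune the corresponding subtree.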
+        for action, resource_key, resource_value in db_config:
+            if action == ORM_ConfigActionEnum.SET:
+                try:
+                    resource_value = json.loads(resource_value)
+                except: # pylint: disable=bare-except
+                    pass
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+            elif action == ORM_ConfigActionEnum.DELETE:
+                delete_subnode(self.__resolver, self.__config, resource_key)
+
+    def SetEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+        network_interface_name    = '{:s}-NetIf'.format(service_uuid)
+        network_subinterface_name = '{:s}-NetSubIf'.format(service_uuid)
+
+        settings : TreeNode = get_subnode(self.__resolver, self.__config, 'settings', None)
+        if settings is None: raise Exception('Unable to retrieve service settings')
+        json_settings : Dict = settings.value
+        route_distinguisher = json_settings.get('route_distinguisher', '0:0')    # '60001:801'
+        mtu                 = json_settings.get('mtu',                 1450 )    # 1512
+        address_families    = json_settings.get('address_families',    []   )    # ['IPV4']
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid by now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve endpoint settings ({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                router_id           = json_endpoint_settings.get('router_id',           '0.0.0.0')  # '10.95.0.10'
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_set(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name, 'type': 'L3VRF', 'router_id': router_id,
+                            'route_distinguisher': route_distinguisher, 'address_families': address_families,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]'.format(endpoint_uuid), {
+                            'name': endpoint_uuid, 'description': network_interface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                            'description': network_subinterface_name, 'mtu': mtu,
+                    }),
+                    config_rule_set(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid,
+                    }),
+                ])
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def DeleteEndpoint(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> List[Union[bool, Exception]]:
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) == 0: return []
+
+        service_uuid              = self.__db_service.service_uuid
+        network_instance_name     = '{:s}-NetInst'.format(service_uuid)
+
+        results = []
+        for endpoint in endpoints:
+            try:
+                chk_type('endpoint', endpoint, (tuple, list))
+                chk_length('endpoint', endpoint, min_length=2, max_length=3)
+                if len(endpoint) == 2:
+                    device_uuid, endpoint_uuid = endpoint
+                else:
+                    device_uuid, endpoint_uuid, _ = endpoint # ignore topology_uuid by now
+
+                endpoint_settings_uri = 'device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
+                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                if endpoint_settings is None:
+                    raise Exception('Unable to retrieve endpoint settings ({:s})'.format(
+                        str(endpoint_settings_uri)))
+                json_endpoint_settings : Dict = endpoint_settings.value
+                sub_interface_index = json_endpoint_settings.get('sub_interface_index', 0        )  # 1
+
+                db_device : DeviceModel = get_object(self.__database, DeviceModel, device_uuid, raise_if_not_found=True)
+                json_device = db_device.dump(include_config_rules=False, include_drivers=True, include_endpoints=True)
+                json_device_config : Dict = json_device.setdefault('device_config', {})
+                json_device_config_rules : List = json_device_config.setdefault('config_rules', [])
+                json_device_config_rules.extend([
+                    config_rule_delete(
+                        '/network_instance[{:s}]/interface[{:s}]'.format(network_instance_name, endpoint_uuid), {
+                            'name': network_instance_name, 'id': endpoint_uuid
+                    }),
+                    config_rule_delete(
+                        '/interface[{:s}]/subinterface[{:d}]'.format(endpoint_uuid, sub_interface_index), {
+                            'name': endpoint_uuid, 'index': sub_interface_index,
+                    }),
+                    config_rule_delete(
+                        '/network_instance[{:s}]'.format(network_instance_name), {
+                            'name': network_instance_name
+                    }),
+                ])
+                self.__device_client.ConfigureDevice(Device(**json_device))
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
+                results.append(e)
+
+        return results
+
+    def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[SetConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('constraints', constraints, list)
+        if len(constraints) == 0: return []
+
+        msg = '[DeleteConstraint] Method not implemented. Constraints({:s}) are being ignored.'
+        LOGGER.warning(msg.format(str(constraints)))
+        return [True for _ in range(len(constraints))]
+
+    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, resource_value = resource
+                resource_value = json.loads(resource_value)
+                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
+
+    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
+        chk_type('resources', resources, list)
+        if len(resources) == 0: return []
+
+        results = []
+        for resource in resources:
+            try:
+                resource_key, _ = resource
+                delete_subnode(self.__resolver, self.__config, resource_key)
+                results.append(True)
+            except Exception as e: # pylint: disable=broad-except
+                LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
+                results.append(e)
+
+        return results
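For reference, a minimal usage sketch of the endpoint API above, assuming a handler instance has already been obtained through the ServiceHandlerFactory (as ServiceService does); the identifiers are hypothetical:

    # endpoints are (device_uuid, endpoint_uuid) or (device_uuid, endpoint_uuid, topology_uuid) tuples
    endpoints = [('EMU-1', 'EP100'), ('EMU-2', 'EP100')]
    results = service_handler.SetEndpoint(endpoints)
    # one result per endpoint: True on success, the raised Exception on failure
    assert all(result is True for result in results)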
diff --git a/src/service/service/service_handlers/l3nm_emulated/__init__.py b/src/service/service/service_handlers/l3nm_emulated/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/service/tests/CommonObjects.py b/src/service/tests/CommonObjects.py
new file mode 100644
index 0000000000000000000000000000000000000000..b704bad95e09f9bcfa2970359cc377925000f72c
--- /dev/null
+++ b/src/service/tests/CommonObjects.py
@@ -0,0 +1,62 @@
+import operator
+from copy import deepcopy
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from service.proto.context_pb2 import DeviceDriverEnum, DeviceOperationalStatusEnum
+from service.service.service_handlers.Tools import config_rule_set
+
+# use "deepcopy" to prevent propagating forced changes during tests
+
+CONTEXT_ID = {'context_uuid': {'uuid': DEFAULT_CONTEXT_UUID}}
+CONTEXT = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_ids': [],
+    'service_ids': [],
+}
+
+TOPOLOGY_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'topology_uuid': {'uuid': DEFAULT_TOPOLOGY_UUID},
+}
+TOPOLOGY = {
+    'topology_id': deepcopy(TOPOLOGY_ID),
+    'device_ids': [],
+    'link_ids': [],
+}
+
+DEVICE_EMU1_UUID = 'EMU-1'
+DEVICE_EMU1_ID = {'device_uuid': {'uuid': DEVICE_EMU1_UUID}}
+DEVICE_EMU1 = {
+    'device_id': deepcopy(DEVICE_EMU1_ID),
+    'device_type': 'emulated',
+    'device_config': {'config_rules': []},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_UNDEFINED],
+    'device_endpoints': [],
+}
+
+DEVICE_EMU2_UUID = 'EMU-2'
+DEVICE_EMU2_ID = {'device_uuid': {'uuid': DEVICE_EMU2_UUID}}
+DEVICE_EMU2 = {
+    'device_id': deepcopy(DEVICE_EMU2_ID),
+    'device_type': 'emulated',
+    'device_config': {'config_rules': []},
+    'device_operational_status': DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED,
+    'device_drivers': [DeviceDriverEnum.DEVICEDRIVER_UNDEFINED],
+    'device_endpoints': [],
+}
+
+DEVICE_EMU_ENDPOINTS = []
+for endpoint_uuid in ['EP1', 'EP2', 'EP3', 'EP100']:
+    DEVICE_EMU_ENDPOINTS.append((endpoint_uuid, '10Gbps', []))
+
+DEVICE_EMU_CONNECT_RULES = [
+    config_rule_set('_connect/address', '127.0.0.1'),
+    config_rule_set('_connect/port',    '0'),
+    config_rule_set('_connect/settings', {'endpoints': [
+        {
+            'uuid': endpoint_uuid, 'type': endpoint_type,
+            'sample_types': list(map(operator.attrgetter('value'), endpoint_sample_types)),
+        }
+        for endpoint_uuid,endpoint_type,endpoint_sample_types in DEVICE_EMU_ENDPOINTS
+    ]}),
+]
diff --git a/src/service/tests/Service_L3NM_EMU.py b/src/service/tests/Service_L3NM_EMU.py
new file mode 100644
index 0000000000000000000000000000000000000000..8cd3e9a1b8ed7d9fcdb7bf30744faa6c301592ec
--- /dev/null
+++ b/src/service/tests/Service_L3NM_EMU.py
@@ -0,0 +1,50 @@
+from copy import deepcopy
+from service.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
+from .CommonObjects import CONTEXT_ID
+from service.service.service_handlers.Tools import config_rule_set, constraint, endpoint_id
+
+# use "deepcopy" to prevent propagating forced changes during tests
+
+SERVICE_L3NM_EMU_UUID = 'SVC_L3NM_EMU'
+SERVICE_L3NM_EMU_ID = {
+    'context_id': deepcopy(CONTEXT_ID),
+    'service_uuid': {'uuid': SERVICE_L3NM_EMU_UUID}
+}
+SERVICE_L3NM_EMU = {
+    'service_id': deepcopy(SERVICE_L3NM_EMU_ID),
+    'service_type': ServiceTypeEnum.SERVICETYPE_L3NM,
+    'service_endpoint_ids' : [],
+    'service_constraints': [],
+    'service_status': {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED},
+    'service_config': {'config_rules': []},
+}
+
+DEVICE1_UUID = 'EMU-1'
+DEVICE1_ID = {'device_uuid': {'uuid': DEVICE1_UUID}}
+DEVICE1_ENDPOINT_UUID = 'EP100'
+
+DEVICE2_UUID = 'EMU-2'
+DEVICE2_ID = {'device_uuid': {'uuid': DEVICE2_UUID}}
+DEVICE2_ENDPOINT_UUID = 'EP100'
+
+SERVICE_L3NM_EMU_ENDPOINT_IDS = [
+    endpoint_id(DEVICE1_ID, DEVICE1_ENDPOINT_UUID),
+    endpoint_id(DEVICE2_ID, DEVICE2_ENDPOINT_UUID),
+]
+
+SERVICE_L3NM_EMU_CONSTRAINTS = [
+    constraint('latency_ms', 15.2),
+    constraint('jitter_us', 1.2),
+]
+
+SERVICE_L3NM_EMU_CONFIG_RULES = [
+    config_rule_set('settings', {
+        'route_distinguisher': '60001:801', 'mtu': 1512, 'address_families': ['IPV4']
+    }),
+    config_rule_set('device[{:s}]/endpoint[{:s}]/settings'.format(DEVICE1_UUID, DEVICE1_ENDPOINT_UUID), {
+        'router_id': '10.0.0.1', 'sub_interface_index': 1,
+    }),
+    config_rule_set('device[{:s}]/endpoint[{:s}]/settings'.format(DEVICE2_UUID, DEVICE2_ENDPOINT_UUID), {
+        'router_id': '10.0.0.2', 'sub_interface_index': 1,
+    }),
+]
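The resource keys above mirror what L3NMEmulatedServiceHandler reads from its config tree: a global 'settings' node (route_distinguisher, mtu, address_families) and one 'device[<device_uuid>]/endpoint[<endpoint_uuid>]/settings' node per endpoint (router_id, sub_interface_index). A hypothetical additional endpoint would follow the same key convention:

    # hypothetical third endpoint; 'EMU-3' is not defined in these tests
    config_rule_set('device[EMU-3]/endpoint[EP100]/settings', {
        'router_id': '10.0.0.3', 'sub_interface_index': 1,
    })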
diff --git a/src/service/tests/test_unitary.py b/src/service/tests/test_unitary.py
index e807cb845ad2abf0d13fca559a0be5e03f4ed795..5489ee2a59545b31c2d9ff44956a0c67414a4b24 100644
--- a/src/service/tests/test_unitary.py
+++ b/src/service/tests/test_unitary.py
@@ -1,357 +1,269 @@
-import copy, grpc, logging, pytest
+import copy, grpc, logging, operator, os, pytest
+from typing import Tuple
 from google.protobuf.json_format import MessageToDict
-#from common.database.api.context.Constants import DEFAULT_CONTEXT_ID, DEFAULT_TOPOLOGY_ID
-#from common.tests.Assertions import validate_empty, validate_service, validate_service_id, \
-#    validate_service_list_is_empty, validate_service_list_is_not_empty
-from service.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
+from common.orm.Database import Database
+from common.orm.Factory import get_database_backend, BackendEnum as DatabaseBackendEnum
+from common.message_broker.Factory import get_messagebroker_backend, BackendEnum as MessageBrokerBackendEnum
+from common.message_broker.MessageBroker import MessageBroker
+from context.Config import (
+    GRPC_SERVICE_PORT as CONTEXT_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as CONTEXT_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as CONTEXT_GRPC_GRACE_PERIOD)
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Context, DeviceId, Topology, Device
+from context.service.grpc_server.ContextService import ContextService
+from device.Config import (
+    GRPC_SERVICE_PORT as DEVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as DEVICE_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as DEVICE_GRPC_GRACE_PERIOD)
+from device.client.DeviceClient import DeviceClient
+from device.service.DeviceService import DeviceService
+from device.service.driver_api.DriverFactory import DriverFactory
+from device.service.driver_api.DriverInstanceCache import DriverInstanceCache
+from device.service.drivers import DRIVERS
+from device.tests.MockMonitoringService import MockMonitoringService
+from monitoring.Config import (
+    GRPC_SERVICE_PORT as MONITORING_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as MONITORING_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as MONITORING_GRPC_GRACE_PERIOD)
+from monitoring.client.monitoring_client import MonitoringClient
+from service.Config import (
+    GRPC_SERVICE_PORT as SERVICE_GRPC_SERVICE_PORT, GRPC_MAX_WORKERS as SERVICE_GRPC_MAX_WORKERS,
+    GRPC_GRACE_PERIOD as SERVICE_GRPC_GRACE_PERIOD)
 from service.client.ServiceClient import ServiceClient
-from service.proto.context_pb2 import Service
+from service.proto.context_pb2 import Service, ServiceId
 from service.service.ServiceService import ServiceService
-
-port = 10000 + GRPC_SERVICE_PORT # avoid privileged ports
+from service.service.service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
+from service.service.service_handlers import SERVICE_HANDLERS
+from service.tests.Service_L3NM_EMU import (
+    SERVICE_L3NM_EMU, SERVICE_L3NM_EMU_CONFIG_RULES, SERVICE_L3NM_EMU_CONSTRAINTS, SERVICE_L3NM_EMU_ENDPOINT_IDS,
+    SERVICE_L3NM_EMU_ID)
+#from device.service.MonitoringLoops import MonitoringLoops
+from .CommonObjects import CONTEXT, DEVICE_EMU1, DEVICE_EMU2, DEVICE_EMU_CONNECT_RULES, TOPOLOGY
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-## use "copy.deepcopy" to prevent propagating forced changes during tests
-#CONTEXT_ID = {'contextUuid': {'uuid': DEFAULT_CONTEXT_ID}}
-#TOPOLOGY_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'topoId': {'uuid': DEFAULT_TOPOLOGY_ID}}
-#SERVICE_ID = {'contextId': copy.deepcopy(CONTEXT_ID), 'cs_id': {'uuid': 'DEV1'}}
-#SERVICE = {
-#    'cs_id': copy.deepcopy(SERVICE_ID),
-#    'serviceType': ServiceType.L3NM,
-#    'serviceConfig': {'serviceConfig': '<config/>'},
-#    'serviceState': {'serviceState': ServiceStateEnum.PLANNED},
-#    'constraint': [
-#        {'constraint_type': 'latency_ms', 'constraint_value': '100'},
-#        {'constraint_type': 'hops', 'constraint_value': '5'},
-#    ],
-#    'endpointList' : [
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV1'}}, 'port_id': {'uuid' : 'EP5'}},
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV2'}}, 'port_id': {'uuid' : 'EP5'}},
-#        {'topoId': copy.deepcopy(TOPOLOGY_ID), 'dev_id': {'device_id': {'uuid': 'DEV3'}}, 'port_id': {'uuid' : 'EP5'}},
-#    ]
-#}
+CONTEXT_GRPC_SERVICE_PORT = 10000 + CONTEXT_GRPC_SERVICE_PORT # avoid privileged ports
+DEVICE_GRPC_SERVICE_PORT = 10000 + DEVICE_GRPC_SERVICE_PORT # avoid privileged ports
+SERVICE_GRPC_SERVICE_PORT = 10000 + SERVICE_GRPC_SERVICE_PORT # avoid privileged ports
+MONITORING_GRPC_SERVICE_PORT = 10000 + MONITORING_GRPC_SERVICE_PORT # avoid privileged ports
+
+DEFAULT_REDIS_SERVICE_HOST = '127.0.0.1'
+DEFAULT_REDIS_SERVICE_PORT = 6379
+DEFAULT_REDIS_DATABASE_ID  = 0
+
+REDIS_CONFIG = {
+    'REDIS_SERVICE_HOST': os.environ.get('REDIS_SERVICE_HOST', DEFAULT_REDIS_SERVICE_HOST),
+    'REDIS_SERVICE_PORT': os.environ.get('REDIS_SERVICE_PORT', DEFAULT_REDIS_SERVICE_PORT),
+    'REDIS_DATABASE_ID' : os.environ.get('REDIS_DATABASE_ID',  DEFAULT_REDIS_DATABASE_ID ),
+}
+
+SCENARIOS = [
+    ('all_inmemory', DatabaseBackendEnum.INMEMORY, {},           MessageBrokerBackendEnum.INMEMORY, {}          ),
+    #('all_redis',    DatabaseBackendEnum.REDIS,    REDIS_CONFIG, MessageBrokerBackendEnum.REDIS,    REDIS_CONFIG),
+]
+
+@pytest.fixture(scope='session', ids=[str(scenario[0]) for scenario in SCENARIOS], params=SCENARIOS)
+def context_db_mb(request) -> Tuple[Database, MessageBroker]:
+    name,db_backend,db_settings,mb_backend,mb_settings = request.param
+    msg = 'Running scenario {:s} db_backend={:s}, db_settings={:s}, mb_backend={:s}, mb_settings={:s}...'
+    LOGGER.info(msg.format(str(name), str(db_backend.value), str(db_settings), str(mb_backend.value), str(mb_settings)))
+    _database = Database(get_database_backend(backend=db_backend, **db_settings))
+    _message_broker = MessageBroker(get_messagebroker_backend(backend=mb_backend, **mb_settings))
+    yield _database, _message_broker
+    _message_broker.terminate()
+
+@pytest.fixture(scope='session')
+def context_service(context_db_mb : Tuple[Database, MessageBroker]): # pylint: disable=redefined-outer-name
+    _service = ContextService(
+        context_db_mb[0], context_db_mb[1], port=CONTEXT_GRPC_SERVICE_PORT, max_workers=CONTEXT_GRPC_MAX_WORKERS,
+        grace_period=CONTEXT_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def context_client(context_service : ContextService): # pylint: disable=redefined-outer-name
+    _client = ContextClient(address='127.0.0.1', port=CONTEXT_GRPC_SERVICE_PORT)
+    yield _client
+    _client.close()
+
+@pytest.fixture(scope='session')
+def monitoring_service():
+    _service = MockMonitoringService(port=MONITORING_GRPC_SERVICE_PORT, max_workers=MONITORING_GRPC_MAX_WORKERS,
+        grace_period=MONITORING_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def monitoring_client(monitoring_service : MockMonitoringService): # pylint: disable=redefined-outer-name
+    _client = MonitoringClient(server='127.0.0.1', port=MONITORING_GRPC_SERVICE_PORT)
+    #yield _client
+    #_client.close()
+    return _client
+
+@pytest.fixture(scope='session')
+def device_service(
+    context_client : ContextClient,         # pylint: disable=redefined-outer-name
+    monitoring_client : MonitoringClient):  # pylint: disable=redefined-outer-name
+
+    _driver_factory = DriverFactory(DRIVERS)
+    _driver_instance_cache = DriverInstanceCache(_driver_factory)
+    _service = DeviceService(
+        context_client, monitoring_client, _driver_instance_cache, port=DEVICE_GRPC_SERVICE_PORT,
+        max_workers=DEVICE_GRPC_MAX_WORKERS, grace_period=DEVICE_GRPC_GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name
+    _client = DeviceClient(address='127.0.0.1', port=DEVICE_GRPC_SERVICE_PORT)
+    yield _client
+    _client.close()
 
 @pytest.fixture(scope='session')
-def service_service():
-    _service = ServiceService(port=port, max_workers=GRPC_MAX_WORKERS, grace_period=GRPC_GRACE_PERIOD)
+def service_service(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+
+    _service_handler_factory = ServiceHandlerFactory(SERVICE_HANDLERS)
+    _service = ServiceService(
+        context_client, device_client, _service_handler_factory,
+        port=SERVICE_GRPC_SERVICE_PORT, max_workers=SERVICE_GRPC_MAX_WORKERS, grace_period=SERVICE_GRPC_GRACE_PERIOD)
     _service.start()
     yield _service
     _service.stop()
 
 @pytest.fixture(scope='session')
-def service_client(service_service):
-    _client = ServiceClient(address='127.0.0.1', port=port)
+def service_client(service_service : ServiceService): # pylint: disable=redefined-outer-name
+    _client = ServiceClient(address='127.0.0.1', port=SERVICE_GRPC_SERVICE_PORT)
     yield _client
     _client.close()
 
-def test_dummy(service_client : ServiceClient):
-    reply = service_client.CreateService(Service())
-
-#def test_get_services_empty(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-
-#def test_create_service_wrong_service_attributes(service_client : ServiceClient):
-#    # should fail with wrong service context
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service.cs_id.contextId.contextUuid.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service id
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['cs_id']['cs_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service.cs_id.cs_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['serviceType'] = ServiceType.UNKNOWN
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Method(CreateService) does not accept ServiceType(UNKNOWN). '\
-#          'Permitted values for Method(CreateService) are '\
-#          'ServiceType([\'L2NM\', \'L3NM\', \'TAPI_CONNECTIVITY_SERVICE\']).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong service state
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['serviceState']['serviceState'] = ServiceStateEnum.PENDING_REMOVAL
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Method(CreateService) does not accept ServiceState(PENDING_REMOVAL). '\
-#          'Permitted values for Method(CreateService) are '\
-#          'ServiceState([\'PLANNED\']).'
-#    assert e.value.details() == msg
-#
-#def test_create_service_wrong_constraint(service_client : ServiceClient):
-#    # should fail with wrong constraint type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][0]['constraint_type'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'constraint[#0].constraint_type() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong constraint value
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][0]['constraint_value'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'constraint[#0].constraint_value() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with dupplicated constraint type
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['constraint'][1] = copy_service['constraint'][0]
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Duplicated ConstraintType(latency_ms) in Constraint(#1) of Context(admin)/Service(DEV1).'
-#    assert e.value.details() == msg
-#
-#def test_create_service_wrong_endpoint(service_client : ServiceClient, database : Database):
-#    # should fail with wrong endpoint context
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        print(copy_service)
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Context(wrong-context) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) mismatches acceptable Contexts({\'admin\'}). '\
-#          'Optionally, leave field empty to use predefined Context(admin).'
-#    assert e.value.details() == msg
-#
-#    # should fail with wrong endpoint topology
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = 'wrong-topo'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Context(admin)/Topology(wrong-topo) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) mismatches acceptable Topologies({\'admin\'}). '\
-#          'Optionally, leave field empty to use predefined Topology(admin).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'endpoint_id[#0].dev_id.device_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device not found
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['dev_id']['device_id']['uuid'] = 'wrong-device'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Topology(admin)/Device(wrong-device) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint device duplicated
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][1] = copy_service['endpointList'][0]
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'Duplicated Context(admin)/Topology(admin)/Device(DEV1) in Endpoint(#1) of '\
-#          'Context(admin)/Service(DEV1).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint port is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['port_id']['uuid'] = ''
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'endpoint_id[#0].port_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with endpoint port not found
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service = copy.deepcopy(SERVICE)
-#        copy_service['endpointList'][0]['port_id']['uuid'] = 'wrong-port'
-#        service_client.CreateService(Service(**copy_service))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Topology(admin)/Device(DEV1)/Port(wrong-port) in Endpoint(#0) of '\
-#          'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_get_service_does_not_exist(service_client : ServiceClient):
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.GetServiceById(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with service does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.GetServiceById(ServiceId(**SERVICE_ID))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_update_service_does_not_exist(service_client : ServiceClient):
-#    # should fail with service does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.UpdateService(Service(**SERVICE))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(DEV1) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_create_service(service_client : ServiceClient):
-#    # should work
-#    validate_service_id(MessageToDict(
-#        service_client.CreateService(Service(**SERVICE)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_create_service_already_exists(service_client : ServiceClient):
-#    # should fail with service already exists
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        service_client.CreateService(Service(**SERVICE))
-#    assert e.value.code() == grpc.StatusCode.ALREADY_EXISTS
-#    msg = 'Context(admin)/Service(DEV1) already exists in the database.'
-#    assert e.value.details() == msg
-#
-#def test_get_service(service_client : ServiceClient):
-#    # should work
-#    validate_service(MessageToDict(
-#        service_client.GetServiceById(ServiceId(**SERVICE_ID)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_update_service(service_client : ServiceClient):
-#    # should work
-#    copy_service = copy.deepcopy(SERVICE)
-#    copy_service['serviceConfig']['serviceConfig'] = '<newconfig/>'
-#    copy_service['serviceState']['serviceState'] = ServiceStateEnum.ACTIVE
-#    copy_service['constraint'] = [
-#        {'constraint_type': 'latency_ms', 'constraint_value': '200'},
-#        {'constraint_type': 'bandwidth_gbps', 'constraint_value': '100'},
-#    ]
-#    copy_service['endpointList'] = [
-#        {
-#            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-#            'dev_id': {'device_id': {'uuid': 'DEV1'}},
-#            'port_id': {'uuid' : 'EP5'}
-#        },
-#        {
-#            'topoId': {'contextId': {'contextUuid': {'uuid': 'admin'}}, 'topoId': {'uuid': 'admin'}},
-#            'dev_id': {'device_id': {'uuid': 'DEV2'}},
-#            'port_id': {'uuid' : 'EP6'}
-#        },
-#    ]
-#    validate_service_id(MessageToDict(
-#        service_client.UpdateService(Service(**copy_service)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_delete_service_wrong_service_id(service_client : ServiceClient):
-#    # should fail with service context is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = ''
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service_id.contextId.contextUuid.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service context does not exist
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['contextId']['contextUuid']['uuid'] = 'wrong-context'
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(wrong-context) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#    # should fail with service id is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['cs_id']['uuid'] = ''
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
-#    msg = 'service_id.cs_id.uuid() is out of range: '\
-#          'allow_empty(False) min_length(None) max_length(None) allowed_lengths(None).'
-#    assert e.value.details() == msg
-#
-#    # should fail with service id is empty
-#    with pytest.raises(grpc._channel._InactiveRpcError) as e:
-#        copy_service_id = copy.deepcopy(SERVICE_ID)
-#        copy_service_id['cs_id']['uuid'] = 'wrong-service'
-#        service_client.DeleteService(ServiceId(**copy_service_id))
-#    assert e.value.code() == grpc.StatusCode.NOT_FOUND
-#    msg = 'Context(admin)/Service(wrong-service) does not exist in the database.'
-#    assert e.value.details() == msg
-#
-#def test_delete_service(service_client : ServiceClient):
-#    # should work
-#    validate_empty(MessageToDict(
-#        service_client.DeleteService(ServiceId(**SERVICE_ID)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_get_services_empty_2(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_create_service_empty_endpoints(service_client : ServiceClient):
-#    # should work
-#    copy_service = copy.deepcopy(SERVICE)
-#    copy_service['endpointList'][0]['topoId']['contextId']['contextUuid']['uuid'] = ''
-#    copy_service['endpointList'][0]['topoId']['topoId']['uuid'] = ''
-#    validate_service_id(MessageToDict(
-#        service_client.CreateService(Service(**copy_service)),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
-#def test_get_services_full(service_client : ServiceClient):
-#    # should work
-#    validate_service_list_is_not_empty(MessageToDict(
-#        service_client.GetServiceList(Empty()),
-#        including_default_value_fields=True, preserving_proto_field_name=True,
-#        use_integers_for_enums=False))
-#
\ No newline at end of file
+def grpc_message_to_json_string(message):
+    return str(MessageToDict(
+        message, including_default_value_fields=True, preserving_proto_field_name=True, use_integers_for_enums=False))
+
+
+def test_prepare_environment(
+    context_client : ContextClient, device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+
+    context_client.SetContext(Context(**CONTEXT))
+    context_client.SetTopology(Topology(**TOPOLOGY))
+
+    DEVICE_EMU1_WITH_CONNECT_RULES = copy.deepcopy(DEVICE_EMU1)
+    DEVICE_EMU1_WITH_CONNECT_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONNECT_RULES)
+    device_client.AddDevice(Device(**DEVICE_EMU1_WITH_CONNECT_RULES))
+
+    DEVICE_EMU2_WITH_CONNECT_RULES = copy.deepcopy(DEVICE_EMU2)
+    DEVICE_EMU2_WITH_CONNECT_RULES['device_config']['config_rules'].extend(DEVICE_EMU_CONNECT_RULES)
+    device_client.AddDevice(Device(**DEVICE_EMU2_WITH_CONNECT_RULES))
+
+
+def test_service_create_error_cases(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    with pytest.raises(grpc.RpcError) as e:
+        SERVICE_WITH_ENDPOINTS = copy.deepcopy(SERVICE_L3NM_EMU)
+        SERVICE_WITH_ENDPOINTS['service_endpoint_ids'].extend(SERVICE_L3NM_EMU_ENDPOINT_IDS)
+        service_client.CreateService(Service(**SERVICE_WITH_ENDPOINTS))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'service.service_endpoint_ids(['
+    msg_tail = ']) is invalid; RPC method CreateService does not accept Endpoints. '\
+               'Endpoints should be configured after creating the service.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+    with pytest.raises(grpc.RpcError) as e:
+        SERVICE_WITH_CONFIG_RULES = copy.deepcopy(SERVICE_L3NM_EMU)
+        SERVICE_WITH_CONFIG_RULES['service_config']['config_rules'].extend(SERVICE_L3NM_EMU_CONFIG_RULES)
+        service_client.CreateService(Service(**SERVICE_WITH_CONFIG_RULES))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'service.service_config.config_rules(['
+    msg_tail = ']) is invalid; RPC method CreateService does not accept Config Rules. '\
+               'Config Rules should be configured after creating the service.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+    with pytest.raises(grpc.RpcError) as e:
+        SERVICE_WITH_CONSTRAINTS = copy.deepcopy(SERVICE_L3NM_EMU)
+        SERVICE_WITH_CONSTRAINTS['service_constraints'].extend(SERVICE_L3NM_EMU_CONSTRAINTS)
+        service_client.CreateService(Service(**SERVICE_WITH_CONSTRAINTS))
+    assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
+    msg_head = 'service.service_constraints(['
+    msg_tail = ']) is invalid; RPC method CreateService does not accept Constraints. '\
+               'Constraints should be configured after creating the service.'
+    except_msg = str(e.value.details())
+    assert except_msg.startswith(msg_head) and except_msg.endswith(msg_tail)
+
+
+def test_service_create_correct(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    service_client.CreateService(Service(**SERVICE_L3NM_EMU))
+    #driver : _Driver = device_service.driver_instance_cache.get(DEVICE_EMU_UUID) # we know the driver exists now
+    #assert driver is not None
+
+
+def test_service_get_created(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    service_data = context_client.GetService(ServiceId(**SERVICE_L3NM_EMU_ID))
+    LOGGER.info('service_data = {:s}'.format(grpc_message_to_json_string(service_data)))
+
+
+def test_service_update(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    # Configure
+    SERVICE_WITH_SETTINGS = copy.deepcopy(SERVICE_L3NM_EMU)
+    SERVICE_WITH_SETTINGS['service_endpoint_ids'].extend(SERVICE_L3NM_EMU_ENDPOINT_IDS)
+    SERVICE_WITH_SETTINGS['service_config']['config_rules'].extend(SERVICE_L3NM_EMU_CONFIG_RULES)
+    SERVICE_WITH_SETTINGS['service_constraints'].extend(SERVICE_L3NM_EMU_CONSTRAINTS)
+    service_client.UpdateService(Service(**SERVICE_WITH_SETTINGS))
+
+    for endpoint_id in SERVICE_L3NM_EMU_ENDPOINT_IDS:
+        device_id = endpoint_id['device_id']
+        device_data = context_client.GetDevice(DeviceId(**device_id))
+        for i,config_rule in enumerate(device_data.device_config.config_rules):
+            LOGGER.info('device_data[{:s}][#{:d}] => {:s}'.format(
+                str(device_id), i, grpc_message_to_json_string(config_rule)))
+
+    # Deconfigure
+    SERVICE_WITH_SETTINGS = copy.deepcopy(SERVICE_L3NM_EMU)
+    # no endpoint_ids are included, so the update below deconfigures the endpoints set previously
+    service_client.UpdateService(Service(**SERVICE_WITH_SETTINGS))
+
+    for endpoint_id in SERVICE_L3NM_EMU_ENDPOINT_IDS:
+        device_id = endpoint_id['device_id']
+        device_data = context_client.GetDevice(DeviceId(**device_id))
+        for i,config_rule in enumerate(device_data.device_config.config_rules):
+            LOGGER.info('device_data[{:s}][#{:d}] => {:s}'.format(
+                str(device_id), i, grpc_message_to_json_string(config_rule)))
+
+
+def test_service_get_updated(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    service_data = context_client.GetService(ServiceId(**SERVICE_L3NM_EMU_ID))
+    LOGGER.info('service_data = {:s}'.format(grpc_message_to_json_string(service_data)))
+
+
+def test_service_delete(
+    context_client : ContextClient,     # pylint: disable=redefined-outer-name
+    service_client : ServiceClient,     # pylint: disable=redefined-outer-name
+    service_service : ServiceService):  # pylint: disable=redefined-outer-name
+
+    service_client.DeleteService(ServiceId(**SERVICE_L3NM_EMU_ID))
+    #driver : _Driver = device_service.driver_instance_cache.get(DEVICE_EMU_UUID, {})
+    #assert driver is None