diff --git a/src/pathcomp/frontend/service/__main__.py b/src/pathcomp/frontend/service/__main__.py
index a41b9e994f02db725c4adf371d9638fd5135693e..24a5b77418839153839a4fcf92f1a2b42d9cf91d 100644
--- a/src/pathcomp/frontend/service/__main__.py
+++ b/src/pathcomp/frontend/service/__main__.py
@@ -31,7 +31,7 @@ def main():
     global LOGGER # pylint: disable=global-statement
 
     log_level = get_log_level()
-    logging.basicConfig(level=log_level)
+    logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
     LOGGER = logging.getLogger(__name__)
 
     wait_for_environment_variables([
diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py
index 1e676b0aec93d083217bd53ca8e078b00ddf8376..8a6d28610db64bbcadfe64c1d9e4e40bfbd8dbdb 100644
--- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py
+++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py
@@ -12,17 +12,75 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import copy
+import operator
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Dict, List, Optional, Set, Tuple, Union
-from common.proto.context_pb2 import Link
-from common.proto.pathcomp_pb2 import Algorithm_KDisjointPath, Algorithm_KShortestPath, PathCompReply
+from common.proto.context_pb2 import Connection, Link, Service
+from common.proto.pathcomp_pb2 import Algorithm_KDisjointPath, Algorithm_KShortestPath, PathCompReply, PathCompRequest
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from pathcomp.frontend.service.algorithms.tools.ComputeSubServices import convert_explicit_path_hops_to_connections
+from pathcomp.frontend.service.algorithms.tools.EroPathToHops import eropath_to_hops
 from ._Algorithm import _Algorithm
 from .KShortestPathAlgorithm import KShortestPathAlgorithm
 
+Service_Id          = Tuple[str, str]   # (context_uuid, service_uuid)
+Service_Constraints = Dict[str, str]    # {constraint_type => constraint_value}
+Endpoint_Id         = Tuple[str, str]   # (device_uuid, endpoint_uuid)
+Endpoint_Details    = Dict[str, Union[str, int]]    # {'site_id': str, 'priority': int}
+Service_Endpoints   = Dict[Endpoint_Id, Endpoint_Details]
+Service_Details     = Tuple[int, Service_Constraints, Service_Endpoints]
+Services_Details    = Dict[Service_Id, Service_Details]
+
+CUSTOM_CONSTRAINTS = {'bandwidth[gbps]', 'latency[ms]', 'jitter[us]'}
+
+DUMP_EXECUTION_STEPS = False
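+
+# Illustrative content of services_details as gathered by add_service_requests() (hypothetical values):
+#   {
+#       ('admin', 'svc-uuid-1'): (1,   # ServiceTypeEnum.SERVICETYPE_L3NM
+#           {'bandwidth[gbps]': '10.0', 'latency[ms]': '20.0'},
+#           {('CS1-GW1', '10/1'): {'site_id': 'CS1', 'priority': 10},
+#            ('CS1-GW2', '10/1'): {'site_id': 'CS1', 'priority': 20}}),
+#   }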
+
 class KDisjointPathAlgorithm(_Algorithm):
     def __init__(self, algorithm : Algorithm_KDisjointPath, class_name=__name__) -> None:
         super().__init__('KDP', False, class_name=class_name)
         self.num_disjoint = algorithm.num_disjoint
+        self.services_details : Services_Details = dict()
+
+    def add_service_requests(self, request: PathCompRequest) -> None:
+        super().add_service_requests(request)
+        for service in request.services:
+            service_id = service.service_id
+            context_uuid = service_id.context_id.context_uuid.uuid
+            service_uuid = service_id.service_uuid.uuid
+            service_key = (context_uuid, service_uuid)
+
+            constraints = dict()
+            endpoints = dict()
+            service_details = (int(service.service_type), constraints, endpoints)
+            self.services_details.setdefault(service_key, service_details)
+
+            for constraint in service.service_constraints:
+                if constraint.WhichOneof('constraint') == 'custom':
+                    constraint_type = constraint.custom.constraint_type
+                    if constraint_type not in CUSTOM_CONSTRAINTS: continue
+                    constraint_value = constraint.custom.constraint_value
+                    constraints[constraint_type] = constraint_value
+
+                if constraint.WhichOneof('constraint') == 'endpoint_location':
+                    endpoint_id = constraint.endpoint_location.endpoint_id
+                    device_uuid = endpoint_id.device_id.device_uuid.uuid
+                    endpoint_uuid = endpoint_id.endpoint_uuid.uuid
+                    location_kind = constraint.endpoint_location.location.WhichOneof('location')
+                    if location_kind != 'region':
+                        MSG = 'Unsupported LocationType({:s}) in Constraint({:s})'
+                        raise Exception(MSG.format(location_kind, grpc_message_to_json_string(constraint)))
+                    site_id = constraint.endpoint_location.location.region
+                    endpoints.setdefault((device_uuid, endpoint_uuid), dict())['site_id'] = site_id
+
+                if constraint.WhichOneof('constraint') == 'endpoint_priority':
+                    endpoint_id = constraint.endpoint_priority.endpoint_id
+                    device_uuid = endpoint_id.device_id.device_uuid.uuid
+                    endpoint_uuid = endpoint_id.endpoint_uuid.uuid
+                    priority = constraint.endpoint_priority.priority
+                    endpoints.setdefault((device_uuid, endpoint_uuid), dict())['priority'] = priority
+
+            # TODO: ensure these constraints are provided in the request
+            if 'bandwidth[gbps]' not in constraints: constraints['bandwidth[gbps]'] = '20.0'
+            if 'latency[ms]' not in constraints: constraints['latency[ms]'] = '20.0'
 
     def get_link_from_endpoint(self, endpoint : Dict) -> Tuple[Dict, Link]:
         device_uuid = endpoint['device_id']
@@ -50,8 +108,8 @@ class KDisjointPathAlgorithm(_Algorithm):
     def remove_traversed_links(self, link_list : List[Dict], path_endpoints : List[Dict]):
         _, path_link_ids = self.path_to_links(path_endpoints)
         new_link_list = list(filter(lambda l: l['link_Id'] not in path_link_ids, link_list))
-        self.logger.info('cur_link_list = {:s}'.format(str(link_list)))
-        self.logger.info('new_link_list = {:s}'.format(str(new_link_list)))
+        #self.logger.info('cur_link_list = {:s}'.format(str(link_list)))
+        #self.logger.info('new_link_list = {:s}'.format(str(new_link_list)))
         return new_link_list
 
     def execute(self, dump_request_filename: Optional[str] = None, dump_reply_filename: Optional[str] = None) -> None:
@@ -63,18 +121,52 @@ class KDisjointPathAlgorithm(_Algorithm):
         algorithm.link_list = self.link_list
         algorithm.link_dict = self.link_dict
         algorithm.endpoint_to_link_dict = self.endpoint_to_link_dict
-        algorithm.service_list = self.service_list
-        algorithm.service_dict = self.service_dict
 
         Path = List[Dict]
         Path_NoPath = Optional[Path] # None = no path, list = path
         self.json_reply : Dict[Tuple[str, str], List[Path_NoPath]] = dict()
 
         for num_path in range(self.num_disjoint):
-            #dump_request_filename = 'ksp-{:d}-request.json'.format(num_path)
-            #dump_reply_filename   = 'ksp-{:d}-reply.txt'.format(num_path)
-            #algorithm.execute(dump_request_filename, dump_reply_filename)
-            algorithm.execute()
+            algorithm.service_list = list()
+            algorithm.service_dict = dict()
+
+            #self.logger.warning('services_details = {:s}'.format(str(self.services_details)))
+
+            _request = PathCompRequest()
+            for service_key, service_details in self.services_details.items():
+                service_type, constraints, endpoints = service_details
+                _service = _request.services.add()
+                _service.service_id.context_id.context_uuid.uuid = service_key[0]
+                _service.service_id.service_uuid.uuid = service_key[1]
+                _service.service_type = service_type
+                for constraint_type, constraint_value in constraints.items():
+                    constraint = _service.service_constraints.add()
+                    constraint.custom.constraint_type = constraint_type
+                    constraint.custom.constraint_value = constraint_value
+
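+                # For each site, keep only the not-yet-used endpoint with the lowest priority value; the
+                # chosen endpoint is popped from 'endpoints' so the next disjoint path picks a different one.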
+                site_to_endpoints : Dict[str, List[Tuple[Endpoint_Id, int]]] = {}
+                for endpoint_key,endpoint_details in endpoints.items():
+                    site_id = endpoint_details.get('site_id')
+                    if site_id is None: continue
+                    priority = endpoint_details.get('priority', 999)
+                    site_to_endpoints.setdefault(site_id, list()).append((endpoint_key, priority))
+
+                for site_id,site_endpoints in site_to_endpoints.items():
+                    pending_endpoints = sorted(site_endpoints, key=operator.itemgetter(1))
+                    if len(pending_endpoints) == 0: continue
+                    endpoint_key, _ = pending_endpoints[0]
+                    device_uuid, endpoint_uuid = endpoint_key
+                    endpoint_id = _service.service_endpoint_ids.add()
+                    endpoint_id.device_id.device_uuid.uuid = device_uuid
+                    endpoint_id.endpoint_uuid.uuid = endpoint_uuid
+                    endpoints.pop(endpoint_key)
+
+            algorithm.add_service_requests(_request)
+
+            dump_request_filename = 'ksp-{:d}-request.json'.format(num_path) if DUMP_EXECUTION_STEPS else None
+            dump_reply_filename   = 'ksp-{:d}-reply.txt'.format(num_path)    if DUMP_EXECUTION_STEPS else None
+            algorithm.execute(dump_request_filename, dump_reply_filename)
+
             response_list = algorithm.json_reply.get('response-list', [])
             for response in response_list:
                 service_id = response['serviceId']
@@ -94,10 +186,47 @@ class KDisjointPathAlgorithm(_Algorithm):
 
     def get_reply(self) -> PathCompReply:
         reply = PathCompReply()
+        grpc_services : Dict[Tuple[str, str], Service] = {}
+        grpc_connections : Dict[str, Connection] = {}
         for service_key,paths in self.json_reply.items():
-            grpc_service = self.add_service_to_reply(reply, service_key[0], service_key[1])
-            for path_endpoints in paths:
-                if path_endpoints is None: continue
-                grpc_connection = self.add_connection_to_reply(reply, grpc_service)
-                self.add_path_to_connection(grpc_connection, path_endpoints)
+            context_uuid, service_uuid = service_key
+
+            grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, service_uuid)
+
+            for num_path,service_path_ero in enumerate(paths):
+                if service_path_ero is None: continue
+                path_hops = eropath_to_hops(service_path_ero, self.endpoint_to_link_dict)
+                connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid)
+
+                for connection in connections:
+                    connection_uuid,device_layer,path_hops,_ = connection
+
+                    service_key = (context_uuid, connection_uuid)
+                    grpc_service = grpc_services.get(service_key)
+                    if grpc_service is not None: continue
+                    grpc_service = self.add_service_to_reply(
+                        reply, context_uuid, connection_uuid, device_layer=device_layer, path_hops=path_hops)
+                    grpc_services[service_key] = grpc_service
+
+                for connection in connections:
+                    connection_uuid,device_layer,path_hops,dependencies = connection
+
+                    service_key = (context_uuid, connection_uuid)
+                    grpc_service = grpc_services.get(service_key)
+                    if grpc_service is None: raise Exception('Service({:s}) not found'.format(str(service_key)))
+
+                    connection_uuid = '{:s}:{:d}'.format(connection_uuid, num_path)
+                    grpc_connection = grpc_connections.get(connection_uuid)
+                    if grpc_connection is not None: continue
+                    grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops)
+                    grpc_connections[connection_uuid] = grpc_connection
+
+                    for sub_service_uuid in dependencies:
+                        sub_service_key = (context_uuid, sub_service_uuid)
+                        grpc_sub_service = grpc_services.get(sub_service_key)
+                        if grpc_sub_service is None:
+                            raise Exception('Service({:s}) not found'.format(str(sub_service_key)))
+                        grpc_sub_service_id = grpc_connection.sub_service_ids.add()
+                        grpc_sub_service_id.CopyFrom(grpc_sub_service.service_id)
+
         return reply
diff --git a/src/pathcomp/frontend/service/algorithms/_Algorithm.py b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
index d4973f168dd7bce9fb830600c2d010fc238f3b48..bb96ff354ef32cb0a269d2b678fdb9552d86939d 100644
--- a/src/pathcomp/frontend/service/algorithms/_Algorithm.py
+++ b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
@@ -14,11 +14,14 @@
 
 import json, logging, requests, uuid
 from typing import Dict, List, Optional, Tuple
-from common.proto.context_pb2 import Connection, Device, DeviceList, EndPointId, Link, LinkList, Service
+from common.proto.context_pb2 import Connection, Device, DeviceList, EndPointId, Link, LinkList, Service, ServiceStatusEnum, ServiceTypeEnum
 from common.proto.pathcomp_pb2 import PathCompReply, PathCompRequest
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from pathcomp.frontend.Config import BACKEND_URL
+from pathcomp.frontend.service.algorithms.tools.ConstantsMappings import DEVICE_LAYER_TO_SERVICE_TYPE, DeviceLayerEnum
+from .tools.EroPathToHops import eropath_to_hops
 from .tools.ComposeRequest import compose_device, compose_link, compose_service
+from .tools.ComputeSubServices import convert_explicit_path_hops_to_connections
 
 class _Algorithm:
     def __init__(self, algorithm_id : str, sync_paths : bool, class_name=__name__) -> None:
@@ -85,60 +88,98 @@ class _Algorithm:
     def execute(self, dump_request_filename : Optional[str] = None, dump_reply_filename : Optional[str] = None) -> None:
         request = {'serviceList': self.service_list, 'deviceList': self.device_list, 'linkList': self.link_list}
 
+        self.logger.info('[execute] request={:s}'.format(str(request)))
         if dump_request_filename is not None:
             with open(dump_request_filename, 'w', encoding='UTF-8') as f:
                 f.write(json.dumps(request, sort_keys=True, indent=4))
 
+        self.logger.info('[execute] BACKEND_URL: {:s}'.format(str(BACKEND_URL)))
         reply = requests.post(BACKEND_URL, json=request)
         self.status_code = reply.status_code
         self.raw_reply = reply.content.decode('UTF-8')
 
+        self.logger.info('[execute] status_code={:s} reply={:s}'.format(str(reply.status_code), str(self.raw_reply)))
         if dump_reply_filename is not None:
             with open(dump_reply_filename, 'w', encoding='UTF-8') as f:
                 f.write('status_code={:s} reply={:s}'.format(str(self.status_code), str(self.raw_reply)))
 
-        self.logger.info('status_code={:s} reply={:s}'.format(str(reply.status_code), str(self.raw_reply)))
-
         if reply.status_code not in {requests.codes.ok}:
             raise Exception('Backend error({:s}) for request({:s})'.format(
                 str(self.raw_reply), json.dumps(request, sort_keys=True)))
         
         self.json_reply = reply.json()
 
-    def add_path_to_connection(self, connection : Connection, path_endpoints : List[Dict]) -> None:
-        for endpoint in path_endpoints:
-            device_uuid = endpoint['device_id']
-            endpoint_uuid = endpoint['endpoint_uuid']
-            endpoint_id = connection.path_hops_endpoint_ids.add()
-            endpoint_id.CopyFrom(self.endpoint_dict[device_uuid][endpoint_uuid][1])
-
-    def add_connection_to_reply(self, reply : PathCompReply, service : Service) -> Connection:
+    def add_connection_to_reply(
+        self, reply : PathCompReply, connection_uuid : str, service : Service, path_hops : List[Dict]
+    ) -> Connection:
         connection = reply.connections.add()
-        connection.connection_id.connection_uuid.uuid = str(uuid.uuid4())
+
+        connection.connection_id.connection_uuid.uuid = connection_uuid
         connection.service_id.CopyFrom(service.service_id)
-        return connection
 
-    def add_service_to_reply(self, reply : PathCompReply, context_uuid : str, service_uuid : str) -> Service:
-        service_key = (context_uuid, service_uuid)
-        tuple_service = self.service_dict.get(service_key)
-        if tuple_service is None: raise Exception('ServiceKey({:s}) not found'.format(str(service_key)))
-        _, grpc_service = tuple_service
+        for path_hop in path_hops:
+            device_uuid = path_hop['device']
 
-        # TODO: implement support for multi-point services
-        service_endpoint_ids = grpc_service.service_endpoint_ids
-        if len(service_endpoint_ids) != 2: raise NotImplementedError('Service must have 2 endpoints')
+            ingress_endpoint_uuid = path_hop['ingress_ep']
+            endpoint_id = connection.path_hops_endpoint_ids.add()
+            endpoint_id.CopyFrom(self.endpoint_dict[device_uuid][ingress_endpoint_uuid][1])
+
+            egress_endpoint_uuid = path_hop['egress_ep']
+            endpoint_id = connection.path_hops_endpoint_ids.add()
+            endpoint_id.CopyFrom(self.endpoint_dict[device_uuid][egress_endpoint_uuid][1])
+
+        return connection
 
-        service = reply.services.add()
-        service.CopyFrom(grpc_service)
+    def add_service_to_reply(
+        self, reply : PathCompReply, context_uuid : str, service_uuid : str,
+        device_layer : Optional[DeviceLayerEnum] = None, path_hops : List[Dict] = []
+    ) -> Service:
+        # TODO: implement support for multi-point services
+        # Check disabled to allow disjoint paths with multiple redundant endpoints on each side
+        #service_endpoint_ids = service.service_endpoint_ids
+        #if len(service_endpoint_ids) != 2: raise NotImplementedError('Service must have 2 endpoints')
 
-        return grpc_service
+        service_key = (context_uuid, service_uuid)
+        tuple_service = self.service_dict.get(service_key)
+        if tuple_service is not None:
+            service = reply.services.add()
+            service.CopyFrom(tuple_service[1])
+        else:
+            service = reply.services.add()
+            service.service_id.context_id.context_uuid.uuid = context_uuid
+            service.service_id.service_uuid.uuid = service_uuid
+
+            if device_layer is not None:
+                service_type = DEVICE_LAYER_TO_SERVICE_TYPE.get(device_layer.value)
+                if service_type is None:
+                    MSG = 'Unable to map DeviceLayer({:s}) to ServiceType'
+                    raise Exception(MSG.format(str(device_layer)))
+                service.service_type = service_type
+
+            service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
+
+            if path_hops is not None and len(path_hops) > 0:
+                ingress_endpoint_id = service.service_endpoint_ids.add()
+                ingress_endpoint_id.device_id.device_uuid.uuid = path_hops[0]['device']
+                ingress_endpoint_id.endpoint_uuid.uuid = path_hops[0]['ingress_ep']
+
+                egress_endpoint_id = service.service_endpoint_ids.add()
+                egress_endpoint_id.device_id.device_uuid.uuid = path_hops[-1]['device']
+                egress_endpoint_id.endpoint_uuid.uuid = path_hops[-1]['egress_ep']
+
+        return service
 
     def get_reply(self) -> PathCompReply:
         response_list = self.json_reply.get('response-list', [])
         reply = PathCompReply()
+        grpc_services : Dict[Tuple[str, str], Service] = {}
+        grpc_connections : Dict[str, Connection] = {}
         for response in response_list:
             service_id = response['serviceId']
-            grpc_service = self.add_service_to_reply(reply, service_id['contextId'], service_id['service_uuid'])
+            context_uuid = service_id['contextId']
+            service_uuid = service_id['service_uuid']
+            service_key = (context_uuid, service_uuid)
+            grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, service_uuid)
 
             no_path_issue = response.get('noPath', {}).get('issue')
             if no_path_issue is not None:
@@ -146,9 +187,38 @@ class _Algorithm:
                 # no_path_issue == 1 => no path due to a constraint
                 continue
 
-            for service_path in response['path']:
-                grpc_connection = self.add_connection_to_reply(reply, grpc_service)
-                self.add_path_to_connection(grpc_connection, service_path['devices'])
+            for service_path_ero in response['path']:
+                path_hops = eropath_to_hops(service_path_ero['devices'], self.endpoint_to_link_dict)
+                connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid)
+
+                for connection in connections:
+                    connection_uuid,device_layer,path_hops,_ = connection
+                    service_key = (context_uuid, connection_uuid)
+                    grpc_service = grpc_services.get(service_key)
+                    if grpc_service is None:
+                        grpc_service = self.add_service_to_reply(
+                            reply, context_uuid, connection_uuid, device_layer=device_layer, path_hops=path_hops)
+                        grpc_services[service_key] = grpc_service
+
+                for connection in connections:
+                    connection_uuid,device_layer,path_hops,dependencies = connection
+
+                    service_key = (context_uuid, connection_uuid)
+                    grpc_service = grpc_services.get(service_key)
+                    if grpc_service is None: raise Exception('Service({:s}) not found'.format(str(service_key)))
+
+                    grpc_connection = grpc_connections.get(connection_uuid)
+                    if grpc_connection is not None: continue
+                    grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops)
+                    grpc_connections[connection_uuid] = grpc_connection
+
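+                    # Reference the underlying sub-services (e.g. an optical connectivity service) this
+                    # connection depends on through its sub_service_ids field.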
+                    for sub_service_uuid in dependencies:
+                        sub_service_key = (context_uuid, sub_service_uuid)
+                        grpc_sub_service = grpc_services.get(sub_service_key)
+                        if grpc_sub_service is None:
+                            raise Exception('Service({:s}) not found'.format(str(sub_service_key)))
+                        grpc_sub_service_id = grpc_connection.sub_service_ids.add()
+                        grpc_sub_service_id.CopyFrom(grpc_sub_service.service_id)
 
                 # ... "path-capacity": {"total-size": {"value": 200, "unit": 0}},
                 # ... "path-latency": {"fixed-latency-characteristic": "10.000000"},
diff --git a/src/pathcomp/frontend/service/algorithms/tools/ComposeRequest.py b/src/pathcomp/frontend/service/algorithms/tools/ComposeRequest.py
index 706906b36d02d3ac5d8bcf05f4ae8d2e1243a561..5c3461d89f63cdc7ab495d8f308500db0904d986 100644
--- a/src/pathcomp/frontend/service/algorithms/tools/ComposeRequest.py
+++ b/src/pathcomp/frontend/service/algorithms/tools/ComposeRequest.py
@@ -12,18 +12,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 from typing import Dict
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
 from common.proto.context_pb2 import Constraint, Device, EndPointId, Link, Service, ServiceId, TopologyId
 from common.tools.grpc.Tools import grpc_message_to_json_string
-from .Constants import CapacityUnit, LinkForwardingDirection, LinkPortDirection, TerminationDirection, TerminationState
+from .ConstantsMappings import (
+    CapacityUnit, LinkForwardingDirection, LinkPortDirection, TerminationDirection, TerminationState)
+
+LOGGER = logging.getLogger(__name__)
 
 def compose_topology_id(topology_id : TopologyId) -> Dict:
     context_uuid = topology_id.context_id.context_uuid.uuid
     topology_uuid = topology_id.topology_uuid.uuid
+
+    if len(context_uuid) == 0: context_uuid = DEFAULT_CONTEXT_UUID
+    if len(topology_uuid) == 0: topology_uuid = DEFAULT_TOPOLOGY_UUID
+
     return {'contextId': context_uuid, 'topology_uuid': topology_uuid}
 
 def compose_service_id(service_id : ServiceId) -> Dict:
     context_uuid = service_id.context_id.context_uuid.uuid
+
+    if len(context_uuid) == 0: context_uuid = DEFAULT_CONTEXT_UUID
+
     service_uuid = service_id.service_uuid.uuid
     return {'contextId': context_uuid, 'service_uuid': service_uuid}
 
@@ -54,9 +66,9 @@ def compose_latency_characteristics(fixed_latency_characteristic : str) -> Dict:
 
 def compose_constraint(constraint : Constraint) -> Dict:
     if constraint.WhichOneof('constraint') != 'custom':
-        MSG = 'Constraint({:s}) not supported'
         str_constraint = grpc_message_to_json_string(constraint)
-        raise NotImplementedError(MSG.format(str_constraint))
+        LOGGER.warning('Ignoring unsupported Constraint({:s})'.format(str_constraint))
+        return None
     constraint_type = constraint.custom.constraint_type
     constraint_value = constraint.custom.constraint_value
     return {'constraint_type': constraint_type, 'constraint_value': constraint_value}
@@ -110,10 +122,10 @@ def compose_service(grpc_service : Service) -> Dict:
         for service_endpoint_id in grpc_service.service_endpoint_ids
     ]
 
-    constraints = [
+    constraints = list(filter(lambda constraint: constraint is not None, [
         compose_constraint(service_constraint)
         for service_constraint in grpc_service.service_constraints
-    ]
+    ]))
 
     return {
         'serviceId': service_id,
diff --git a/src/pathcomp/frontend/service/algorithms/tools/ComputeSubServices.py b/src/pathcomp/frontend/service/algorithms/tools/ComputeSubServices.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2c66cb24ca3c15c71f22dbe4eeca634e18d985a
--- /dev/null
+++ b/src/pathcomp/frontend/service/algorithms/tools/ComputeSubServices.py
@@ -0,0 +1,96 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Convert the path defined as explicit hops with ingress and egress endpoints per device into a set of connections and
+# compute the dependencies among them.
+#
+# Example:
+# o-- int DC1 eth1 -- 10/1 CS1 1/2 -- 1/2 R2 2/1 -- a7.. OLS 60.. -- 2/1 R3 1/1 -- 1/1 CS2 10/1 -- eth1 DC2 int --o
+#         APP              PKT            PKT            CTRL            PKT           PKT              APP
+#
+# path_hops = [
+#     {'device': 'DC1-GW', 'ingress_ep': 'int', 'egress_ep': 'eth1'},
+#     {'device': 'CS1-GW1', 'ingress_ep': '10/1', 'egress_ep': '1/2'},
+#     {'device': 'TN-R2', 'ingress_ep': '1/2', 'egress_ep': '2/1'},
+#     {'device': 'TN-OLS', 'ingress_ep': 'a7a80b23a703', 'egress_ep': '60519106029e'},
+#     {'device': 'TN-R3', 'ingress_ep': '2/1', 'egress_ep': '1/1'},
+#     {'device': 'CS2-GW1', 'ingress_ep': '1/1', 'egress_ep': '10/1'},
+#     {'device': 'DC2-GW', 'ingress_ep': 'eth1', 'egress_ep': 'int'}
+# ]
+#
+# connections=[
+#     (UUID('7548edf7-ee7c-4adf-ac0f-c7a0c0dfba8e'), <DeviceLayerEnum.OPTICAL_CONTROLLER: 1>, [
+#             {'device': 'TN-OLS', 'ingress_ep': '833760219d0f', 'egress_ep': 'cf176771a4b9'}
+#         ], []),
+#     (UUID('c2e57966-5d82-4705-a5fe-44cf6487219e'), <DeviceLayerEnum.PACKET_DEVICE: 30>, [
+#             {'device': 'CS1-GW1', 'ingress_ep': '10/1', 'egress_ep': '1/2'},
+#             {'device': 'TN-R2', 'ingress_ep': '1/2', 'egress_ep': '2/1'},
+#             {'device': 'TN-R3', 'ingress_ep': '2/1', 'egress_ep': '1/1'},
+#             {'device': 'CS2-GW1', 'ingress_ep': '1/1', 'egress_ep': '10/1'}
+#         ], [UUID('7548edf7-ee7c-4adf-ac0f-c7a0c0dfba8e')]),
+#     (UUID('1e205c82-f6ea-4977-9e97-dc27ef1f4802'), <DeviceLayerEnum.APPLICATION_DEVICE: 40>, [
+#             {'device': 'DC1-GW', 'ingress_ep': 'int', 'egress_ep': 'eth1'},
+#             {'device': 'DC2-GW', 'ingress_ep': 'eth1', 'egress_ep': 'int'}
+#         ], [UUID('c2e57966-5d82-4705-a5fe-44cf6487219e')])
+# ]
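+#
+# Illustrative usage (a sketch; path_hops/device_dict as composed by _Algorithm, names are hypothetical):
+#   connections = convert_explicit_path_hops_to_connections(path_hops, device_dict, main_service_uuid)
+#   for connection_uuid, device_layer, conn_path_hops, dependencies in connections:
+#       ...  # one Connection (and possibly one sub-Service) per tuple; 'dependencies' lists the
+#            # connection_uuids of the underlying connections it relies on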
+
+import queue, uuid
+from typing import Dict, List, Tuple
+from common.proto.context_pb2 import Device
+from .ConstantsMappings import DEVICE_TYPE_TO_LAYER, DeviceLayerEnum
+
+def convert_explicit_path_hops_to_connections(
+    path_hops : List[Dict], device_dict : Dict[str, Tuple[Dict, Device]], main_connection_uuid : str
+) -> List[Tuple[str, DeviceLayerEnum, List[str], List[str]]]:
+
+    connection_stack = queue.LifoQueue()
+    connections : List[Tuple[str, DeviceLayerEnum, List[str], List[str]]] = list()
+    old_device_layer = None
+    last_device_uuid = None
+    for path_hop in path_hops:
+        device_uuid = path_hop['device']
+        if last_device_uuid == device_uuid: continue
+        device_tuple = device_dict.get(device_uuid)
+        if device_tuple is None: raise Exception('Device({:s}) not found'.format(str(device_uuid)))
+        json_device,_ = device_tuple
+        device_type = json_device['device_type']
+        device_layer = DEVICE_TYPE_TO_LAYER.get(device_type)
+        if device_layer is None: raise Exception('Undefined Layer for DeviceType({:s})'.format(str(device_type)))
+
+        if old_device_layer is None:
+            # path ingress
+            connection_stack.put((main_connection_uuid, device_layer, [path_hop], []))
+        elif old_device_layer > device_layer:
+            # underlying connection begins
+            connection_uuid = str(uuid.uuid4())
+            connection_stack.put((connection_uuid, device_layer, [path_hop], []))
+        elif old_device_layer == device_layer:
+            # same connection continues
+            connection_stack.queue[-1][2].append(path_hop)
+        elif old_device_layer < device_layer:
+            # underlying connection ended
+            connection = connection_stack.get()
+            connections.append(connection)
+            connection_stack.queue[-1][3].append(connection[0])
+            connection_stack.queue[-1][2].append(path_hop)
+        else:
+            raise Exception('Uncontrolled condition')
+
+        old_device_layer = device_layer
+        last_device_uuid = device_uuid
+
+    # path egress
+    connections.append(connection_stack.get())
+    assert connection_stack.empty()
+    return connections
diff --git a/src/pathcomp/frontend/service/algorithms/tools/Constants.py b/src/pathcomp/frontend/service/algorithms/tools/Constants.py
deleted file mode 100644
index cb774669c97144fa65afdaf0f3373c67a67c3212..0000000000000000000000000000000000000000
--- a/src/pathcomp/frontend/service/algorithms/tools/Constants.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from enum import IntEnum
-
-class CapacityUnit(IntEnum):
-    TB   = 0
-    TBPS = 1
-    GB   = 2
-    GBPS = 3
-    MB   = 4
-    MBPS = 5
-    KB   = 6
-    KBPS = 7
-    GHZ  = 8
-    MHZ  = 9
-
-CAPACITY_MULTIPLIER = {
-    CapacityUnit.TB   : 1.e12,
-    CapacityUnit.TBPS : 1.e12,
-    CapacityUnit.GB   : 1.e9,
-    CapacityUnit.GBPS : 1.e9,
-    CapacityUnit.MB   : 1.e6,
-    CapacityUnit.MBPS : 1.e6,
-    CapacityUnit.KB   : 1.e3,
-    CapacityUnit.KBPS : 1.e3,
-    CapacityUnit.GHZ  : 1.e9,
-    CapacityUnit.MHZ  : 1.e6,
-}
-
-class LinkPortDirection(IntEnum):
-    BIDIRECTIONAL = 0
-    INPUT         = 1
-    OUTPUT        = 2
-    UNKNOWN       = 3
-
-class TerminationDirection(IntEnum):
-    BIDIRECTIONAL = 0
-    SINK          = 1
-    SOURCE        = 2
-    UNKNOWN       = 3
-
-class TerminationState(IntEnum):
-    CAN_NEVER_TERMINATE         = 0
-    NOT_TERMINATED              = 1
-    TERMINATED_SERVER_TO_CLIENT = 2
-    TERMINATED_CLIENT_TO_SERVER = 3
-    TERMINATED_BIDIRECTIONAL    = 4
-    PERMENANTLY_TERMINATED      = 5
-    TERMINATION_STATE_UNKNOWN   = 6
-
-class LinkForwardingDirection(IntEnum):
-    BIDIRECTIONAL  = 0
-    UNIDIRECTIONAL = 1
-    UNKNOWN        = 2
diff --git a/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py b/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e4f5408398cca012dca52fb19bf11a2b84a5721
--- /dev/null
+++ b/src/pathcomp/frontend/service/algorithms/tools/ConstantsMappings.py
@@ -0,0 +1,105 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from enum import IntEnum
+from common.DeviceTypes import DeviceTypeEnum
+from common.proto.context_pb2 import ServiceTypeEnum
+
+class CapacityUnit(IntEnum):
+    TB   = 0
+    TBPS = 1
+    GB   = 2
+    GBPS = 3
+    MB   = 4
+    MBPS = 5
+    KB   = 6
+    KBPS = 7
+    GHZ  = 8
+    MHZ  = 9
+
+CAPACITY_MULTIPLIER = {
+    CapacityUnit.TB   : 1.e12,
+    CapacityUnit.TBPS : 1.e12,
+    CapacityUnit.GB   : 1.e9,
+    CapacityUnit.GBPS : 1.e9,
+    CapacityUnit.MB   : 1.e6,
+    CapacityUnit.MBPS : 1.e6,
+    CapacityUnit.KB   : 1.e3,
+    CapacityUnit.KBPS : 1.e3,
+    CapacityUnit.GHZ  : 1.e9,
+    CapacityUnit.MHZ  : 1.e6,
+}
+
+class LinkPortDirection(IntEnum):
+    BIDIRECTIONAL = 0
+    INPUT         = 1
+    OUTPUT        = 2
+    UNKNOWN       = 3
+
+class TerminationDirection(IntEnum):
+    BIDIRECTIONAL = 0
+    SINK          = 1
+    SOURCE        = 2
+    UNKNOWN       = 3
+
+class TerminationState(IntEnum):
+    CAN_NEVER_TERMINATE         = 0
+    NOT_TERMINATED              = 1
+    TERMINATED_SERVER_TO_CLIENT = 2
+    TERMINATED_CLIENT_TO_SERVER = 3
+    TERMINATED_BIDIRECTIONAL    = 4
+    PERMENANTLY_TERMINATED      = 5
+    TERMINATION_STATE_UNKNOWN   = 6
+
+class LinkForwardingDirection(IntEnum):
+    BIDIRECTIONAL  = 0
+    UNIDIRECTIONAL = 1
+    UNKNOWN        = 2
+
+class DeviceLayerEnum(IntEnum):
+    APPLICATION_CONTROLLER = 41     # Layer 4 domain controller
+    APPLICATION_DEVICE     = 40     # Layer 4 domain device
+    PACKET_CONTROLLER      = 31     # Layer 3 domain controller
+    PACKET_DEVICE          = 30     # Layer 3 domain device
+    MAC_LAYER_CONTROLLER   = 21     # Layer 2 domain controller
+    MAC_LAYER_DEVICE       = 20     # Layer 2 domain device
+    OPTICAL_CONTROLLER     =  1     # Layer 0 domain controller
+    OPTICAL_DEVICE         =  0     # Layer 0 domain device
+
+DEVICE_TYPE_TO_LAYER = {
+    DeviceTypeEnum.EMULATED_DATACENTER.value      : DeviceLayerEnum.APPLICATION_DEVICE,
+    DeviceTypeEnum.DATACENTER.value               : DeviceLayerEnum.APPLICATION_DEVICE,
+
+    DeviceTypeEnum.EMULATED_PACKET_ROUTER.value   : DeviceLayerEnum.PACKET_DEVICE,
+    DeviceTypeEnum.PACKET_ROUTER.value            : DeviceLayerEnum.PACKET_DEVICE,
+    DeviceTypeEnum.PACKET_SWITCH.value            : DeviceLayerEnum.MAC_LAYER_DEVICE,
+    DeviceTypeEnum.P4_SWITCH.value                : DeviceLayerEnum.MAC_LAYER_DEVICE,
+
+    DeviceTypeEnum.MICROVAWE_RADIO_SYSTEM.value   : DeviceLayerEnum.MAC_LAYER_CONTROLLER,
+
+    DeviceTypeEnum.EMULATED_OPEN_LINE_SYSTEM.value: DeviceLayerEnum.OPTICAL_CONTROLLER,
+    DeviceTypeEnum.OPEN_LINE_SYSTEM.value         : DeviceLayerEnum.OPTICAL_CONTROLLER,
+
+    DeviceTypeEnum.OPTICAL_ROADM.value            : DeviceLayerEnum.OPTICAL_DEVICE,
+    DeviceTypeEnum.OPTICAL_TRANDPONDER.value      : DeviceLayerEnum.OPTICAL_DEVICE,
+}
+
+DEVICE_LAYER_TO_SERVICE_TYPE = {
+    DeviceLayerEnum.APPLICATION_DEVICE.value: ServiceTypeEnum.SERVICETYPE_L3NM,
+
+    DeviceLayerEnum.PACKET_DEVICE.value     : ServiceTypeEnum.SERVICETYPE_L3NM,
+    DeviceLayerEnum.MAC_LAYER_DEVICE.value  : ServiceTypeEnum.SERVICETYPE_L2NM,
+
+    DeviceLayerEnum.OPTICAL_CONTROLLER.value: ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE,
+}
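+
+# Illustrative lookups (a sketch; enum members come from common.DeviceTypes and context_pb2):
+#   DEVICE_TYPE_TO_LAYER[DeviceTypeEnum.PACKET_ROUTER.value]          -> DeviceLayerEnum.PACKET_DEVICE
+#   DEVICE_LAYER_TO_SERVICE_TYPE[DeviceLayerEnum.PACKET_DEVICE.value] -> ServiceTypeEnum.SERVICETYPE_L3NM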
diff --git a/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py b/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py
new file mode 100644
index 0000000000000000000000000000000000000000..021940937c23a7cb461a603aa32a15f16626eb1d
--- /dev/null
+++ b/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py
@@ -0,0 +1,76 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Convert the Explicit Route Object (ERO)-like paths produced by the PathComp component (response['path']) into
+# explicit hops with ingress and egress endpoints per device (path_hops).
+#
+# response['path'] = [{
+#     'path-capacity': {'total-size': {'value': 200, 'unit': 0}},
+#     'path-latency': {'fixed-latency-characteristic': '12.000000'},
+#     'path-cost': {'cost-name': '', 'cost-value': '6.000000', 'cost-algorithm': '0.000000'},
+#     'devices': [
+#         {'device_id': 'DC1-GW', 'endpoint_uuid': 'int'},
+#         {'device_id': 'DC1-GW', 'endpoint_uuid': 'eth1'},
+#         {'device_id': 'CS1-GW1', 'endpoint_uuid': '1/2'},
+#         {'device_id': 'TN-R2', 'endpoint_uuid': '2/1'},
+#         {'device_id': 'TN-OLS', 'endpoint_uuid': 'ca46812e8ad7'},
+#         {'device_id': 'TN-R3', 'endpoint_uuid': '1/1'},
+#         {'device_id': 'CS2-GW1', 'endpoint_uuid': '10/1'},
+#         {'device_id': 'DC2-GW', 'endpoint_uuid': 'int'}
+#     ]
+# }]
+#
+# path_hops = [
+#   {'device': 'DC1-GW', 'ingress_ep': 'int', 'egress_ep': 'eth1'},
+#   {'device': 'CS1-GW1', 'ingress_ep': '10/1', 'egress_ep': '1/2'},
+#   {'device': 'TN-R2', 'ingress_ep': '1/2', 'egress_ep': '2/1'},
+#   {'device': 'TN-OLS', 'ingress_ep': '951f2f57e4a4', 'egress_ep': 'ca46812e8ad7'},
+#   {'device': 'TN-R3', 'ingress_ep': '2/1', 'egress_ep': '1/1'},
+#   {'device': 'CS2-GW1', 'ingress_ep': '1/1', 'egress_ep': '10/1'},
+#   {'device': 'DC2-GW', 'ingress_ep': 'eth1', 'egress_ep': 'int'}
+# ]
+#
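+# Illustrative usage (a sketch; arguments as available in _Algorithm.get_reply):
+#   path_hops = eropath_to_hops(service_path_ero['devices'], self.endpoint_to_link_dict)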
+
+from typing import Dict, List
+
+def eropath_to_hops(ero_path : List[Dict], endpoint_to_link_dict : Dict) -> List[Dict]:
+    path_hops = []
+    for endpoint in ero_path:
+        device_uuid = endpoint['device_id']
+        endpoint_uuid = endpoint['endpoint_uuid']
+
+        if len(path_hops) == 0:
+            path_hops.append({'device': device_uuid, 'ingress_ep': endpoint_uuid})
+            continue
+
+        last_hop = path_hops[-1]
+        if (last_hop['device'] == device_uuid):
+            if ('ingress_ep' not in last_hop) or ('egress_ep' in last_hop): continue
+            last_hop['egress_ep'] = endpoint_uuid
+            continue
+
+        endpoint_key = (last_hop['device'], last_hop['egress_ep'])
+        link_tuple = endpoint_to_link_dict.get(endpoint_key)
+        if link_tuple is None: raise Exception('Malformed path')
+        ingress = next(iter([
+            ep_id for ep_id in link_tuple[0]['link_endpoint_ids']
+            if (ep_id['endpoint_id']['device_id'] == device_uuid) and \
+               (ep_id['endpoint_id']['endpoint_uuid'] != endpoint_uuid)
+        ]), None)
+        if ingress is None: raise Exception('Malformed path')
+        path_hops.append({
+            'device': ingress['endpoint_id']['device_id'],
+            'ingress_ep': ingress['endpoint_id']['endpoint_uuid'],
+            'egress_ep': endpoint_uuid,
+        })
+    return path_hops
diff --git a/src/pathcomp/frontend/tests/MockService_Dependencies.py b/src/pathcomp/frontend/tests/MockService_Dependencies.py
index b5fe85aa9cec8dd3e3993493abf8a26956a1a886..16ff9a5efca5827fdb531dad74aabff29507a580 100644
--- a/src/pathcomp/frontend/tests/MockService_Dependencies.py
+++ b/src/pathcomp/frontend/tests/MockService_Dependencies.py
@@ -17,18 +17,12 @@ from typing import Union
 from common.Constants import ServiceNameEnum
 from common.Settings import ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name
 from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
-from common.proto.device_pb2_grpc import add_DeviceServiceServicer_to_server
-from common.proto.service_pb2_grpc import add_ServiceServiceServicer_to_server
 from common.tests.MockServicerImpl_Context import MockServicerImpl_Context
-from common.tests.MockServicerImpl_Device import MockServicerImpl_Device
-from common.tests.MockServicerImpl_Service import MockServicerImpl_Service
 from common.tools.service.GenericGrpcService import GenericGrpcService
 
 LOCAL_HOST = '127.0.0.1'
 
 SERVICE_CONTEXT = ServiceNameEnum.CONTEXT
-SERVICE_DEVICE  = ServiceNameEnum.DEVICE
-SERVICE_SERVICE = ServiceNameEnum.SERVICE
 
 class MockService_Dependencies(GenericGrpcService):
     # Mock Service implementing Context, Device, and Service to simplify unitary tests of PathComp
@@ -41,18 +35,6 @@ class MockService_Dependencies(GenericGrpcService):
         self.context_servicer = MockServicerImpl_Context()
         add_ContextServiceServicer_to_server(self.context_servicer, self.server)
 
-        self.device_servicer = MockServicerImpl_Device()
-        add_DeviceServiceServicer_to_server(self.device_servicer, self.server)
-
-        self.service_servicer = MockServicerImpl_Service()
-        add_ServiceServiceServicer_to_server(self.service_servicer, self.server)
-
     def configure_env_vars(self):
         os.environ[get_env_var_name(SERVICE_CONTEXT, ENVVAR_SUFIX_SERVICE_HOST     )] = str(self.bind_address)
         os.environ[get_env_var_name(SERVICE_CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(self.bind_port)
-
-        os.environ[get_env_var_name(SERVICE_DEVICE, ENVVAR_SUFIX_SERVICE_HOST     )] = str(self.bind_address)
-        os.environ[get_env_var_name(SERVICE_DEVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(self.bind_port)
-
-        os.environ[get_env_var_name(SERVICE_SERVICE, ENVVAR_SUFIX_SERVICE_HOST     )] = str(self.bind_address)
-        os.environ[get_env_var_name(SERVICE_SERVICE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(self.bind_port)
diff --git a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py
index edb468455ba4edf5ea63a545f2b1c78d0c2cad94..06e9bbbc715a85a2c0d979584c58b268bff687e6 100644
--- a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py
+++ b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py
@@ -13,19 +13,36 @@
 # limitations under the License.
 
 from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
-from common.tools.object_factory.Constraint import json_constraint
+from common.tools.object_factory.Constraint import json_constraint_custom
 from common.tools.object_factory.Context import json_context, json_context_id
-from common.tools.object_factory.Device import json_device_emulated_packet_router_disabled, json_device_id
+from common.tools.object_factory.Device import (
+    json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled,
+    json_device_emulated_packet_router_disabled, json_device_id)
 from common.tools.object_factory.EndPoint import json_endpoints
 from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id
 from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned
 from common.tools.object_factory.Topology import json_topology, json_topology_id
 
-def compose_device(device_uuid, endpoint_uuids, topology_id=None):
+# If True, the Device component is present and will infer the endpoints from the connect rules.
+# If False, the Device component is not present and device objects must contain preconfigured endpoints.
+ADD_CONNECT_RULES_TO_DEVICES = False
+
+def compose_router(device_uuid, endpoint_uuids, topology_id=None):
+    device_id = json_device_id(device_uuid)
+    r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids]
+    config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else []
+    endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id)
+    j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints
+    device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints)
+    return device_id, endpoints, device
+
+def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None):
     device_id = json_device_id(device_uuid)
-    endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids]
-    endpoints = json_endpoints(device_id, endpoints, topology_id=topology_id)
-    device = json_device_emulated_packet_router_disabled(device_uuid, endpoints=endpoints)
+    r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids]
+    config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else []
+    endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id)
+    j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints
+    device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints)
     return device_id, endpoints, device
 
 def compose_link(endpoint_a, endpoint_z):
@@ -78,20 +95,20 @@ TOPO_TN      = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID)
 
 # ----- Devices --------------------------------------------------------------------------------------------------------
 # DataCenters
-DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_device('DC1-GW', ['eth1', 'eth2', 'int'], topology_id=TOPO_DC1_ID)
-DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_device('DC2-GW', ['eth1', 'eth2', 'int'], topology_id=TOPO_DC2_ID)
+DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int'])
+DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int'])
 
 # CellSites
-DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_device('CS1-GW1', ['1000', '100', '200'], topology_id=TOPO_CS1_ID)
-DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_device('CS1-GW2', ['1000', '100', '200'], topology_id=TOPO_CS1_ID)
-DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_device('CS2-GW1', ['1000', '100', '200'], topology_id=TOPO_CS2_ID)
-DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_device('CS2-GW2', ['1000', '100', '200'], topology_id=TOPO_CS2_ID)
+DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2'])
+DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2'])
+DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2'])
+DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2'])
 
 # Transport Network
-DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_device('TN-R1', ['100', '200', '1', '2', '3'], topology_id=TOPO_TN_ID)
-DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_device('TN-R2', ['100', '200', '1', '2', '3'], topology_id=TOPO_TN_ID)
-DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_device('TN-R3', ['100', '200', '1', '2', '3'], topology_id=TOPO_TN_ID)
-DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_device('TN-R4', ['100', '200', '1', '2', '3'], topology_id=TOPO_TN_ID)
+DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1', '2/2', '2/3'])
+DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_router('TN-R2', ['1/1', '1/2', '2/1', '2/2', '2/3'])
+DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1', '2/2', '2/3'])
+DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_router('TN-R4', ['1/1', '1/2', '2/1', '2/2', '2/3'])
 
 
 # ----- Links ----------------------------------------------------------------------------------------------------------
@@ -122,32 +139,36 @@ LINK_TNR2_TNR4_ID, LINK_TNR2_TNR4 = compose_link(DEV_TNR2_EPS[4], DEV_TNR4_EPS[4
 
 # ----- Service --------------------------------------------------------------------------------------------------------
 SERVICE_DC1GW_DC2GW = compose_service(DEV_DC1GW_EPS[2], DEV_DC2GW_EPS[2], constraints=[
-    json_constraint('bandwidth[gbps]', 10.0),
-    json_constraint('latency[ms]',     12.0),
+    json_constraint_custom('bandwidth[gbps]', 10.0),
+    json_constraint_custom('latency[ms]',     20.0),
 ])
 
 # ----- Containers -----------------------------------------------------------------------------------------------------
-CONTEXTS   = [  CONTEXT]
-TOPOLOGIES = [  TOPO_ADMIN, TOPO_DC1, TOPO_DC2, TOPO_CS1, TOPO_CS2, TOPO_TN]
+CONTEXTS   = [  CONTEXT ]
+TOPOLOGIES = [  TOPO_ADMIN, TOPO_DC1, TOPO_DC2, TOPO_CS1, TOPO_CS2, TOPO_TN ]
 DEVICES    = [  DEV_DC1GW, DEV_DC2GW,
                 DEV_CS1GW1, DEV_CS1GW2, DEV_CS2GW1, DEV_CS2GW2,
-                DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4  ]
+                DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4,
+            ]
 LINKS      = [  LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2,
                 LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1,
                 LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3,
-                LINK_TNR1_TNR2, LINK_TNR2_TNR3, LINK_TNR3_TNR4, LINK_TNR4_TNR1, LINK_TNR1_TNR3, LINK_TNR2_TNR4  ]
+                LINK_TNR1_TNR2, LINK_TNR2_TNR3, LINK_TNR3_TNR4, LINK_TNR4_TNR1, LINK_TNR1_TNR3, LINK_TNR2_TNR4,
+            ]
 SERVICES   = [  SERVICE_DC1GW_DC2GW   ]
 
 OBJECTS_PER_TOPOLOGY = [
     (TOPO_ADMIN_ID,
         [   DEV_DC1GW_ID, DEV_DC2GW_ID,
             DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID,
-            DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID  ],
+            DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID,
+        ],
         [   LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID,
             LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID,
             LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID,
             LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID,
-            LINK_TNR2_TNR4_ID   ],
+            LINK_TNR2_TNR4_ID,
+        ],
     ),
     (TOPO_DC1_ID,
         [DEV_DC1GW_ID],
@@ -162,7 +183,9 @@ OBJECTS_PER_TOPOLOGY = [
         [DEV_CS2GW1_ID, DEV_CS2GW2_ID],
         []),
     (TOPO_TN_ID,
-        [DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID],
-        [LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID,
-            LINK_TNR2_TNR4_ID]),
+        [   DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID,
+        ],
+        [   LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID,
+            LINK_TNR2_TNR4_ID,
+        ]),
 ]
diff --git a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py
new file mode 100644
index 0000000000000000000000000000000000000000..99fd83ed9e1a7ca27faa6acb11b07abd573423ef
--- /dev/null
+++ b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py
@@ -0,0 +1,202 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+from common.Constants import DEFAULT_CONTEXT_UUID, DEFAULT_TOPOLOGY_UUID
+from common.tools.object_factory.Constraint import json_constraint_custom
+from common.tools.object_factory.Context import json_context, json_context_id
+from common.tools.object_factory.Device import (
+    json_device_emulated_connect_rules, json_device_emulated_datacenter_disabled,
+    json_device_emulated_packet_router_disabled, json_device_emulated_tapi_disabled, json_device_id)
+from common.tools.object_factory.EndPoint import json_endpoints
+from common.tools.object_factory.Link import get_link_uuid, json_link, json_link_id
+from common.tools.object_factory.Service import get_service_uuid, json_service_l3nm_planned
+from common.tools.object_factory.Topology import json_topology, json_topology_id
+
+# If True, the Device component is present and will infer the endpoints from the connect rules.
+# If False, the Device component is not present and device objects must contain preconfigured endpoints.
+ADD_CONNECT_RULES_TO_DEVICES = False
+
+def compose_router(device_uuid, endpoint_uuids, topology_id=None):
+    device_id = json_device_id(device_uuid)
+    r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids]
+    config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else []
+    endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id)
+    j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints
+    device = json_device_emulated_packet_router_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints)
+    return device_id, endpoints, device
+
+def compose_ols(device_uuid, endpoint_uuids, topology_id=None):
+    device_id = json_device_id(device_uuid)
+    r_endpoints = [(endpoint_uuid, 'optical', []) for endpoint_uuid in endpoint_uuids]
+    config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else []
+    endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id)
+    j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints
+    device = json_device_emulated_tapi_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints)
+    return device_id, endpoints, device
+
+def compose_datacenter(device_uuid, endpoint_uuids, topology_id=None):
+    device_id = json_device_id(device_uuid)
+    r_endpoints = [(endpoint_uuid, 'copper', []) for endpoint_uuid in endpoint_uuids]
+    config_rules = json_device_emulated_connect_rules(r_endpoints) if ADD_CONNECT_RULES_TO_DEVICES else []
+    endpoints = json_endpoints(device_id, r_endpoints, topology_id=topology_id)
+    j_endpoints = [] if ADD_CONNECT_RULES_TO_DEVICES else endpoints
+    device = json_device_emulated_datacenter_disabled(device_uuid, config_rules=config_rules, endpoints=j_endpoints)
+    return device_id, endpoints, device
+
+def compose_link(endpoint_a, endpoint_z):
+    link_uuid = get_link_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id'])
+    link_id   = json_link_id(link_uuid)
+    link      = json_link(link_uuid, [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']])
+    return link_id, link
+
+def compose_service(endpoint_a, endpoint_z, constraints=None):
+    # avoid a shared mutable default argument
+    if constraints is None: constraints = []
+    service_uuid = get_service_uuid(endpoint_a['endpoint_id'], endpoint_z['endpoint_id'])
+    endpoint_ids = [endpoint_a['endpoint_id'], endpoint_z['endpoint_id']]
+    service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints)
+    return service
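+# Example usage (mirroring SERVICE_DC1GW_DC2GW below): an L3NM service between the two datacenter
+# gateways' internal endpoints, with custom bandwidth and latency constraints:
+#   SERVICE = compose_service(DEV_DC1GW_EPS[2], DEV_DC2GW_EPS[2], constraints=[
+#       json_constraint_custom('bandwidth[gbps]', 10.0),
+#       json_constraint_custom('latency[ms]',     20.0),
+#   ])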
+
+# ----- Context --------------------------------------------------------------------------------------------------------
+CONTEXT_ID = json_context_id(DEFAULT_CONTEXT_UUID)
+CONTEXT    = json_context(DEFAULT_CONTEXT_UUID)
+
+# ----- Domains --------------------------------------------------------------------------------------------------------
+# Overall network topology
+TOPO_ADMIN_UUID = DEFAULT_TOPOLOGY_UUID
+TOPO_ADMIN_ID   = json_topology_id(TOPO_ADMIN_UUID, context_id=CONTEXT_ID)
+TOPO_ADMIN      = json_topology(TOPO_ADMIN_UUID, context_id=CONTEXT_ID)
+
+# DataCenter #1 Network
+TOPO_DC1_UUID = 'DC1'
+TOPO_DC1_ID   = json_topology_id(TOPO_DC1_UUID, context_id=CONTEXT_ID)
+TOPO_DC1      = json_topology(TOPO_DC1_UUID, context_id=CONTEXT_ID)
+
+# DataCenter #2 Network
+TOPO_DC2_UUID = 'DC2'
+TOPO_DC2_ID   = json_topology_id(TOPO_DC2_UUID, context_id=CONTEXT_ID)
+TOPO_DC2      = json_topology(TOPO_DC2_UUID, context_id=CONTEXT_ID)
+
+# CellSite #1 Network
+TOPO_CS1_UUID = 'CS1'
+TOPO_CS1_ID   = json_topology_id(TOPO_CS1_UUID, context_id=CONTEXT_ID)
+TOPO_CS1      = json_topology(TOPO_CS1_UUID, context_id=CONTEXT_ID)
+
+# CellSite #2 Network
+TOPO_CS2_UUID = 'CS2'
+TOPO_CS2_ID   = json_topology_id(TOPO_CS2_UUID, context_id=CONTEXT_ID)
+TOPO_CS2      = json_topology(TOPO_CS2_UUID, context_id=CONTEXT_ID)
+
+# Transport Network
+TOPO_TN_UUID = 'TN'
+TOPO_TN_ID   = json_topology_id(TOPO_TN_UUID, context_id=CONTEXT_ID)
+TOPO_TN      = json_topology(TOPO_TN_UUID, context_id=CONTEXT_ID)
+
+
+# ----- Devices --------------------------------------------------------------------------------------------------------
+# DataCenters
+DEV_DC1GW_ID, DEV_DC1GW_EPS, DEV_DC1GW = compose_datacenter('DC1-GW', ['eth1', 'eth2', 'int'])
+DEV_DC2GW_ID, DEV_DC2GW_EPS, DEV_DC2GW = compose_datacenter('DC2-GW', ['eth1', 'eth2', 'int'])
+
+# CellSites
+DEV_CS1GW1_ID, DEV_CS1GW1_EPS, DEV_CS1GW1 = compose_router('CS1-GW1', ['10/1', '1/1', '1/2'])
+DEV_CS1GW2_ID, DEV_CS1GW2_EPS, DEV_CS1GW2 = compose_router('CS1-GW2', ['10/1', '1/1', '1/2'])
+DEV_CS2GW1_ID, DEV_CS2GW1_EPS, DEV_CS2GW1 = compose_router('CS2-GW1', ['10/1', '1/1', '1/2'])
+DEV_CS2GW2_ID, DEV_CS2GW2_EPS, DEV_CS2GW2 = compose_router('CS2-GW2', ['10/1', '1/1', '1/2'])
+
+# Transport Network
+DEV_TNR1_ID, DEV_TNR1_EPS, DEV_TNR1 = compose_router('TN-R1', ['1/1', '1/2', '2/1'])
+DEV_TNR2_ID, DEV_TNR2_EPS, DEV_TNR2 = compose_router('TN-R2', ['1/1', '1/2', '2/1'])
+DEV_TNR3_ID, DEV_TNR3_EPS, DEV_TNR3 = compose_router('TN-R3', ['1/1', '1/2', '2/1'])
+DEV_TNR4_ID, DEV_TNR4_EPS, DEV_TNR4 = compose_router('TN-R4', ['1/1', '1/2', '2/1'])
+tols_ep_uuids = [str(uuid.uuid4()).split('-')[-1] for _ in range(4)]
+DEV_TOLS_ID, DEV_TOLS_EPS, DEV_TOLS = compose_ols('TN-OLS', tols_ep_uuids)
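+# TN-OLS endpoint names are random 12-hex-character strings (the last group of a UUIDv4),
+# mimicking the opaque endpoint identifiers usually exposed by TAPI-controlled optical line systems.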
+
+
+# ----- Links ----------------------------------------------------------------------------------------------------------
+# InterDomain DC-CSGW
+LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW1 = compose_link(DEV_DC1GW_EPS[0], DEV_CS1GW1_EPS[0])
+LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1GW2_EPS[0])
+LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0])
+LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0])
+
+# InterDomain CSGW-TN
+LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0])
+LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0])
+LINK_CS1GW1_TNR2_ID, LINK_CS1GW1_TNR2 = compose_link(DEV_CS1GW1_EPS[2], DEV_TNR2_EPS[1])
+LINK_CS1GW2_TNR1_ID, LINK_CS1GW2_TNR1 = compose_link(DEV_CS1GW2_EPS[2], DEV_TNR1_EPS[1])
+LINK_CS2GW1_TNR3_ID, LINK_CS2GW1_TNR3 = compose_link(DEV_CS2GW1_EPS[1], DEV_TNR3_EPS[0])
+LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4_EPS[0])
+LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1])
+LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1])
+
+# IntraDomain TN
+LINK_TNR1_TOLS_ID, LINK_TNR1_TOLS = compose_link(DEV_TNR1_EPS[2], DEV_TOLS_EPS[0])
+LINK_TNR2_TOLS_ID, LINK_TNR2_TOLS = compose_link(DEV_TNR2_EPS[2], DEV_TOLS_EPS[1])
+LINK_TNR3_TOLS_ID, LINK_TNR3_TOLS = compose_link(DEV_TNR3_EPS[2], DEV_TOLS_EPS[2])
+LINK_TNR4_TOLS_ID, LINK_TNR4_TOLS = compose_link(DEV_TNR4_EPS[2], DEV_TOLS_EPS[3])
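+# Resulting connectivity: each datacenter gateway is dual-homed to its two cell-site gateways,
+# each cell-site gateway is dual-homed to two transport routers (TN-R1/TN-R2 for CS1, TN-R3/TN-R4
+# for CS2), and all four transport routers attach to the TN-OLS optical line system.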
+
+
+# ----- Service --------------------------------------------------------------------------------------------------------
+SERVICE_DC1GW_DC2GW = compose_service(DEV_DC1GW_EPS[2], DEV_DC2GW_EPS[2], constraints=[
+    json_constraint_custom('bandwidth[gbps]', 10.0),
+    json_constraint_custom('latency[ms]',     20.0),
+])
+
+# ----- Containers -----------------------------------------------------------------------------------------------------
+CONTEXTS   = [  CONTEXT ]
+TOPOLOGIES = [  TOPO_ADMIN, TOPO_DC1, TOPO_DC2, TOPO_CS1, TOPO_CS2, TOPO_TN ]
+DEVICES    = [  DEV_DC1GW, DEV_DC2GW,
+                DEV_CS1GW1, DEV_CS1GW2, DEV_CS2GW1, DEV_CS2GW2,
+                DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4,
+                DEV_TOLS,
+            ]
+LINKS      = [  LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2,
+                LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1,
+                LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3,
+                LINK_TNR1_TOLS, LINK_TNR2_TOLS, LINK_TNR3_TOLS, LINK_TNR4_TOLS,
+            ]
+SERVICES   = [  SERVICE_DC1GW_DC2GW   ]
+
+OBJECTS_PER_TOPOLOGY = [
+    (TOPO_ADMIN_ID,
+        [   DEV_DC1GW_ID, DEV_DC2GW_ID,
+            DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID,
+            DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID,
+            DEV_TOLS_ID,
+        ],
+        [   LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID,
+            LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID,
+            LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID,
+            LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID,
+        ],
+    ),
+    (TOPO_DC1_ID,
+        [DEV_DC1GW_ID],
+        []),
+    (TOPO_DC2_ID,
+        [DEV_DC2GW_ID],
+        []),
+    (TOPO_CS1_ID,
+        [DEV_CS1GW1_ID, DEV_CS1GW2_ID],
+        []),
+    (TOPO_CS2_ID,
+        [DEV_CS2GW1_ID, DEV_CS2GW2_ID],
+        []),
+    (TOPO_TN_ID,
+        [   DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID,
+            DEV_TOLS_ID,
+        ],
+        [   LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID,
+        ]),
+]
diff --git a/src/pathcomp/frontend/tests/PrepareTestScenario.py b/src/pathcomp/frontend/tests/PrepareTestScenario.py
index cb0d8e466ca418226ff96f77a1cfaffc9dbdb6af..2e7002b0f70b81f0bbe728a7b8139730d004221e 100644
--- a/src/pathcomp/frontend/tests/PrepareTestScenario.py
+++ b/src/pathcomp/frontend/tests/PrepareTestScenario.py
@@ -17,7 +17,6 @@ from common.Constants import ServiceNameEnum
 from common.Settings import (
     ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_service_port_grpc)
 from context.client.ContextClient import ContextClient
-from device.client.DeviceClient import DeviceClient
 from pathcomp.frontend.client.PathCompClient import PathCompClient
 from pathcomp.frontend.service.PathCompService import PathCompService
 from pathcomp.frontend.tests.MockService_Dependencies import MockService_Dependencies
@@ -43,15 +42,7 @@ def context_client(mock_service : MockService_Dependencies): # pylint: disable=r
     _client.close()
 
 @pytest.fixture(scope='session')
-def device_client(mock_service : MockService_Dependencies): # pylint: disable=redefined-outer-name
-    _client = DeviceClient()
-    yield _client
-    _client.close()
-
-@pytest.fixture(scope='session')
-def pathcomp_service(
-    context_client : ContextClient, # pylint: disable=redefined-outer-name
-    device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+def pathcomp_service(context_client : ContextClient):       # pylint: disable=redefined-outer-name
 
     _service = PathCompService()
     _service.start()
diff --git a/src/pathcomp/frontend/tests/test_unitary.py b/src/pathcomp/frontend/tests/test_unitary.py
index 2368a6c0ade2bd0125a0a8df66560496fe782a1d..53f4d7065e5ee847cd99f431c87c1231e52bbd63 100644
--- a/src/pathcomp/frontend/tests/test_unitary.py
+++ b/src/pathcomp/frontend/tests/test_unitary.py
@@ -16,14 +16,20 @@ import copy, logging, os
 from common.proto.context_pb2 import Context, ContextId, DeviceId, Link, LinkId, Topology, Device, TopologyId
 from common.proto.pathcomp_pb2 import PathCompRequest
 from common.tools.grpc.Tools import grpc_message_to_json
-from common.tools.object_factory.Constraint import json_constraint
+from common.tools.object_factory.Constraint import (
+    json_constraint_custom, json_constraint_endpoint_location_region, json_constraint_endpoint_priority,
+    json_constraint_sla_availability)
+from common.tools.object_factory.Device import json_device_id
+from common.tools.object_factory.EndPoint import json_endpoint_id
+from common.tools.object_factory.Service import json_service_l3nm_planned
 from context.client.ContextClient import ContextClient
 from device.client.DeviceClient import DeviceClient
 from pathcomp.frontend.client.PathCompClient import PathCompClient
 
 # Scenarios:
 #from .Objects_A_B_C import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES
-from .Objects_DC_CSGW_TN import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES
+#from .Objects_DC_CSGW_TN import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES
+from .Objects_DC_CSGW_TN_OLS import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES
 
 # configure backend environment variables before overwriting them with fixtures to use real backend pathcomp
 DEFAULT_PATHCOMP_BACKEND_SCHEME  = 'http'
@@ -48,18 +54,17 @@ os.environ['PATHCOMP_BACKEND_PORT'] = os.environ.get('PATHCOMP_BACKEND_PORT', ba
 
 from .PrepareTestScenario import ( # pylint: disable=unused-import
     # be careful, order of symbols is important here!
-    mock_service, pathcomp_service, context_client, device_client, pathcomp_client)
+    mock_service, pathcomp_service, context_client, pathcomp_client)
 
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
 def test_prepare_environment(
-    context_client : ContextClient, # pylint: disable=redefined-outer-name
-    device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+    context_client : ContextClient):    # pylint: disable=redefined-outer-name
 
     for context  in CONTEXTS  : context_client.SetContext (Context (**context ))
     for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology))
-    for device   in DEVICES   : device_client .AddDevice  (Device  (**device  ))
+    for device   in DEVICES   : context_client.SetDevice  (Device  (**device  ))
     for link     in LINKS     : context_client.SetLink    (Link    (**link    ))
 
     for topology_id, device_ids, link_ids in OBJECTS_PER_TOPOLOGY:
@@ -85,8 +90,8 @@ def test_request_service_shortestpath(
 
     request_services = copy.deepcopy(SERVICES)
     #request_services[0]['service_constraints'] = [
-    #    json_constraint('bandwidth[gbps]', 1000.0),
-    #    json_constraint('latency[ms]',     1200.0),
+    #    json_constraint_custom('bandwidth[gbps]', 1000.0),
+    #    json_constraint_custom('latency[ms]',     1200.0),
     #]
     pathcomp_request = PathCompRequest(services=request_services)
     pathcomp_request.shortest_path.Clear()  # hack to select the shortest path algorithm that has no attributes
@@ -188,7 +193,33 @@ def test_request_service_kshortestpath(
 def test_request_service_kdisjointpath(
     pathcomp_client : PathCompClient):  # pylint: disable=redefined-outer-name
 
-    request_services = SERVICES
+    service_uuid = 'DC1-DC2'
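+    # Each raw endpoint is (device_uuid, endpoint_uuid, region, priority); the loop below expands
+    # them into endpoint_ids plus per-endpoint location and priority constraints.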
+    raw_endpoints = [
+        ('CS1-GW1', '10/1', 'DC1', 10),
+        ('CS1-GW2', '10/1', 'DC1', 20),
+        ('CS2-GW1', '10/1', 'DC2', 10),
+        ('CS2-GW2', '10/1', 'DC2', 20),
+    ]
+
+    endpoint_ids, constraints = [], [
+        json_constraint_custom('bandwidth[gbps]', 10.0),
+        json_constraint_custom('latency[ms]',     12.0),
+        json_constraint_sla_availability(2, True),
+        json_constraint_custom('diversity', {'end-to-end-diverse': 'all-other-accesses'}),
+    ]
+
+    for device_uuid, endpoint_uuid, region, priority in raw_endpoints:
+        device_id = json_device_id(device_uuid)
+        endpoint_id = json_endpoint_id(device_id, endpoint_uuid)
+        endpoint_ids.append(endpoint_id)
+        constraints.extend([
+            json_constraint_endpoint_location_region(endpoint_id, region),
+            json_constraint_endpoint_priority(endpoint_id, priority),
+        ])
+
+    service = json_service_l3nm_planned(service_uuid, endpoint_ids=endpoint_ids, constraints=constraints)
+    request_services = [service]
+
     pathcomp_request = PathCompRequest(services=request_services)
     pathcomp_request.k_disjoint_path.num_disjoint = 2   #pylint: disable=no-member
 
@@ -236,10 +267,9 @@ def test_request_service_kdisjointpath(
 
 
 def test_cleanup_environment(
-    context_client : ContextClient, # pylint: disable=redefined-outer-name
-    device_client : DeviceClient):  # pylint: disable=redefined-outer-name
+    context_client : ContextClient):    # pylint: disable=redefined-outer-name
 
     for link     in LINKS     : context_client.RemoveLink    (LinkId    (**link    ['link_id'    ]))
-    for device   in DEVICES   : device_client .DeleteDevice  (DeviceId  (**device  ['device_id'  ]))
+    for device   in DEVICES   : context_client.RemoveDevice  (DeviceId  (**device  ['device_id'  ]))
     for topology in TOPOLOGIES: context_client.RemoveTopology(TopologyId(**topology['topology_id']))
     for context  in CONTEXTS  : context_client.RemoveContext (ContextId (**context ['context_id' ]))