diff --git a/deploy/crdb.sh b/deploy/crdb.sh
index 76aa0737087cb9bfe28b4a42e6ebd58b5b784b7e..598980ac84a3b49e55402993e22ed963e80b2e38 100755
--- a/deploy/crdb.sh
+++ b/deploy/crdb.sh
@@ -111,7 +111,7 @@ function crdb_deploy_single() {
     echo
 
     echo "CockroachDB Port Mapping"
-    echo ">>> Expose CockroachDB SQL port (26257)"
+    echo ">>> Expose CockroachDB SQL port (26257->26257)"
     CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}')
     PATCH='{"data": {"'${CRDB_SQL_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_SQL_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
@@ -122,12 +122,13 @@ function crdb_deploy_single() {
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
     echo
 
-    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080)"
+    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080->8081)"
+    CRDB_GUI_PORT_EXT="8081"
     CRDB_GUI_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="http")].port}')
-    PATCH='{"data": {"'${CRDB_GUI_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
+    PATCH='{"data": {"'${CRDB_GUI_PORT_EXT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
 
-    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT}', "hostPort": '${CRDB_GUI_PORT}'}'
+    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT_EXT}', "hostPort": '${CRDB_GUI_PORT_EXT}'}'
     CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
     PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
@@ -275,12 +276,13 @@ function crdb_deploy_cluster() {
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
     echo
 
-    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080)"
+    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080->8081)"
+    CRDB_GUI_PORT_EXT="8081"
     CRDB_GUI_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="http")].port}')
-    PATCH='{"data": {"'${CRDB_GUI_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
+    PATCH='{"data": {"'${CRDB_GUI_PORT_EXT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
 
-    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT}', "hostPort": '${CRDB_GUI_PORT}'}'
+    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT_EXT}', "hostPort": '${CRDB_GUI_PORT_EXT}'}'
     CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
     PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
diff --git a/hackfest/mock_osm/WimconnectorIETFL2VPN.py b/hackfest/mock_osm/WimconnectorIETFL2VPN.py
index e1273b4e483a06df23d94bdf107005ce7585fb5e..aa4ca045f41ffdc69d2ebf2fcd9b5db99ce45dbe 100644
--- a/hackfest/mock_osm/WimconnectorIETFL2VPN.py
+++ b/hackfest/mock_osm/WimconnectorIETFL2VPN.py
@@ -73,7 +73,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase):
             response = requests.get(endpoint, auth=self.auth)
             http_code = response.status_code
         except requests.exceptions.RequestException as e:
-            raise SdnConnectorError(e.message, http_code=503)
+            raise SdnConnectorError(str(e), http_code=503)
 
         if http_code != 200:
             raise SdnConnectorError("Failed while authenticating", http_code=http_code)
diff --git a/hackfest/mock_osm/__main__.py b/hackfest/mock_osm/__main__.py
index e76616eab38cc072f959231e08743e312cf6b0b2..410feda84d24e45949ac9db6303e16199ea54fd5 100644
--- a/hackfest/mock_osm/__main__.py
+++ b/hackfest/mock_osm/__main__.py
@@ -18,7 +18,7 @@ from .MockOSM import MockOSM
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-WIM_URL = 'http://10.0.2.15:80'
+WIM_URL = 'http://10.0.2.10:80'
 WIM_USERNAME = 'admin'
 WIM_PASSWORD = 'admin'
 
diff --git a/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py b/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
index 640f5a72dacea883d145bca664810812e1d37f58..dc0118b7ac437c6fb151ff7326bda7b29a2ec26b 100644
--- a/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
+++ b/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
@@ -2559,8 +2559,42 @@ def data_tapi_commoncontext_tapi_connectivityconnectivity_context_post(body=None
     :rtype: None
     """
     if connexion.request.is_json:
-        body = TapiConnectivityConnectivityContextWrapper.from_dict(connexion.request.get_json())  # noqa: E501
-    return 'do some magic!'
+        #body = TapiConnectivityConnectivityContextWrapper.from_dict(connexion.request.get_json())  # noqa: E501
+        raw_body = connexion.request.get_json()
+        if "tapi-connectivity:connectivity-service" in raw_body:
+            raw_body["connectivity-service"] = raw_body.pop("tapi-connectivity:connectivity-service")
+        if isinstance(raw_body["connectivity-service"], list) and len(raw_body["connectivity-service"]) > 0:
+            raw_body["connectivity-service"] = raw_body["connectivity-service"][0]
+        
+        connectivity_service = raw_body["connectivity-service"]
+        if "connectivity-constraint" in connectivity_service:
+            connectivity_constraint = connectivity_service.pop("connectivity-constraint")
+            if "requested-capacity" in connectivity_constraint:
+                connectivity_service["requested-capacity"] = connectivity_constraint.pop("requested-capacity")
+            if "connectivity-direction" in connectivity_constraint:
+                connectivity_service["connectivity-direction"] = connectivity_constraint.pop("connectivity-direction")
+
+        body = TapiConnectivityConnectivityServiceWrapper.from_dict(raw_body)  # noqa: E501
+
+    connection = TapiConnectivityConnection(
+        uuid=body.connectivity_service.uuid,
+        connection_end_point=[
+            TapiConnectivityConnectionEndPointRef(
+                node_edge_point_uuid="node-1-port-3", connection_end_point_uuid="cep13"),
+            TapiConnectivityConnectionEndPointRef(
+                node_edge_point_uuid="node-3-port-2", connection_end_point_uuid="cep32"),
+        ]
+    )
+    connection_ref = TapiConnectivityConnectionRef(connection.uuid)
+    body.connectivity_service.connection = [ connection_ref ]
+
+    if database.context.connectivity_context is None:
+        database.context.connectivity_context = TapiConnectivityConnectivityContext(
+            connectivity_service=[], connection=[]
+        )
+
+    database.context.connectivity_context.connection.append(connection)
+    database.context.connectivity_context.connectivity_service.append(body.connectivity_service)
 
 
 def data_tapi_commoncontext_tapi_connectivityconnectivity_context_put(body=None):  # noqa: E501
diff --git a/src/common/message_broker/backend/nats/NatsBackend.py b/src/common/message_broker/backend/nats/NatsBackend.py
index 6c644a0a815cb737211af1e00db5828da0120db5..4a6d5e9cd3cc4723f6bde669158e965caeff9dac 100644
--- a/src/common/message_broker/backend/nats/NatsBackend.py
+++ b/src/common/message_broker/backend/nats/NatsBackend.py
@@ -19,18 +19,28 @@ from common.message_broker.Message import Message
 from .._Backend import _Backend
 from .NatsBackendThread import NatsBackendThread
 
 DEFAULT_NATS_URI = 'nats://127.0.0.1:4222'
-#NATS_URI_TEMPLATE = 'nats://{:s}:{:s}@nats.{:s}.svc.cluster.local:{:s}' # with authentication
-NATS_URI_TEMPLATE = 'nats://nats.{:s}.svc.cluster.local:{:s}'
+NATS_URI_TEMPLATE_AUTH = 'nats://{:s}:{:s}@nats.{:s}.svc.cluster.local:{:s}'
+NATS_URI_TEMPLATE_NOAUTH = 'nats://nats.{:s}.svc.cluster.local:{:s}'
 
 class NatsBackend(_Backend):
     def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
-        nats_namespace   = get_setting('NATS_NAMESPACE', settings=settings)
-        nats_client_port = get_setting('NATS_CLIENT_PORT', settings=settings)
-        if nats_namespace is None or nats_client_port is None:
-            nats_uri = get_setting('NATS_URI', settings=settings, default=DEFAULT_NATS_URI)
-        else:
-            nats_uri = NATS_URI_TEMPLATE.format(nats_namespace, nats_client_port)
+        nats_uri = get_setting('NATS_URI', settings=settings, default=None)
+        if nats_uri is None:
+            nats_namespace   = get_setting('NATS_NAMESPACE', settings=settings, default=None)
+            nats_client_port = get_setting('NATS_CLIENT_PORT', settings=settings, default=None)
+            if nats_namespace is None or nats_client_port is None:
+                # keep the local-development fallback instead of failing when
+                # neither NATS_URI nor the namespace/port settings are defined
+                nats_uri = DEFAULT_NATS_URI
+            else:
+                nats_username = get_setting('NATS_USERNAME', settings=settings, default=None)
+                nats_password = get_setting('NATS_PASSWORD', settings=settings, default=None)
+                if nats_username is None or nats_password is None:
+                    nats_uri = NATS_URI_TEMPLATE_NOAUTH.format(nats_namespace, nats_client_port)
+                else:
+                    nats_uri = NATS_URI_TEMPLATE_AUTH.format(
+                        nats_username, nats_password, nats_namespace, nats_client_port)
+
         self._terminate = threading.Event()
         self._nats_backend_thread = NatsBackendThread(nats_uri)
         self._nats_backend_thread.start()
diff --git a/src/common/method_wrappers/Decorator.py b/src/common/method_wrappers/Decorator.py
index f918b845827951def858e0a9f5981724b0c56640..558e5c17a2c1af5ef8a0efa35bc03a17b4b38acf 100644
--- a/src/common/method_wrappers/Decorator.py
+++ b/src/common/method_wrappers/Decorator.py
@@ -169,6 +169,7 @@ def metered_subclass_method(metrics_pool : MetricsPool):
                 raise
             except Exception:           # pylint: disable=broad-except
                 counter_failed.inc()
+                raise
 
         return inner_wrapper
     return outer_wrapper
diff --git a/src/common/tools/grpc/ConfigRules.py b/src/common/tools/grpc/ConfigRules.py
index e109cb7a00086da8530c7677967d86e57df1457a..4a7e588e4b3e69f1cd425bfca06b277bf0231be2 100644
--- a/src/common/tools/grpc/ConfigRules.py
+++ b/src/common/tools/grpc/ConfigRules.py
@@ -18,24 +18,31 @@
 
 import json
 from typing import Any, Dict, Tuple
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule
+from common.proto.context_pb2 import ConfigActionEnum
 from common.tools.grpc.Tools import grpc_message_to_json_string
 
-def update_config_rule_custom(config_rules, resource_key : str, fields : Dict[str, Tuple[Any, bool]]) -> ConfigRule:
+def update_config_rule_custom(
+    config_rules, resource_key : str, fields : Dict[str, Tuple[Any, bool]],
+    new_action : ConfigActionEnum = ConfigActionEnum.CONFIGACTION_SET
+) -> None:
     # fields: Dict[field_name : str, Tuple[field_value : Any, raise_if_differs : bool]]
 
+    # TODO: add support for ACL config rules
+
     for config_rule in config_rules:
-        if config_rule.WhichOneof('config_rule') != 'custom': continue
+        kind = config_rule.WhichOneof('config_rule')
+        if kind != 'custom': continue
         if config_rule.custom.resource_key != resource_key: continue
         json_resource_value = json.loads(config_rule.custom.resource_value)
         break   # found, end loop
     else:
         # not found, add it
         config_rule = config_rules.add()    # pylint: disable=no-member
-        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
         config_rule.custom.resource_key = resource_key
         json_resource_value = {}
 
+    config_rule.action = new_action
+
     for field_name,(field_value, raise_if_differs) in fields.items():
         if (field_name not in json_resource_value) or not raise_if_differs:
             # missing or raise_if_differs=False, add/update it
diff --git a/src/context/service/database/ConfigRule.py b/src/context/service/database/ConfigRule.py
index 5443e178c0f726be5b55e7955a6dc7b575d9f53a..7e4a383c787af0a35eb08a014e9464c9e7384c3b 100644
--- a/src/context/service/database/ConfigRule.py
+++ b/src/context/service/database/ConfigRule.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import datetime, logging
+import datetime, json, logging
 from sqlalchemy import delete
+#from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Set
 from common.proto.context_pb2 import ConfigRule
 from common.tools.grpc.Tools import grpc_message_to_json_string
-from .models.enums.ConfigAction import grpc_to_enum__config_action
+from .models.enums.ConfigAction import ORM_ConfigActionEnum, grpc_to_enum__config_action
 from .models.ConfigRuleModel import ConfigRuleKindEnum, ConfigRuleModel
-from .uuids._Builder import get_uuid_random
+from .uuids._Builder import get_uuid_from_string
+from .uuids.EndPoint import endpoint_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
@@ -31,173 +33,108 @@ def compose_config_rules_data(
 ) -> List[Dict]:
     dict_config_rules : List[Dict] = list()
     for position,config_rule in enumerate(config_rules):
-        configrule_uuid = get_uuid_random()
         str_kind = config_rule.WhichOneof('config_rule')
+        kind = ConfigRuleKindEnum._member_map_.get(str_kind.upper()) # pylint: disable=no-member
         dict_config_rule = {
-            'configrule_uuid': configrule_uuid,
-            'position'       : position,
-            'kind'           : ConfigRuleKindEnum._member_map_.get(str_kind.upper()), # pylint: disable=no-member
-            'action'         : grpc_to_enum__config_action(config_rule.action),
-            'data'           : grpc_message_to_json_string(getattr(config_rule, str_kind, {})),
-            'created_at'     : now,
-            'updated_at'     : now,
+            'position'  : position,
+            'kind'      : kind,
+            'action'    : grpc_to_enum__config_action(config_rule.action),
+            'data'      : grpc_message_to_json_string(getattr(config_rule, str_kind, {})),
+            'created_at': now,
+            'updated_at': now,
         }
-        if device_uuid  is not None: dict_config_rule['device_uuid' ] = device_uuid
-        if service_uuid is not None: dict_config_rule['service_uuid'] = service_uuid
-        if slice_uuid   is not None: dict_config_rule['slice_uuid'  ] = slice_uuid
+
+        parent_kind,parent_uuid = '',None
+        if device_uuid is not None:
+            dict_config_rule['device_uuid'] = device_uuid
+            parent_kind,parent_uuid = 'device',device_uuid
+        elif service_uuid is not None:
+            dict_config_rule['service_uuid'] = service_uuid
+            parent_kind,parent_uuid = 'service',service_uuid
+        elif slice_uuid is not None:
+            dict_config_rule['slice_uuid'] = slice_uuid
+            parent_kind,parent_uuid = 'slice',slice_uuid
+        else:
+            MSG = 'Parent for ConfigRule({:s}) cannot be identified '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = grpc_message_to_json_string(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
+
+        configrule_name = None
+        if kind == ConfigRuleKindEnum.CUSTOM:
+            configrule_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, config_rule.custom.resource_key)
+        elif kind == ConfigRuleKindEnum.ACL:
+            _, _, endpoint_uuid = endpoint_get_uuid(config_rule.acl.endpoint_id, allow_random=False)
+            rule_set_name = config_rule.acl.rule_set.name
+            configrule_name = '{:s}:{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid, rule_set_name)
+        else:
+            MSG = 'Name for ConfigRule({:s}) cannot be inferred '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = grpc_message_to_json_string(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
+
+        configrule_uuid = get_uuid_from_string(configrule_name, prefix_for_name=parent_uuid)
+        dict_config_rule['configrule_uuid'] = configrule_uuid
+
         dict_config_rules.append(dict_config_rule)
     return dict_config_rules
 
 def upsert_config_rules(
     session : Session, config_rules : List[Dict],
     device_uuid : Optional[str] = None, service_uuid : Optional[str] = None, slice_uuid : Optional[str] = None,
-) -> List[bool]:
-    # TODO: do not delete all rules; just add-remove as needed
-    stmt = delete(ConfigRuleModel)
-    if device_uuid  is not None: stmt = stmt.where(ConfigRuleModel.device_uuid  == device_uuid )
-    if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
-    if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
-    session.execute(stmt)
+) -> bool:
+    uuids_to_delete : Set[str] = set()
+    uuids_to_upsert : Dict[str, int] = dict()
+    rules_to_upsert : List[Dict] = list()
+    for config_rule in config_rules:
+        configrule_uuid = config_rule['configrule_uuid']
+        configrule_action = config_rule['action']
+        if configrule_action == ORM_ConfigActionEnum.SET:
+            position = uuids_to_upsert.get(configrule_uuid)
+            if position is None:
+                # if not added, add it
+                rules_to_upsert.append(config_rule)
+                uuids_to_upsert[configrule_uuid] = len(rules_to_upsert) - 1
+            else:
+                # if already added, update occurrence
+                rules_to_upsert[position] = config_rule
+        elif configrule_action == ORM_ConfigActionEnum.DELETE:
+            uuids_to_delete.add(configrule_uuid)
+        else:
+            MSG = 'Action for ConfigRule({:s}) is not supported '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = json.dumps(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
 
-    configrule_updates = []
-    if len(config_rules) > 0:
-        stmt = insert(ConfigRuleModel).values(config_rules)
-        #stmt = stmt.on_conflict_do_update(
-        #    index_elements=[ConfigRuleModel.configrule_uuid],
-        #    set_=dict(
-        #        updated_at = stmt.excluded.updated_at,
-        #    )
-        #)
+    delete_affected = False
+    if len(uuids_to_delete) > 0:
+        stmt = delete(ConfigRuleModel)
+        if device_uuid  is not None: stmt = stmt.where(ConfigRuleModel.device_uuid  == device_uuid )
+        if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
+        if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
+        stmt = stmt.where(ConfigRuleModel.configrule_uuid.in_(uuids_to_delete))
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('delete stmt={:s}'.format(str(str_stmt)))
+        configrule_deletes = session.execute(stmt)
+        #LOGGER.warning('configrule_deletes.rowcount={:s}'.format(str(configrule_deletes.rowcount)))
+        delete_affected = int(configrule_deletes.rowcount) > 0
+
+    upsert_affected = False
+    if len(rules_to_upsert) > 0:
+        stmt = insert(ConfigRuleModel).values(rules_to_upsert)
+        stmt = stmt.on_conflict_do_update(
+            index_elements=[ConfigRuleModel.configrule_uuid],
+            set_=dict(
+                position   = stmt.excluded.position,
+                action     = stmt.excluded.action,
+                data       = stmt.excluded.data,
+                updated_at = stmt.excluded.updated_at,
+            )
+        )
         stmt = stmt.returning(ConfigRuleModel.created_at, ConfigRuleModel.updated_at)
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('upsert stmt={:s}'.format(str(str_stmt)))
         configrule_updates = session.execute(stmt).fetchall()
+        upsert_affected = any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
 
-    return configrule_updates
-
-#Union_SpecificConfigRule = Union[
-#    ConfigRuleCustomModel, ConfigRuleAclModel
-#]
-#
-#def set_config_rule(
-#    database : Database, db_config : ConfigModel, position : int, resource_key : str, resource_value : str,
-#): # -> Tuple[ConfigRuleModel, bool]:
-#
-#    str_rule_key_hash = fast_hasher(resource_key)
-#    str_config_rule_key = key_to_str([db_config.config_uuid, str_rule_key_hash], separator=':')
-#
-#    data = {'config_fk': db_config, 'position': position, 'action': ORM_ConfigActionEnum.SET, 'key': resource_key,
-#            'value': resource_value}
-#    to_add = ConfigRuleModel(**data)
-#
-#    result = database.create_or_update(to_add)
-#    return result
-#Tuple_ConfigRuleSpecs = Tuple[Type, str, Dict, ConfigRuleKindEnum]
-#
-#def parse_config_rule_custom(database : Database, grpc_config_rule) -> Tuple_ConfigRuleSpecs:
-#    config_rule_class = ConfigRuleCustomModel
-#    str_config_rule_id = grpc_config_rule.custom.resource_key
-#    config_rule_data = {
-#        'key'  : grpc_config_rule.custom.resource_key,
-#        'value': grpc_config_rule.custom.resource_value,
-#    }
-#    return config_rule_class, str_config_rule_id, config_rule_data, ConfigRuleKindEnum.CUSTOM
-#
-#def parse_config_rule_acl(database : Database, grpc_config_rule) -> Tuple_ConfigRuleSpecs:
-#    config_rule_class = ConfigRuleAclModel
-#    grpc_endpoint_id = grpc_config_rule.acl.endpoint_id
-#    grpc_rule_set = grpc_config_rule.acl.rule_set
-#    device_uuid = grpc_endpoint_id.device_id.device_uuid.uuid
-#    endpoint_uuid = grpc_endpoint_id.endpoint_uuid.uuid
-#    str_endpoint_key = '/'.join([device_uuid, endpoint_uuid])
-#    #str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#    str_config_rule_id = ':'.join([str_endpoint_key, grpc_rule_set.name])
-#    config_rule_data = {
-#        #'endpoint_fk': db_endpoint,
-#        'endpoint_id': grpc_message_to_json_string(grpc_endpoint_id),
-#        'acl_data': grpc_message_to_json_string(grpc_rule_set),
-#    }
-#    return config_rule_class, str_config_rule_id, config_rule_data, ConfigRuleKindEnum.ACL
-#
-#CONFIGRULE_PARSERS = {
-#    'custom': parse_config_rule_custom,
-#    'acl'   : parse_config_rule_acl,
-#}
-#
-#Union_ConfigRuleModel = Union[
-#    ConfigRuleCustomModel, ConfigRuleAclModel,
-#]
-#
-#def set_config_rule(
-#    database : Database, db_config : ConfigModel, grpc_config_rule : ConfigRule, position : int
-#) -> Tuple[Union_ConfigRuleModel, bool]:
-#    grpc_config_rule_kind = str(grpc_config_rule.WhichOneof('config_rule'))
-#    parser = CONFIGRULE_PARSERS.get(grpc_config_rule_kind)
-#    if parser is None:
-#        raise NotImplementedError('ConfigRule of kind {:s} is not implemented: {:s}'.format(
-#            grpc_config_rule_kind, grpc_message_to_json_string(grpc_config_rule)))
-#
-#    # create specific ConfigRule
-#    config_rule_class, str_config_rule_id, config_rule_data, config_rule_kind = parser(database, grpc_config_rule)
-#    str_config_rule_key_hash = fast_hasher(':'.join([config_rule_kind.value, str_config_rule_id]))
-#    str_config_rule_key = key_to_str([db_config.pk, str_config_rule_key_hash], separator=':')
-#    result : Tuple[Union_ConfigRuleModel, bool] = update_or_create_object(
-#        database, config_rule_class, str_config_rule_key, config_rule_data)
-#    db_specific_config_rule, updated = result
-#
-#    # create generic ConfigRule
-#    config_rule_fk_field_name = 'config_rule_{:s}_fk'.format(config_rule_kind.value)
-#    config_rule_data = {
-#        'config_fk': db_config, 'kind': config_rule_kind, 'position': position,
-#        'action': ORM_ConfigActionEnum.SET,
-#        config_rule_fk_field_name: db_specific_config_rule
-#    }
-#    result : Tuple[ConfigRuleModel, bool] = update_or_create_object(
-#        database, ConfigRuleModel, str_config_rule_key, config_rule_data)
-#    db_config_rule, updated = result
-#
-#    return db_config_rule, updated
-#
-#def delete_config_rule(
-#    database : Database, db_config : ConfigModel, grpc_config_rule : ConfigRule
-#) -> None:
-#    grpc_config_rule_kind = str(grpc_config_rule.WhichOneof('config_rule'))
-#    parser = CONFIGRULE_PARSERS.get(grpc_config_rule_kind)
-#    if parser is None:
-#        raise NotImplementedError('ConfigRule of kind {:s} is not implemented: {:s}'.format(
-#            grpc_config_rule_kind, grpc_message_to_json_string(grpc_config_rule)))
-#
-#    # delete generic config rules; self deletes specific config rule
-#    _, str_config_rule_id, _, config_rule_kind = parser(database, grpc_config_rule)
-#    str_config_rule_key_hash = fast_hasher(':'.join([config_rule_kind.value, str_config_rule_id]))
-#    str_config_rule_key = key_to_str([db_config.pk, str_config_rule_key_hash], separator=':')
-#    db_config_rule : Optional[ConfigRuleModel] = get_object(
-#        database, ConfigRuleModel, str_config_rule_key, raise_if_not_found=False)
-#    if db_config_rule is None: return
-#    db_config_rule.delete()
-#
-#def update_config(
-#    database : Database, db_parent_pk : str, config_name : str, grpc_config_rules
-#) -> List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]]:
-#
-#    str_config_key = key_to_str([config_name, db_parent_pk], separator=':')
-#    result : Tuple[ConfigModel, bool] = get_or_create_object(database, ConfigModel, str_config_key)
-#    db_config, created = result
-#
-#    db_objects = [(db_config, created)]
-#
-#    for position,grpc_config_rule in enumerate(grpc_config_rules):
-#        action = grpc_to_enum__config_action(grpc_config_rule.action)
-#
-#        if action == ORM_ConfigActionEnum.SET:
-#            result : Tuple[ConfigRuleModel, bool] = set_config_rule(
-#                database, db_config, grpc_config_rule, position)
-#            db_config_rule, updated = result
-#            db_objects.append((db_config_rule, updated))
-#        elif action == ORM_ConfigActionEnum.DELETE:
-#            delete_config_rule(database, db_config, grpc_config_rule)
-#        else:
-#            msg = 'Unsupported Action({:s}) for ConfigRule({:s})'
-#            str_action = str(ConfigActionEnum.Name(action))
-#            str_config_rule = grpc_message_to_json_string(grpc_config_rule)
-#            raise AttributeError(msg.format(str_action, str_config_rule))
-#
-#    return db_objects
+    return delete_affected or upsert_affected
diff --git a/src/context/service/database/Constraint.py b/src/context/service/database/Constraint.py
index 2880c05a85dbde7c3af87d6766375862767611a7..cf6e11ac9b0b58e5dc84bbb81b5d25f819ada6f7 100644
--- a/src/context/service/database/Constraint.py
+++ b/src/context/service/database/Constraint.py
@@ -14,13 +14,15 @@
 
 import datetime, logging
 from sqlalchemy import delete
+#from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
 from typing import Dict, List, Optional
 from common.proto.context_pb2 import Constraint
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from .models.ConstraintModel import ConstraintKindEnum, ConstraintModel
-from .uuids._Builder import get_uuid_random
+from .uuids._Builder import get_uuid_from_string
+from .uuids.EndPoint import endpoint_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
@@ -31,100 +33,98 @@ def compose_constraints_data(
     dict_constraints : List[Dict] = list()
     for position,constraint in enumerate(constraints):
         str_kind = constraint.WhichOneof('constraint')
+        kind = ConstraintKindEnum._member_map_.get(str_kind.upper()) # pylint: disable=no-member
         dict_constraint = {
-            'constraint_uuid': get_uuid_random(),
-            'position'       : position,
-            'kind'           : ConstraintKindEnum._member_map_.get(str_kind.upper()), # pylint: disable=no-member
-            'data'           : grpc_message_to_json_string(getattr(constraint, str_kind, {})),
-            'created_at'     : now,
-            'updated_at'     : now,
+            'position'  : position,
+            'kind'      : kind,
+            'data'      : grpc_message_to_json_string(getattr(constraint, str_kind, {})),
+            'created_at': now,
+            'updated_at': now,
         }
-        if service_uuid is not None: dict_constraint['service_uuid'] = service_uuid
-        if slice_uuid   is not None: dict_constraint['slice_uuid'  ] = slice_uuid
+
+        parent_kind,parent_uuid = '',None
+        if service_uuid is not None:
+            dict_constraint['service_uuid'] = service_uuid
+            parent_kind,parent_uuid = 'service',service_uuid
+        elif slice_uuid is not None:
+            dict_constraint['slice_uuid'] = slice_uuid
+            parent_kind,parent_uuid = 'slice',slice_uuid
+        else:
+            MSG = 'Parent for Constraint({:s}) cannot be identified (service_uuid={:s}, slice_uuid={:s})'
+            str_constraint = grpc_message_to_json_string(constraint)
+            raise Exception(MSG.format(str_constraint, str(service_uuid), str(slice_uuid)))
+
+        constraint_name = None
+        if kind == ConstraintKindEnum.CUSTOM:
+            constraint_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, constraint.custom.constraint_type)
+        elif kind == ConstraintKindEnum.ENDPOINT_LOCATION:
+            _, _, endpoint_uuid = endpoint_get_uuid(constraint.endpoint_location.endpoint_id, allow_random=False)
+            location_kind = constraint.endpoint_location.location.WhichOneof('location')
+            constraint_name = '{:s}:{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid, location_kind)
+        elif kind == ConstraintKindEnum.ENDPOINT_PRIORITY:
+            _, _, endpoint_uuid = endpoint_get_uuid(constraint.endpoint_priority.endpoint_id, allow_random=False)
+            constraint_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid)
+        elif kind in {
+            ConstraintKindEnum.SCHEDULE, ConstraintKindEnum.SLA_CAPACITY, ConstraintKindEnum.SLA_LATENCY,
+            ConstraintKindEnum.SLA_AVAILABILITY, ConstraintKindEnum.SLA_ISOLATION_LEVEL
+        }:
+            constraint_name = '{:s}:{:s}:'.format(parent_kind, kind.value)
+        else:
+            MSG = 'Name for Constraint({:s}) cannot be inferred (service_uuid={:s}, slice_uuid={:s})'
+            str_constraint = grpc_message_to_json_string(constraint)
+            raise Exception(MSG.format(str_constraint, str(service_uuid), str(slice_uuid)))
+
+        constraint_uuid = get_uuid_from_string(constraint_name, prefix_for_name=parent_uuid)
+        dict_constraint['constraint_uuid'] = constraint_uuid
+
         dict_constraints.append(dict_constraint)
     return dict_constraints
 
 def upsert_constraints(
     session : Session, constraints : List[Dict],
     service_uuid : Optional[str] = None, slice_uuid : Optional[str] = None
-) -> List[bool]:
-    # TODO: do not delete all constraints; just add-remove as needed
-    stmt = delete(ConstraintModel)
-    if service_uuid is not None: stmt = stmt.where(ConstraintModel.service_uuid == service_uuid)
-    if slice_uuid   is not None: stmt = stmt.where(ConstraintModel.slice_uuid   == slice_uuid  )
-    session.execute(stmt)
+) -> bool:
+    uuids_to_upsert : Dict[str, int] = dict()
+    rules_to_upsert : List[Dict] = list()
+    for constraint in constraints:
+        constraint_uuid = constraint['constraint_uuid']
+        position = uuids_to_upsert.get(constraint_uuid)
+        if position is None:
+            # if not added, add it
+            rules_to_upsert.append(constraint)
+            uuids_to_upsert[constraint_uuid] = len(rules_to_upsert) - 1
+        else:
+            # if already added, update occurrence
+            rules_to_upsert[position] = constraint
+
+    # Delete all constraints not in uuids_to_upsert
+    delete_affected = False
+    if len(uuids_to_upsert) > 0:
+        stmt = delete(ConstraintModel)
+        if service_uuid is not None: stmt = stmt.where(ConstraintModel.service_uuid == service_uuid)
+        if slice_uuid   is not None: stmt = stmt.where(ConstraintModel.slice_uuid   == slice_uuid  )
+        stmt = stmt.where(ConstraintModel.constraint_uuid.not_in(set(uuids_to_upsert.keys())))
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('delete stmt={:s}'.format(str(str_stmt)))
+        constraint_deletes = session.execute(stmt)
+        LOGGER.warning('constraint_deletes.rowcount={:s}'.format(str(constraint_deletes.rowcount)))
+        delete_affected = int(constraint_deletes.rowcount) > 0
 
-    constraint_updates = []
+    upsert_affected = False
     if len(constraints) > 0:
         stmt = insert(ConstraintModel).values(constraints)
-        #stmt = stmt.on_conflict_do_update(
-        #    index_elements=[ConstraintModel.configrule_uuid],
-        #    set_=dict(
-        #        updated_at = stmt.excluded.updated_at,
-        #    )
-        #)
+        stmt = stmt.on_conflict_do_update(
+            index_elements=[ConstraintModel.constraint_uuid],
+            set_=dict(
+                position   = stmt.excluded.position,
+                data       = stmt.excluded.data,
+                updated_at = stmt.excluded.updated_at,
+            )
+        )
         stmt = stmt.returning(ConstraintModel.created_at, ConstraintModel.updated_at)
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('upsert stmt={:s}'.format(str(str_stmt)))
         constraint_updates = session.execute(stmt).fetchall()
+        upsert_affected = any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
 
-    return constraint_updates
-
-
-#    def set_constraint(self, db_constraints: ConstraintsModel, grpc_constraint: Constraint, position: int
-#    ) -> Tuple[Union_ConstraintModel, bool]:
-#        with self.session() as session:
-#
-#            grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-#
-#            parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-#            if parser is None:
-#                raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-#                    grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#            # create specific constraint
-#            constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(grpc_constraint)
-#            str_constraint_id = str(uuid.uuid4())
-#            LOGGER.info('str_constraint_id: {}'.format(str_constraint_id))
-#            # str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-#            # str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-#
-#            # result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-#            #     database, constraint_class, str_constraint_key, constraint_data)
-#            constraint_data[constraint_class.main_pk_name()] = str_constraint_id
-#            db_new_constraint = constraint_class(**constraint_data)
-#            result: Tuple[Union_ConstraintModel, bool] = self.database.create_or_update(db_new_constraint)
-#            db_specific_constraint, updated = result
-#
-#            # create generic constraint
-#            # constraint_fk_field_name = 'constraint_uuid'.format(constraint_kind.value)
-#            constraint_data = {
-#                'constraints_uuid': db_constraints.constraints_uuid, 'position': position, 'kind': constraint_kind
-#            }
-#
-#            db_new_constraint = ConstraintModel(**constraint_data)
-#            result: Tuple[Union_ConstraintModel, bool] = self.database.create_or_update(db_new_constraint)
-#            db_constraint, updated = result
-#
-#            return db_constraint, updated
-#
-#    def set_constraints(self, service_uuid: str, constraints_name : str, grpc_constraints
-#    ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-#        with self.session() as session:
-#            # str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
-#            # result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-#            result = session.query(ConstraintsModel).filter_by(constraints_uuid=service_uuid).one_or_none()
-#            created = None
-#            if result:
-#                created = True
-#            session.query(ConstraintsModel).filter_by(constraints_uuid=service_uuid).one_or_none()
-#            db_constraints = ConstraintsModel(constraints_uuid=service_uuid)
-#            session.add(db_constraints)
-#
-#            db_objects = [(db_constraints, created)]
-#
-#            for position,grpc_constraint in enumerate(grpc_constraints):
-#                result : Tuple[ConstraintModel, bool] = self.set_constraint(
-#                    db_constraints, grpc_constraint, position)
-#                db_constraint, updated = result
-#                db_objects.append((db_constraint, updated))
-#
-#            return db_objects
+    return delete_affected or upsert_affected
diff --git a/src/context/service/database/Device.py b/src/context/service/database/Device.py
index 07d1c76061d8b228cf39ddc06d358190bfce48fd..cde8751b417072f3f0de53217dab99308ea882f3 100644
--- a/src/context/service/database/Device.py
+++ b/src/context/service/database/Device.py
@@ -20,6 +20,7 @@ from sqlalchemy_cockroachdb import run_transaction
 from typing import Dict, List, Optional, Set, Tuple
 from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException
 from common.proto.context_pb2 import Device, DeviceId
+from common.tools.grpc.Tools import grpc_message_to_json_string
 from common.tools.object_factory.Device import json_device_id
 from .models.DeviceModel import DeviceModel
 from .models.EndPointModel import EndPointModel
@@ -136,6 +137,7 @@ def device_set(db_engine : Engine, request : Device) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        updated_endpoints = False
         if len(endpoints_data) > 0:
             stmt = insert(EndPointModel).values(endpoints_data)
             stmt = stmt.on_conflict_do_update(
@@ -149,17 +151,16 @@ def device_set(db_engine : Engine, request : Device) -> Tuple[Dict, bool]:
             )
             stmt = stmt.returning(EndPointModel.created_at, EndPointModel.updated_at)
             endpoint_updates = session.execute(stmt).fetchall()
-            updated = updated or any([(updated_at > created_at) for created_at,updated_at in endpoint_updates])
+            updated_endpoints = any([(updated_at > created_at) for created_at,updated_at in endpoint_updates])
 
         if len(related_topologies) > 0:
             session.execute(insert(TopologyDeviceModel).values(related_topologies).on_conflict_do_nothing(
                 index_elements=[TopologyDeviceModel.topology_uuid, TopologyDeviceModel.device_uuid]
             ))
 
-        configrule_updates = upsert_config_rules(session, config_rules, device_uuid=device_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
+        changed_config_rules = upsert_config_rules(session, config_rules, device_uuid=device_uuid)
 
-        return updated
+        return updated or updated_endpoints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_device_id(device_uuid),updated
diff --git a/src/context/service/database/Engine.py b/src/context/service/database/Engine.py
index a37ec0c1e88f6fd8b7f46fd5f6b0383d32cde2b8..46c1b8c25af59586ddeeffd693c3f4fe7546d6cb 100644
--- a/src/context/service/database/Engine.py
+++ b/src/context/service/database/Engine.py
@@ -24,15 +24,16 @@ CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster
 class Engine:
     @staticmethod
     def get_engine() -> sqlalchemy.engine.Engine:
-        CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
-        CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
-        CRDB_DATABASE  = get_setting('CRDB_DATABASE')
-        CRDB_USERNAME  = get_setting('CRDB_USERNAME')
-        CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
-        CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
-
-        crdb_uri = CRDB_URI_TEMPLATE.format(
-            CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
+        crdb_uri = get_setting('CRDB_URI', default=None)
+        if crdb_uri is None:
+            CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
+            CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
+            CRDB_DATABASE  = get_setting('CRDB_DATABASE')
+            CRDB_USERNAME  = get_setting('CRDB_USERNAME')
+            CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
+            CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
+            crdb_uri = CRDB_URI_TEMPLATE.format(
+                CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
 
         try:
             engine = sqlalchemy.create_engine(
diff --git a/src/context/service/database/Service.py b/src/context/service/database/Service.py
index 76a83053587aa8beb44c4d96771c3cfa46945b07..9b9e9a621446518aa0178d893decf46fbe427809 100644
--- a/src/context/service/database/Service.py
+++ b/src/context/service/database/Service.py
@@ -118,6 +118,7 @@ def service_set(db_engine : Engine, request : Service) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        # TODO: check if endpoints are changed
         if len(service_endpoints_data) > 0:
             stmt = insert(ServiceEndPointModel).values(service_endpoints_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -125,13 +126,10 @@ def service_set(db_engine : Engine, request : Service) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
-        constraint_updates = upsert_constraints(session, constraints, service_uuid=service_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
+        changed_constraints = upsert_constraints(session, constraints, service_uuid=service_uuid)
+        changed_config_rules = upsert_config_rules(session, config_rules, service_uuid=service_uuid)
 
-        configrule_updates = upsert_config_rules(session, config_rules, service_uuid=service_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
-
-        return updated
+        return updated or changed_constraints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_service_id(service_uuid, json_context_id(context_uuid)),updated
diff --git a/src/context/service/database/Slice.py b/src/context/service/database/Slice.py
index 84bfff34391ada943fc61caaa0789e8e4d8e270f..113af9aa41420382d34a95bbd4996b795e95e065 100644
--- a/src/context/service/database/Slice.py
+++ b/src/context/service/database/Slice.py
@@ -136,6 +136,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        # TODO: check if endpoints are changed
         if len(slice_endpoints_data) > 0:
             stmt = insert(SliceEndPointModel).values(slice_endpoints_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -143,6 +144,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
+        # TODO: check if services are changed
         if len(slice_services_data) > 0:
             stmt = insert(SliceServiceModel).values(slice_services_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -150,6 +152,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
+        # TODO: check if subslices are changed
         if len(slice_subslices_data) > 0:
             stmt = insert(SliceSubSliceModel).values(slice_subslices_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -157,13 +160,10 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
-        constraint_updates = upsert_constraints(session, constraints, slice_uuid=slice_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
+        changed_constraints = upsert_constraints(session, constraints, slice_uuid=slice_uuid)
+        changed_config_rules = upsert_config_rules(session, config_rules, slice_uuid=slice_uuid)
 
-        configrule_updates = upsert_config_rules(session, config_rules, slice_uuid=slice_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
-
-        return updated
+        return updated or changed_constraints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_slice_id(slice_uuid, json_context_id(context_uuid)),updated
diff --git a/src/context/service/database/models/ConstraintModel.py b/src/context/service/database/models/ConstraintModel.py
index 51fc0b91df07a2c86c5e84692b3dd5edd21fb761..e7767b4b2dfed578f6a2a3ceef19cd2d00a8dfd6 100644
--- a/src/context/service/database/models/ConstraintModel.py
+++ b/src/context/service/database/models/ConstraintModel.py
@@ -20,11 +20,14 @@ from ._Base import _Base
 
 # Enum values should match name of field in Constraint message
 class ConstraintKindEnum(enum.Enum):
-    CUSTOM                        = 'custom'
-    ENDPOINT_LOCATION_REGION      = 'ep_loc_region'
-    ENDPOINT_LOCATION_GPSPOSITION = 'ep_loc_gpspos'
-    ENDPOINT_PRIORITY             = 'ep_priority'
-    SLA_AVAILABILITY              = 'sla_avail'
+    CUSTOM              = 'custom'
+    SCHEDULE            = 'schedule'
+    ENDPOINT_LOCATION   = 'endpoint_location'
+    ENDPOINT_PRIORITY   = 'endpoint_priority'
+    SLA_CAPACITY        = 'sla_capacity'
+    SLA_LATENCY         = 'sla_latency'
+    SLA_AVAILABILITY    = 'sla_availability'
+    SLA_ISOLATION_LEVEL = 'sla_isolation'
 
 class ConstraintModel(_Base):
     __tablename__ = 'constraint'
@@ -46,354 +49,3 @@ class ConstraintModel(_Base):
 
     def dump(self) -> Dict:
         return {self.kind.value: json.loads(self.data)}
-
-
-#import logging, operator
-#from typing import Dict, List, Optional, Tuple, Type, Union
-#from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
-#from common.orm.backend.Tools import key_to_str
-#from common.proto.context_pb2 import Constraint
-#from common.tools.grpc.Tools import grpc_message_to_json_string
-#from .EndPointModel import EndPointModel
-#from .Tools import fast_hasher
-#from sqlalchemy import Column, ForeignKey, String, Float, CheckConstraint, Integer, Boolean, Enum
-#from sqlalchemy.dialects.postgresql import UUID
-#from context.service.database.models._Base import Base
-#import enum
-#
-#LOGGER = logging.getLogger(__name__)
-#
-#def remove_dict_key(dictionary : Dict, key : str):
-#    dictionary.pop(key, None)
-#    return dictionary
-#
-#class ConstraintsModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'Constraints'
-#    constraints_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraints_uuid'
-#
-#
-#    def dump(self, constraints) -> List[Dict]:
-#        constraints = sorted(constraints, key=operator.itemgetter('position'))
-#        return [remove_dict_key(constraint, 'position') for constraint in constraints]
-#
-#
-#class ConstraintCustomModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintCustom'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    constraint_type = Column(String, nullable=False)
-#    constraint_value = Column(String, nullable=False)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        return {'custom': {'constraint_type': self.constraint_type, 'constraint_value': self.constraint_value}}
-#
-#
-#Union_ConstraintEndpoint = Union[
-#    'ConstraintEndpointLocationGpsPositionModel', 'ConstraintEndpointLocationRegionModel',
-#    'ConstraintEndpointPriorityModel'
-#]
-#
-#class ConstraintEndpointLocationRegionModel(Model): # pylint: disable=abstract-method
-#    endpoint_fk = ForeignKeyField(EndPointModel)
-#    region = StringField(required=True, allow_empty=False)
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_location': {'endpoint_id': json_endpoint_id, 'location': {'region': self.region}}}
-#
-## def dump_endpoint_id(endpoint_constraint: Union_ConstraintEndpoint):
-##     db_endpoints_pks = list(endpoint_constraint.references(EndPointModel))
-##     num_endpoints = len(db_endpoints_pks)
-##     if num_endpoints != 1:
-##         raise Exception('Wrong number({:d}) of associated Endpoints with constraint'.format(num_endpoints))
-##     db_endpoint = EndPointModel(endpoint_constraint.database, db_endpoints_pks[0])
-##     return db_endpoint.dump_id()
-#
-#
-#class ConstraintEndpointLocationRegionModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointLocationRegion'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    region = Column(String, nullable=False)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        return {'endpoint_location': {'endpoint_id': endpoint.dump_id(), 'region': self.region}}
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        gps_position = {'latitude': self.latitude, 'longitude': self.longitude}
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_location': {'endpoint_id': json_endpoint_id, 'location': {'gps_position': gps_position}}}
-#
-#class ConstraintEndpointPriorityModel(Model): # pylint: disable=abstract-method
-#    endpoint_fk = ForeignKeyField(EndPointModel)
-#    priority = IntegerField(required=True, min_value=0)
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_priority': {'endpoint_id': json_endpoint_id, 'priority': self.priority}}
-#
-#class ConstraintEndpointLocationGpsPositionModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointLocationGpsPosition'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    latitude = Column(Float, CheckConstraint('latitude > -90.0 AND latitude < 90.0'), nullable=False)
-#    longitude = Column(Float, CheckConstraint('longitude > -90.0 AND longitude < 90.0'), nullable=False)
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        gps_position = {'latitude': self.latitude, 'longitude': self.longitude}
-#        return {'endpoint_location': {'endpoint_id': endpoint.dump_id(), 'gps_position': gps_position}}
-#
-#
-#class ConstraintEndpointPriorityModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointPriority'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    # endpoint_fk = ForeignKeyField(EndPointModel)
-#    # priority = FloatField(required=True)
-#    priority = Column(Float, nullable=False)
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        return {'endpoint_priority': {'endpoint_id': endpoint.dump_id(), 'priority': self.priority}}
-#
-#
-#class ConstraintSlaAvailabilityModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintSlaAvailability'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    # num_disjoint_paths = IntegerField(required=True, min_value=1)
-#    num_disjoint_paths = Column(Integer, CheckConstraint('num_disjoint_paths > 1'), nullable=False)
-#    # all_active = BooleanField(required=True)
-#    all_active = Column(Boolean, nullable=False)
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        return {'sla_availability': {'num_disjoint_paths': self.num_disjoint_paths, 'all_active': self.all_active}}
-#
-#Union_SpecificConstraint = Union[
-#    ConstraintCustomModel, ConstraintEndpointLocationRegionModel, ConstraintEndpointLocationGpsPositionModel,
-#    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel,
-#]
-#
-#class ConstraintModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'Constraint'
-#    # pk = PrimaryKeyField()
-#    # constraints_fk = ForeignKeyField(ConstraintsModel)
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    constraints_uuid = Column(UUID(as_uuid=False), ForeignKey("Constraints.constraints_uuid"), primary_key=True)
-#    # kind = EnumeratedField(ConstraintKindEnum)
-#    kind = Column(Enum(ConstraintKindEnum, create_constraint=False, native_enum=False))
-#    # position = IntegerField(min_value=0, required=True)
-#    position = Column(Integer, CheckConstraint('position >= 0'), nullable=False)
-#    # constraint_custom_fk        = ForeignKeyField(ConstraintCustomModel, required=False)
-#    constraint_custom = Column(UUID(as_uuid=False), ForeignKey("ConstraintCustom.constraint_uuid"))
-#    # constraint_ep_loc_region_fk = ForeignKeyField(ConstraintEndpointLocationRegionModel, required=False)
-#    constraint_ep_loc_region = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointLocationRegion.constraint_uuid"))
-#    # constraint_ep_loc_gpspos_fk = ForeignKeyField(ConstraintEndpointLocationGpsPositionModel, required=False)
-#    constraint_ep_loc_gpspos = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointLocationGpsPosition.constraint_uuid"))
-#    # constraint_ep_priority_fk   = ForeignKeyField(ConstraintEndpointPriorityModel, required=False)
-#    constraint_ep_priority = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointPriority.constraint_uuid"),)
-#    # constraint_sla_avail_fk     = ForeignKeyField(ConstraintSlaAvailabilityModel, required=False)
-#    constraint_sla_avail = Column(UUID(as_uuid=False), ForeignKey("ConstraintSlaAvailability.constraint_uuid"))
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    # def delete(self) -> None:
-#    #     field_name = 'constraint_{:s}_fk'.format(str(self.kind.value))
-#    #     specific_fk_value : Optional[ForeignKeyField] = getattr(self, field_name, None)
-#    #     if specific_fk_value is None:
-#    #         raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
-#    #     specific_fk_class = getattr(ConstraintModel, field_name, None)
-#    #     foreign_model_class : Model = specific_fk_class.foreign_model
-#    #     super().delete()
-#    #     get_object(self.database, foreign_model_class, str(specific_fk_value)).delete()
-#
-#    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
-#        field_name = 'constraint_{:s}'.format(str(self.kind.value))
-#        specific_fk_value = getattr(self, field_name, None)
-#        if specific_fk_value is None:
-#            raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
-#        specific_fk_class = getattr(ConstraintModel, field_name, None)
-#        foreign_model_class: Base = specific_fk_class.foreign_model
-#        constraint: Union_SpecificConstraint = get_object(self.database, foreign_model_class, str(specific_fk_value))
-#        result = constraint.dump()
-#        if include_position:
-#            result['position'] = self.position
-#        return result
-#
-#Tuple_ConstraintSpecs = Tuple[Type, str, Dict, ConstraintKindEnum]
-#
-#def parse_constraint_custom(grpc_constraint) -> Tuple_ConstraintSpecs:
-#    constraint_class = ConstraintCustomModel
-#    str_constraint_id = grpc_constraint.custom.constraint_type
-#    constraint_data = {
-#        'constraint_type' : grpc_constraint.custom.constraint_type,
-#        'constraint_value': grpc_constraint.custom.constraint_value,
-#    }
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.CUSTOM
-#
-#def parse_constraint_endpoint_location(db_endpoint, grpc_constraint) -> Tuple_ConstraintSpecs:
-#    grpc_endpoint_id = grpc_constraint.endpoint_location.endpoint_id
-#    # str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#
-#    str_constraint_id = db_endpoint.endpoint_uuid
-#    constraint_data = {'endpoint_fk': db_endpoint}
-#
-#    grpc_location = grpc_constraint.endpoint_location.location
-#    location_kind = str(grpc_location.WhichOneof('location'))
-#    if location_kind == 'region':
-#        constraint_class = ConstraintEndpointLocationRegionModel
-#        constraint_data.update({'region': grpc_location.region})
-#        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_REGION
-#    elif location_kind == 'gps_position':
-#        constraint_class = ConstraintEndpointLocationGpsPositionModel
-#        gps_position = grpc_location.gps_position
-#        constraint_data.update({'latitude': gps_position.latitude, 'longitude': gps_position.longitude})
-#        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_GPSPOSITION
-#    else:
-#        MSG = 'Location kind {:s} in Constraint of kind endpoint_location is not implemented: {:s}'
-#        raise NotImplementedError(MSG.format(location_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#def parse_constraint_endpoint_priority(db_endpoint, grpc_constraint) -> Tuple_ConstraintSpecs:
-#    grpc_endpoint_id = grpc_constraint.endpoint_priority.endpoint_id
-#    # str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#
-#    constraint_class = ConstraintEndpointPriorityModel
-#    str_constraint_id = db_endpoint.endpoint_uuid
-#    priority = grpc_constraint.endpoint_priority.priority
-#    constraint_data = {'endpoint_fk': db_endpoint, 'priority': priority}
-#
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_PRIORITY
-#
-#def parse_constraint_sla_availability(grpc_constraint) -> Tuple_ConstraintSpecs:
-#    constraint_class = ConstraintSlaAvailabilityModel
-#    str_constraint_id = ''
-#    constraint_data = {
-#        'num_disjoint_paths' : grpc_constraint.sla_availability.num_disjoint_paths,
-#        'all_active': grpc_constraint.sla_availability.all_active,
-#    }
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.SLA_AVAILABILITY
-#
-#CONSTRAINT_PARSERS = {
-#    'custom'            : parse_constraint_custom,
-#    'endpoint_location' : parse_constraint_endpoint_location,
-#    'endpoint_priority' : parse_constraint_endpoint_priority,
-#    'sla_availability'  : parse_constraint_sla_availability,
-#}
-#
-#Union_ConstraintModel = Union[
-#    ConstraintCustomModel, ConstraintEndpointLocationGpsPositionModel, ConstraintEndpointLocationRegionModel,
-#    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel
-#]
-#
-## def set_constraint(
-##     db_constraints : ConstraintsModel, grpc_constraint : Constraint, position : int
-## ) -> Tuple[Union_ConstraintModel, bool]:
-##     grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-##
-##     parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-##     if parser is None:
-##         raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-##             grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-##
-##     # create specific constraint
-##     constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(database, grpc_constraint)
-##     str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-##     str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-##     result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-##         database, constraint_class, str_constraint_key, constraint_data)
-##     db_specific_constraint, updated = result
-##
-##     # create generic constraint
-##     constraint_fk_field_name = 'constraint_{:s}_fk'.format(constraint_kind.value)
-##     constraint_data = {
-##         'constraints_fk': db_constraints, 'position': position, 'kind': constraint_kind,
-##         constraint_fk_field_name: db_specific_constraint
-##     }
-##     result : Tuple[ConstraintModel, bool] = update_or_create_object(
-##         database, ConstraintModel, str_constraint_key, constraint_data)
-##     db_constraint, updated = result
-##
-##     return db_constraint, updated
-##
-## def set_constraints(
-##     database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
-## ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-##
-##     str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
-##     result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-##     db_constraints, created = result
-##
-##     db_objects = [(db_constraints, created)]
-##
-##     for position,grpc_constraint in enumerate(grpc_constraints):
-##         result : Tuple[ConstraintModel, bool] = set_constraint(
-##             database, db_constraints, grpc_constraint, position)
-##         db_constraint, updated = result
-##         db_objects.append((db_constraint, updated))
-##
-##     return db_objects
-#def set_constraint(
-#    database : Database, db_constraints : ConstraintsModel, grpc_constraint : Constraint, position : int
-#) -> Tuple[Union_ConstraintModel, bool]:
-#    grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-#
-#    parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-#    if parser is None:
-#        raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-#            grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#    # create specific constraint
-#    constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(database, grpc_constraint)
-#    str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-#    str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-#    result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-#        database, constraint_class, str_constraint_key, constraint_data)
-#    db_specific_constraint, updated = result
-#
-#    # create generic constraint
-#    constraint_fk_field_name = 'constraint_{:s}_fk'.format(constraint_kind.value)
-#    constraint_data = {
-#        'constraints_fk': db_constraints, 'position': position, 'kind': constraint_kind,
-#        constraint_fk_field_name: db_specific_constraint
-#    }
-#    result : Tuple[ConstraintModel, bool] = update_or_create_object(
-#        database, ConstraintModel, str_constraint_key, constraint_data)
-#    db_constraint, updated = result
-#
-#    return db_constraint, updated
-#
-#def set_constraints(
-#    database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
-#) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-#
-#    str_constraints_key = key_to_str([constraints_name, db_parent_pk], separator=':')
-#    result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-#    db_constraints, created = result
-#
-#    db_objects = [(db_constraints, created)]
-#
-#    for position,grpc_constraint in enumerate(grpc_constraints):
-#        result : Tuple[ConstraintModel, bool] = set_constraint(
-#            database, db_constraints, grpc_constraint, position)
-#        db_constraint, updated = result
-#        db_objects.append((db_constraint, updated))
-#
-#    return db_objects
diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index 179b7795b541801afb1db63f1ab532253a8cc851..5b537609b89bde3fe5812662379c629665ab56aa 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -121,11 +121,7 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
                 for error in errors: LOGGER.error(error)
                 raise OperationFailedException('ConfigureDevice', extra_details=errors)
 
-            # Rules updated by configure_rules() and deconfigure_rules() methods.
-            # Code to be removed soon if not needed.
-            del device.device_config.config_rules[:]
-            populate_config_rules(device, driver)
-
+            # Note: Rules are updated by configure_rules() and deconfigure_rules() methods.
             device_id = context_client.SetDevice(device)
             return device_id
         finally:
diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
index d2cd0b48104857ac8a4525feb28a4ca480e0aec1..9245e08a80fab36372dd008b4ed4f6b7e0d2330a 100644
--- a/src/device/service/Tools.py
+++ b/src/device/service/Tools.py
@@ -19,6 +19,7 @@ from common.method_wrappers.ServiceExceptions import InvalidArgumentException
 from common.proto.context_pb2 import ConfigActionEnum, Device, DeviceConfig
 from common.proto.device_pb2 import MonitoringSettings
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
+from common.tools.grpc.ConfigRules import update_config_rule_custom
 from common.tools.grpc.Tools import grpc_message_to_json
 from .driver_api._Driver import _Driver, RESOURCE_ENDPOINTS
 from .monitoring.MonitoringLoops import MonitoringLoops
@@ -107,7 +108,7 @@ def populate_endpoints(device : Device, driver : _Driver, monitoring_loops : Mon
     return errors
 
 def _raw_config_rules_to_grpc(
-    device_uuid : str, device_config : DeviceConfig, error_template : str, default_config_action : ConfigActionEnum,
+    device_uuid : str, device_config : DeviceConfig, error_template : str, config_action : ConfigActionEnum,
     raw_config_rules : List[Tuple[str, Union[Any, Exception, None]]]
 ) -> List[str]:
     errors : List[str] = list()
@@ -117,11 +118,9 @@ def _raw_config_rules_to_grpc(
             errors.append(error_template.format(device_uuid, str(resource_key), str(resource_value)))
             continue
 
-        config_rule = device_config.config_rules.add()
-        config_rule.action = default_config_action
-        config_rule.custom.resource_key = resource_key
-        config_rule.custom.resource_value = \
-            resource_value if isinstance(resource_value, str) else json.dumps(resource_value, sort_keys=True)
+        resource_value = json.loads(resource_value) if isinstance(resource_value, str) else resource_value
+        resource_value = {field_name : (field_value, False) for field_name,field_value in resource_value.items()}
+        update_config_rule_custom(device_config.config_rules, resource_key, resource_value, new_action=config_action)
 
     return errors
 
@@ -169,7 +168,7 @@ def compute_rules_to_add_delete(
     return resources_to_set, resources_to_delete
 
 def configure_rules(device : Device, driver : _Driver, resources_to_set : List[Tuple[str, Any]]) -> List[str]:
-    device_uuid = device.device_id.device_uuid.uuid
+    if len(resources_to_set) == 0: return []
 
     results_setconfig = driver.SetConfig(resources_to_set)
     results_setconfig = [
@@ -177,12 +176,12 @@ def configure_rules(device : Device, driver : _Driver, resources_to_set : List[T
         for (resource_key, resource_value), result in zip(resources_to_set, results_setconfig)
     ]
 
-    device_config = DeviceConfig() # ignored; added at the end of ConfigureDevice
+    device_uuid = device.device_id.device_uuid.uuid
     return _raw_config_rules_to_grpc(
-        device_uuid, device_config, ERROR_SET, ConfigActionEnum.CONFIGACTION_SET, results_setconfig)
+        device_uuid, device.device_config, ERROR_SET, ConfigActionEnum.CONFIGACTION_SET, results_setconfig)
 
 def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : List[Tuple[str, Any]]) -> List[str]:
-    device_uuid = device.device_id.device_uuid.uuid
+    if len(resources_to_delete) == 0: return []
 
     results_deleteconfig = driver.DeleteConfig(resources_to_delete)
     results_deleteconfig = [
@@ -190,9 +189,9 @@ def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : L
         for (resource_key, resource_value), result in zip(resources_to_delete, results_deleteconfig)
     ]
 
-    device_config = DeviceConfig() # ignored; added at the end of ConfigureDevice
+    device_uuid = device.device_id.device_uuid.uuid
     return _raw_config_rules_to_grpc(
-        device_uuid, device_config, ERROR_DELETE, ConfigActionEnum.CONFIGACTION_DELETE, results_deleteconfig)
+        device_uuid, device.device_config, ERROR_DELETE, ConfigActionEnum.CONFIGACTION_DELETE, results_deleteconfig)
 
 def subscribe_kpi(request : MonitoringSettings, driver : _Driver, monitoring_loops : MonitoringLoops) -> List[str]:
     kpi_uuid = request.kpi_id.kpi_id.uuid
diff --git a/src/device/service/drivers/transport_api/Tools.py b/src/device/service/drivers/transport_api/Tools.py
index 8989294194203d384348f4d2499252555fcb9aaa..e54e6497ab5fcaad7ab6dfde376e830953b8942a 100644
--- a/src/device/service/drivers/transport_api/Tools.py
+++ b/src/device/service/drivers/transport_api/Tools.py
@@ -12,7 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json, logging, requests
+import json, logging, operator, requests
+from requests.auth import HTTPBasicAuth
+from typing import Optional
 from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
 
 LOGGER = logging.getLogger(__name__)
@@ -28,43 +30,57 @@ def find_key(resource, key):
     return json.loads(resource[1])[key]
 
 
-def config_getter(root_url, resource_key, timeout):
+def config_getter(
+    root_url : str, resource_key : str, auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None
+):
     url = '{:s}/restconf/data/tapi-common:context'.format(root_url)
     result = []
     try:
-        response = requests.get(url, timeout=timeout)
+        response = requests.get(url, timeout=timeout, verify=False, auth=auth)
     except requests.exceptions.Timeout:
         LOGGER.exception('Timeout connecting {:s}'.format(url))
+        return result
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception retrieving {:s}'.format(resource_key))
         result.append((resource_key, e))
-    else:
+        return result
+
+    try:
         context = json.loads(response.content)
+    except Exception as e:  # pylint: disable=broad-except
+        LOGGER.warning('Unable to decode reply: {:s}'.format(str(response.content)))
+        result.append((resource_key, e))
+        return result
+
+    if resource_key != RESOURCE_ENDPOINTS: return result
+
+    if 'tapi-common:context' in context:
+        context = context['tapi-common:context']
+    elif 'context' in context:
+        context = context['context']
 
-        if resource_key == RESOURCE_ENDPOINTS:
-            if 'tapi-common:context' in context:
-                context = context['tapi-common:context']
-            elif 'context' in context:
-                context = context['context']
-            for sip in context['service-interface-point']:
-                layer_protocol_name = sip.get('layer-protocol-name', '?')
-                supportable_spectrum = sip.get('tapi-photonic-media:media-channel-service-interface-point-spec', {})
-                supportable_spectrum = supportable_spectrum.get('mc-pool', {})
-                supportable_spectrum = supportable_spectrum.get('supportable-spectrum', [])
-                supportable_spectrum = supportable_spectrum[0] if len(supportable_spectrum) == 1 else {}
-                grid_type = supportable_spectrum.get('frequency-constraint', {}).get('grid-type')
-                granularity = supportable_spectrum.get('frequency-constraint', {}).get('adjustment-granularity')
-                direction = sip.get('direction', '?')
-                endpoint_type = ':'.join([layer_protocol_name, grid_type, granularity, direction])
-                endpoint_url = '/endpoints/endpoint[{:s}]'.format(sip['uuid'])
-                endpoint_data = {'uuid': sip['uuid'], 'type': endpoint_type}
-                result.append((endpoint_url, endpoint_data))
+    for sip in context['service-interface-point']:
+        layer_protocol_name = sip.get('layer-protocol-name', '?')
+        supportable_spectrum = sip.get('tapi-photonic-media:media-channel-service-interface-point-spec', {})
+        supportable_spectrum = supportable_spectrum.get('mc-pool', {})
+        supportable_spectrum = supportable_spectrum.get('supportable-spectrum', [])
+        supportable_spectrum = supportable_spectrum[0] if len(supportable_spectrum) == 1 else {}
+        grid_type = supportable_spectrum.get('frequency-constraint', {}).get('grid-type')
+        granularity = supportable_spectrum.get('frequency-constraint', {}).get('adjustment-granularity')
+        direction = sip.get('direction', '?')
+        endpoint_type = [layer_protocol_name, grid_type, granularity, direction]
+        str_endpoint_type = ':'.join(filter(lambda i: operator.is_not(i, None), endpoint_type))
+        endpoint_url = '/endpoints/endpoint[{:s}]'.format(sip['uuid'])
+        endpoint_data = {'uuid': sip['uuid'], 'type': str_endpoint_type}
+        result.append((endpoint_url, endpoint_data))
 
     return result
 
 def create_connectivity_service(
-    root_url, timeout, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name,
-    layer_protocol_qualifier):
+    root_url, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name,
+    layer_protocol_qualifier,
+    auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None
+):
 
     url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context'.format(root_url)
     headers = {'content-type': 'application/json'}
@@ -105,7 +121,8 @@ def create_connectivity_service(
     results = []
     try:
         LOGGER.info('Connectivity service {:s}: {:s}'.format(str(uuid), str(data)))
-        response = requests.post(url=url, data=json.dumps(data), timeout=timeout, headers=headers)
+        response = requests.post(
+            url=url, data=json.dumps(data), timeout=timeout, headers=headers, verify=False, auth=auth)
         LOGGER.info('TAPI response: {:s}'.format(str(response)))
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception creating ConnectivityService(uuid={:s}, data={:s})'.format(str(uuid), str(data)))
@@ -117,12 +134,12 @@ def create_connectivity_service(
         results.append(response.status_code in HTTP_OK_CODES)
     return results
 
-def delete_connectivity_service(root_url, timeout, uuid):
+def delete_connectivity_service(root_url, uuid, auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None):
     url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context/connectivity-service={:s}'
     url = url.format(root_url, uuid)
     results = []
     try:
-        response = requests.delete(url=url, timeout=timeout)
+        response = requests.delete(url=url, timeout=timeout, verify=False, auth=auth)
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception deleting ConnectivityService(uuid={:s})'.format(str(uuid)))
         results.append(e)
diff --git a/src/device/service/drivers/transport_api/TransportApiDriver.py b/src/device/service/drivers/transport_api/TransportApiDriver.py
index 71d7aa33678cb945443565e1766de3234d947ef8..874d4aeb6d8ac28703c8b55201b069d3bf35ef72 100644
--- a/src/device/service/drivers/transport_api/TransportApiDriver.py
+++ b/src/device/service/drivers/transport_api/TransportApiDriver.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging, requests, threading
+from requests.auth import HTTPBasicAuth
 from typing import Any, Iterator, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
 from common.type_checkers.Checkers import chk_string, chk_type
@@ -29,7 +30,11 @@ class TransportApiDriver(_Driver):
         self.__lock = threading.Lock()
         self.__started = threading.Event()
         self.__terminate = threading.Event()
-        self.__tapi_root = 'http://' + address + ':' + str(port)
+        username = settings.get('username')
+        password = settings.get('password')
+        self.__auth = HTTPBasicAuth(username, password) if username is not None and password is not None else None
+        scheme = settings.get('scheme', 'http')
+        self.__tapi_root = '{:s}://{:s}:{:d}'.format(scheme, address, int(port))
         self.__timeout = int(settings.get('timeout', 120))
 
     def Connect(self) -> bool:
@@ -37,7 +42,7 @@ class TransportApiDriver(_Driver):
         with self.__lock:
             if self.__started.is_set(): return True
             try:
-                requests.get(url, timeout=self.__timeout)
+                requests.get(url, timeout=self.__timeout, verify=False, auth=self.__auth)
             except requests.exceptions.Timeout:
                 LOGGER.exception('Timeout connecting {:s}'.format(str(self.__tapi_root)))
                 return False
@@ -67,7 +72,8 @@ class TransportApiDriver(_Driver):
             for i, resource_key in enumerate(resource_keys):
                 str_resource_name = 'resource_key[#{:d}]'.format(i)
                 chk_string(str_resource_name, resource_key, allow_empty=False)
-                results.extend(config_getter(self.__tapi_root, resource_key, self.__timeout))
+                results.extend(config_getter(
+                    self.__tapi_root, resource_key, timeout=self.__timeout, auth=self.__auth))
         return results
 
     @metered_subclass_method(METRICS_POOL)
@@ -89,8 +95,8 @@ class TransportApiDriver(_Driver):
                 direction = find_key(resource, 'direction')
 
                 data = create_connectivity_service(
-                    self.__tapi_root, self.__timeout, uuid, input_sip, output_sip, direction, capacity_value,
-                    capacity_unit, layer_protocol_name, layer_protocol_qualifier)
+                    self.__tapi_root, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit,
+                    layer_protocol_name, layer_protocol_qualifier, timeout=self.__timeout, auth=self.__auth)
                 results.extend(data)
         return results
 
@@ -102,7 +108,8 @@ class TransportApiDriver(_Driver):
             for resource in resources:
                 LOGGER.info('resource = {:s}'.format(str(resource)))
                 uuid = find_key(resource, 'uuid')
-                results.extend(delete_connectivity_service(self.__tapi_root, self.__timeout, uuid))
+                results.extend(delete_connectivity_service(
+                    self.__tapi_root, uuid, timeout=self.__timeout, auth=self.__auth))
         return results
 
     @metered_subclass_method(METRICS_POOL)
diff --git a/src/device/tests/test_unitary_emulated.py b/src/device/tests/test_unitary_emulated.py
index 8a1b30a6ec01ec004c92be97d27e318e427f4cbe..0e5f8e22de07cd53278b15ca3203661b27881472 100644
--- a/src/device/tests/test_unitary_emulated.py
+++ b/src/device/tests/test_unitary_emulated.py
@@ -156,8 +156,8 @@ def test_device_emulated_configure(
         for config_rule in device_data.device_config.config_rules
         if config_rule.WhichOneof('config_rule') == 'custom'
     ]
-    #LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
-    #    '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
     RESULTING_CONFIG_ENDPOINTS = {cr['custom']['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ENDPOINTS)}
     for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
         values = json.loads(RESULTING_CONFIG_ENDPOINTS[endpoint_cooked[0]]['custom']['resource_value'])
@@ -168,14 +168,14 @@ def test_device_emulated_configure(
         config_rule = (
             ConfigActionEnum.Name(config_rule['action']), config_rule['custom']['resource_key'],
             json.loads(json.dumps(config_rule['custom']['resource_value'])))
-        #LOGGER.info('config_rule: {:s} {:s} = {:s}'.format(*config_rule))
+        LOGGER.info('A config_rule: {:s} {:s} = {:s}'.format(*config_rule))
         assert config_rule in config_rules
     for config_rule in DEVICE_EMU_CONFIG_ADDRESSES:
         assert 'custom' in config_rule
         config_rule = (
             ConfigActionEnum.Name(config_rule['action']), config_rule['custom']['resource_key'],
             json.loads(json.dumps(config_rule['custom']['resource_value'])))
-        #LOGGER.info('config_rule: {:s} {:s} = {:s}'.format(*config_rule))
+        LOGGER.info('B config_rule: {:s} {:s} = {:s}'.format(*config_rule))
         assert config_rule in config_rules
 
     # Try to reconfigure...
@@ -379,7 +379,11 @@ def test_device_emulated_deconfigure(
     for config_rule in config_rules:
         assert config_rule.WhichOneof('config_rule') == 'custom'
         if config_rule.custom.resource_key.startswith('/endpoints/endpoint'): continue
-        config_rule_value = json.loads(config_rule.custom.resource_value)
+        if config_rule.custom.resource_key.startswith('_connect/'): continue
+        try:
+            config_rule_value = json.loads(config_rule.custom.resource_value)
+        except: # pylint: disable=bare-except
+            config_rule_value = config_rule.custom.resource_value
         if isinstance(config_rule_value, str) and config_rule_value.startswith('do_sampling (trigger:'): continue
         clean_config_rules.append(config_rule)
     LOGGER.info('clean_config_rules = {:s}'.format(str(clean_config_rules)))
diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index 189e78ce617c69dc4514e9e0b713dece10ef9669..221a0ddbfdbb65b1a908e134cc25f55e235b7564 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -12,23 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import threading
-from queue import Queue
-
-import grpc
-
+import grpc, logging, queue, threading
 from common.method_wrappers.ServiceExceptions import ServiceException
-from context.client.ContextClient import ContextClient
-
+from common.proto import monitoring_pb2
 from common.proto.context_pb2 import Empty, EventTypeEnum
-
+from context.client.ContextClient import ContextClient
 from monitoring.client.MonitoringClient import MonitoringClient
 from monitoring.service.MonitoringServiceServicerImpl import LOGGER
-from common.proto import monitoring_pb2
+
+LOGGER = logging.getLogger(__name__)
 
 class EventsDeviceCollector:
     def __init__(self) -> None: # pylint: disable=redefined-outer-name
-        self._events_queue = Queue()
+        self._events_queue = queue.Queue()
 
         self._context_client_grpc = ContextClient()
         self._device_stream     = self._context_client_grpc.GetDeviceEvents(Empty())
@@ -70,26 +66,31 @@ class EventsDeviceCollector:
         try:
             kpi_id_list = []
 
-            while not self._events_queue.empty():
+            while True:
                 # LOGGER.info('getting Kpi by KpiID')
-                event = self.get_event(block=True)
-                if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
-                    device = self._context_client.GetDevice(event.device_id)
-                    for j,end_point in enumerate(device.device_endpoints):
-                        #for i, value in enumerate(kpi_sample_types_pb2.KpiSampleType.values()):
-                        for i, value in enumerate(end_point.kpi_sample_types):
-                            #if value == kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_UNKNOWN: continue
-
-                            kpi_descriptor = monitoring_pb2.KpiDescriptor()
-
-                            kpi_descriptor.kpi_description                      = device.device_type
-                            kpi_descriptor.kpi_sample_type                      = value
-                            #kpi_descriptor.service_id.service_uuid.uuid         = ""
-                            kpi_descriptor.device_id.CopyFrom(device.device_id)
-                            kpi_descriptor.endpoint_id.CopyFrom(end_point.endpoint_id)
-
-                            kpi_id = self._monitoring_client.SetKpi(kpi_descriptor)
-                            kpi_id_list.append(kpi_id)
+                try:
+                    event = self.get_event(block=True, timeout=0.5)
+
+                    if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
+                        device = self._context_client.GetDevice(event.device_id)
+                        for j,end_point in enumerate(device.device_endpoints):
+                            #for i, value in enumerate(kpi_sample_types_pb2.KpiSampleType.values()):
+                            for i, value in enumerate(end_point.kpi_sample_types):
+                                #if value == kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_UNKNOWN: continue
+
+                                kpi_descriptor = monitoring_pb2.KpiDescriptor()
+
+                                kpi_descriptor.kpi_description                      = device.device_type
+                                kpi_descriptor.kpi_sample_type                      = value
+                                #kpi_descriptor.service_id.service_uuid.uuid         = ""
+                                kpi_descriptor.device_id.CopyFrom(device.device_id)
+                                kpi_descriptor.endpoint_id.CopyFrom(end_point.endpoint_id)
+
+                                kpi_id = self._monitoring_client.SetKpi(kpi_descriptor)
+                                kpi_id_list.append(kpi_id)
+                except queue.Empty:
+                    break
+
             return kpi_id_list
         except ServiceException as e:
             LOGGER.exception('ListenEvents exception')
diff --git a/src/monitoring/service/__main__.py b/src/monitoring/service/__main__.py
index 3334a860ccd94d51390ab5f5869d25e2475084ee..78764ea64e39c48d927901ad88e7cff569e7447b 100644
--- a/src/monitoring/service/__main__.py
+++ b/src/monitoring/service/__main__.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, signal, sys, threading
+import logging, signal, sys, threading, time
 from prometheus_client import start_http_server
 from common.Constants import ServiceNameEnum
 from common.Settings import (
@@ -35,6 +35,8 @@ def start_monitoring():
     events_collector = EventsDeviceCollector()
     events_collector.start()
 
+    # TODO: redesign this method to be more clear and clean
+
     # Iterate while terminate is not set
     while not terminate.is_set():
         list_new_kpi_ids = events_collector.listen_events()
@@ -48,6 +50,8 @@ def start_monitoring():
                 monitor_kpi_request.monitoring_window_s = 86400
                 monitor_kpi_request.sampling_rate_s = 30
                 events_collector._monitoring_client.MonitorKpi(monitor_kpi_request)
+        
+        time.sleep(0.5) # let other tasks run; do not overload CPU
     else:
         # Terminate is set, looping terminates
         LOGGER.warning("Stopping execution...")
diff --git a/src/monitoring/tests/Messages.py b/src/monitoring/tests/Messages.py
index f15cb5ec2c1d14ed95731cd37e54cb714b29e8b7..5530c720cffa3e386d6a2ce806b988e872321672 100644
--- a/src/monitoring/tests/Messages.py
+++ b/src/monitoring/tests/Messages.py
@@ -55,6 +55,17 @@ def create_kpi_request_c():
     _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3'  # pylint: disable=maybe-no-member
     return _create_kpi_request
 
+def create_kpi_request_d():
+    _create_kpi_request                                = monitoring_pb2.KpiDescriptor()
+    _create_kpi_request.kpi_description                = 'KPI Description Test'
+    _create_kpi_request.kpi_sample_type                = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
+    _create_kpi_request.device_id.device_uuid.uuid     = 'DEV4'     # pylint: disable=maybe-no-member
+    _create_kpi_request.service_id.service_uuid.uuid   = 'SERV4'    # pylint: disable=maybe-no-member
+    _create_kpi_request.slice_id.slice_uuid.uuid       = 'SLC4'  # pylint: disable=maybe-no-member
+    _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4'     # pylint: disable=maybe-no-member
+    _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4'  # pylint: disable=maybe-no-member
+    return _create_kpi_request
+
 def monitor_kpi_request(kpi_uuid, monitoring_window_s, sampling_rate_s):
     _monitor_kpi_request                     = monitoring_pb2.MonitorKpiRequest()
     _monitor_kpi_request.kpi_id.kpi_id.uuid  = kpi_uuid   # pylint: disable=maybe-no-member
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
index e70827cbc453d7ab754cddb6d7c2471a5f4c5bae..d892b98b8814ff3a23165fd6f893e8d136dba1e4 100644
--- a/src/monitoring/tests/test_unitary.py
+++ b/src/monitoring/tests/test_unitary.py
@@ -47,7 +47,7 @@ from monitoring.service.EventTools import EventsDeviceCollector
 from monitoring.service.MetricsDBTools import MetricsDB
 from monitoring.service.MonitoringService import MonitoringService
 #from monitoring.service.SubscriptionManager import SubscriptionManager
-from monitoring.tests.Messages import create_kpi_request, include_kpi_request, monitor_kpi_request, \
+from monitoring.tests.Messages import create_kpi_request, create_kpi_request_d, include_kpi_request, monitor_kpi_request, \
     create_kpi_request_c, kpi_query, subs_descriptor, alarm_descriptor, alarm_subscription #, create_kpi_request_b
 from monitoring.tests.Objects import DEVICE_DEV1, DEVICE_DEV1_CONNECT_RULES, DEVICE_DEV1_UUID
 
@@ -92,17 +92,32 @@ class MockContextService(GenericGrpcService):
 
 @pytest.fixture(scope='session')
 def context_service():
+    LOGGER.info('Initializing MockContextService...')
     _service = MockContextService(MOCKSERVICE_PORT)
     _service.start()
+    
+    LOGGER.info('Yielding MockContextService...')
     yield _service
+
+    LOGGER.info('Terminating MockContextService...')
+    _service.context_servicer.msg_broker.terminate()
     _service.stop()
 
+    LOGGER.info('Terminated MockContextService...')
+
 @pytest.fixture(scope='session')
 def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument
+    LOGGER.info('Initializing ContextClient...')
     _client = ContextClient()
+    
+    LOGGER.info('Yielding ContextClient...')
     yield _client
+
+    LOGGER.info('Closing ContextClient...')
     _client.close()
 
+    LOGGER.info('Closed ContextClient...')
+
 @pytest.fixture(scope='session')
 def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.info('Initializing DeviceService...')
@@ -118,12 +133,21 @@ def device_service(context_service : MockContextService): # pylint: disable=rede
     LOGGER.info('Terminating DeviceService...')
     _service.stop()
 
+    LOGGER.info('Terminated DeviceService...')
+
 @pytest.fixture(scope='session')
 def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
+    LOGGER.info('Initializing DeviceClient...')
     _client = DeviceClient()
+
+    LOGGER.info('Yielding DeviceClient...')
     yield _client
+
+    LOGGER.info('Closing DeviceClient...')
     _client.close()
 
+    LOGGER.info('Closed DeviceClient...')
+
 # This fixture will be requested by test cases and last during testing session
 @pytest.fixture(scope='session')
 def monitoring_service(
@@ -141,6 +165,8 @@ def monitoring_service(
     LOGGER.info('Terminating MonitoringService...')
     _service.stop()
 
+    LOGGER.info('Terminated MonitoringService...')
+
 # This fixture will be requested by test cases and last during testing session.
 # The client requires the server, so client fixture has the server as dependency.
 @pytest.fixture(scope='session')
@@ -155,6 +181,8 @@ def monitoring_client(monitoring_service : MonitoringService): # pylint: disable
     LOGGER.info('Closing MonitoringClient...')
     _client.close()
 
+    LOGGER.info('Closed MonitoringClient...')
+
 @pytest.fixture(scope='session')
 def management_db():
     _management_db = ManagementDBTools.ManagementDB('monitoring.db')
@@ -373,7 +401,8 @@ def test_delete_alarm(monitoring_client): # pylint: disable=redefined-outer-name
 # Test case that makes use of client fixture to test server's GetStreamKpi method
 def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-name
     LOGGER.warning('test_getstream_kpi begin')
-    response = monitoring_client.GetStreamKpi(monitoring_pb2.Kpi())
+    _kpi_id = monitoring_client.SetKpi(create_kpi_request_d())
+    response = monitoring_client.GetStreamKpi(_kpi_id)
     LOGGER.debug(str(response))
     assert isinstance(response, _MultiThreadedRendezvous)
 
@@ -521,6 +550,8 @@ def test_events_tools(
     device_client.DeleteDevice(response)
     events_collector.stop()
 
+    LOGGER.warning('test_events_tools end')
+
 
 def test_get_device_events(
         context_client : ContextClient,                 # pylint: disable=redefined-outer-name,unused-argument
@@ -549,6 +580,8 @@ def test_get_device_events(
     device_client.DeleteDevice(response)
     events_collector.stop()
 
+    LOGGER.warning('test_get_device_events end')
+
 def test_listen_events(
         context_client : ContextClient,                 # pylint: disable=redefined-outer-name,unused-argument
         device_client : DeviceClient,                   # pylint: disable=redefined-outer-name
@@ -574,3 +607,5 @@ def test_listen_events(
 
     device_client.DeleteDevice(response)
     events_collector.stop()
+
+    LOGGER.warning('test_listen_events end')
diff --git a/src/pathcomp/backend/pathComp_tools.h b/src/pathcomp/backend/pathComp_tools.h
index adbbf30c4fda48564c126369b0aace839cdf5d93..118781eb05db713beb3f3ba05968320c384b1e2f 100644
--- a/src/pathcomp/backend/pathComp_tools.h
+++ b/src/pathcomp/backend/pathComp_tools.h
@@ -121,7 +121,7 @@ struct map_nodes_t {
 };
 
 #define MAX_NUM_VERTICES				20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
-#define MAX_NUM_EDGES					20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
+#define MAX_NUM_EDGES					40 // 100 # LGR: reduced from 100 to 40 to divide by 2.5 the memory used
 // Structures for the graph composition
 struct targetNodes_t {
 	// remote / targeted node
@@ -154,7 +154,7 @@ struct context_t {
 ////////////////////////////////////////////////////
 // Structure for the Set of Contexts
 ///////////////////////////////////////////////////
-#define MAX_NUMBER_CONTEXT		5 // 100 # LGR: reduced from 100 to 5 to divide by 20 the memory used
+#define MAX_NUMBER_CONTEXT		1 // 100 # LGR: reduced from 100 to 1 to divide by 100 the memory used
 struct contextSet_t {
 	struct context_t contextList[MAX_NUMBER_CONTEXT];
 	gint num_context_set;
@@ -251,7 +251,7 @@ struct endPoint_t {
 // Structure for the device contents
 ///////////////////////////////////////////////////////////////////
 #define MAX_DEV_TYPE_SIZE				128
-#define MAX_DEV_ENDPOINT_LENGTH			10
+#define MAX_DEV_ENDPOINT_LENGTH			40	// 10 # LGR: controllers might have large number of endpoints
 struct device_t {
 	gchar deviceId[UUID_CHAR_LENGTH]; // device ID using UUID (128 bits)
 
diff --git a/src/pathcomp/frontend/service/algorithms/_Algorithm.py b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
index 5c49a1feccfd3abab6df418fd5e5e1f2f3577a2a..bf19ed3e10affd707b5032428efce154e05d4169 100644
--- a/src/pathcomp/frontend/service/algorithms/_Algorithm.py
+++ b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
@@ -68,6 +68,7 @@ class _Algorithm:
         if isinstance(grpc_links, LinkList): grpc_links = grpc_links.links
         for grpc_link in grpc_links:
             json_link = compose_link(grpc_link)
+            if len(json_link['link_endpoint_ids']) != 2: continue
             self.link_list.append(json_link)
 
             link_uuid = json_link['link_Id']
diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index bf152027037af46283c0901f0701ffb83d8a508e..7d9ddeffb52e716e9844147b510dfe9bded886f1 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -16,15 +16,15 @@ import grpc, json, logging
 from typing import Optional
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.method_wrappers.ServiceExceptions import AlreadyExistsException, InvalidArgumentException
-from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum
+from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum
 from common.proto.pathcomp_pb2 import PathCompRequest
 from common.proto.service_pb2_grpc import ServiceServiceServicer
 from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
 from context.client.ContextClient import ContextClient
 from pathcomp.frontend.client.PathCompClient import PathCompClient
-from .tools.ContextGetters import get_service
 from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
 from .task_scheduler.TaskScheduler import TasksScheduler
+from .tools.ContextGetters import get_service
 
 LOGGER = logging.getLogger(__name__)
 
@@ -40,10 +40,6 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
     def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
         LOGGER.info('[CreateService] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
 
-        service_id = request.service_id
-        service_uuid = service_id.service_uuid.uuid
-        service_context_uuid = service_id.context_id.context_uuid.uuid
-
         if len(request.service_endpoint_ids) > 0:
             unexpected_endpoints = []
             for service_endpoint_id in request.service_endpoint_ids:
@@ -97,8 +93,24 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         _service : Optional[Service] = get_service(context_client, request.service_id)
         service = Service()
         service.CopyFrom(request if _service is None else _service)
-        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
-        context_client.SetService(service)
+        if service.service_type == ServiceTypeEnum.SERVICETYPE_UNKNOWN:                     # pylint: disable=no-member
+            service.service_type = request.service_type                                     # pylint: disable=no-member
+        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED     # pylint: disable=no-member
+
+        del service.service_endpoint_ids[:] # pylint: disable=no-member
+        for endpoint_id in request.service_endpoint_ids:
+            service.service_endpoint_ids.add().CopyFrom(endpoint_id)    # pylint: disable=no-member
+
+        del service.service_constraints[:]  # pylint: disable=no-member
+        for constraint in request.service_constraints:
+            service.service_constraints.add().CopyFrom(constraint)  # pylint: disable=no-member
+
+        del service.service_config.config_rules[:]  # pylint: disable=no-member
+        for config_rule in request.service_config.config_rules:
+            service.service_config.config_rules.add().CopyFrom(config_rule) # pylint: disable=no-member
+
+        service_id_with_uuids = context_client.SetService(service)
+        service_with_uuids = context_client.GetService(service_id_with_uuids)
 
         num_disjoint_paths = None
         for constraint in request.service_constraints:
@@ -107,14 +119,14 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
                 break
 
         tasks_scheduler = TasksScheduler(self.service_handler_factory)
-        if len(request.service_endpoint_ids) >= (2 if num_disjoint_paths is None else 4):
+        if len(service_with_uuids.service_endpoint_ids) >= (2 if num_disjoint_paths is None else 4):
             pathcomp_request = PathCompRequest()
-            pathcomp_request.services.append(request)
+            pathcomp_request.services.append(service_with_uuids)    # pylint: disable=no-member
 
             if num_disjoint_paths is None:
-                pathcomp_request.shortest_path.Clear()
+                pathcomp_request.shortest_path.Clear()              # pylint: disable=no-member
             else:
-                pathcomp_request.k_disjoint_path.num_disjoint = num_disjoint_paths
+                pathcomp_request.k_disjoint_path.num_disjoint = num_disjoint_paths  # pylint: disable=no-member
 
             LOGGER.info('pathcomp_request={:s}'.format(grpc_message_to_json_string(pathcomp_request)))
             pathcomp = PathCompClient()
@@ -128,7 +140,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
             tasks_scheduler.compose_from_pathcompreply(pathcomp_reply, is_delete=False)
 
         tasks_scheduler.execute_all()
-        return request.service_id
+        return service_with_uuids.service_id
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
@@ -142,6 +154,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         if _service is None: raise Exception('Service({:s}) not found'.format(grpc_message_to_json_string(request)))
         service = Service()
         service.CopyFrom(_service)
+        # pylint: disable=no-member
         service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL
         context_client.SetService(service)
 
diff --git a/src/service/service/service_handler_api/SettingsHandler.py b/src/service/service/service_handler_api/SettingsHandler.py
new file mode 100644
index 0000000000000000000000000000000000000000..4df24cee0e5a865dcc36473d3e118864c3421881
--- /dev/null
+++ b/src/service/service/service_handler_api/SettingsHandler.py
@@ -0,0 +1,88 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import anytree, json, logging
+from typing import Any, List, Optional, Tuple, Union
+from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, Device, EndPoint, ServiceConfig
+from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
+from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+
+LOGGER = logging.getLogger(__name__)
+
+class SettingsHandler:
+    def __init__(self, service_config : ServiceConfig, **settings) -> None:
+        self.__resolver = anytree.Resolver(pathattr='name')
+        self.__config = TreeNode('.')
+        for config_rule in service_config.config_rules:
+            self.update_config_rule(config_rule)
+
+    @staticmethod
+    def _config_rule_to_raw(config_rule : ConfigRule) -> Optional[Tuple[int, str, Any]]:
+        action = config_rule.action
+        kind = config_rule.WhichOneof('config_rule')
+        if kind == 'custom':
+            key_or_path = config_rule.custom.resource_key
+            value = config_rule.custom.resource_value
+            try:
+                value = json.loads(value)
+            except: # pylint: disable=bare-except
+                pass
+        elif kind == 'acl':
+            device_uuid = config_rule.acl.endpoint_id.device_id.device_uuid.uuid
+            endpoint_uuid = config_rule.acl.endpoint_id.endpoint_uuid.uuid
+            acl_ruleset_name = config_rule.acl.rule_set.name
+            ACL_KEY_TEMPLATE = '/device[{:s}]/endpoint[{:s}]/acl_ruleset[{:s}]'
+            key_or_path = ACL_KEY_TEMPLATE.format(device_uuid, endpoint_uuid, acl_ruleset_name)
+            value = grpc_message_to_json(config_rule.acl)
+        else:
+            MSG = 'Unsupported Kind({:s}) in ConfigRule({:s})'
+            LOGGER.warning(MSG.format(str(kind), grpc_message_to_json_string(config_rule)))
+            return None
+
+        return action, key_or_path, value
+
+    def get(self, key_or_path : Union[str, List[str]], default : Optional[Any] = None) -> Optional[TreeNode]:
+        return get_subnode(self.__resolver, self.__config, key_or_path, default=default)
+
+    def get_endpoint_settings(self, device : Device, endpoint : EndPoint) -> Optional[TreeNode]:
+        device_keys   = device.device_id.device_uuid.uuid,       device.name
+        endpoint_keys = endpoint.endpoint_id.endpoint_uuid.uuid, endpoint.name
+
+        for device_key in device_keys:
+            for endpoint_key in endpoint_keys:
+                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_key, endpoint_key)
+                endpoint_settings = self.get(endpoint_settings_uri)
+                if endpoint_settings is not None: return endpoint_settings
+
+        return None
+
+    def set(self, key_or_path : Union[str, List[str]], value : Any) -> None:
+        set_subnode_value(self.__resolver, self.__config, key_or_path, value)
+
+    def delete(self, key_or_path : Union[str, List[str]]) -> None:
+        delete_subnode(self.__resolver, self.__config, key_or_path)
+
+    def update_config_rule(self, config_rule : ConfigRule) -> None:
+        raw_data = SettingsHandler._config_rule_to_raw(config_rule)
+        if raw_data is None: return
+        action, key_or_path, value = raw_data
+
+        if action == ConfigActionEnum.CONFIGACTION_SET:
+            self.set(key_or_path, value)
+        elif action == ConfigActionEnum.CONFIGACTION_DELETE:
+            self.delete(key_or_path)
+        else:
+            MSG = 'Unsupported Action({:s}) in ConfigRule({:s})'
+            LOGGER.warning(MSG.format(str(action), grpc_message_to_json_string(config_rule)))
+            return
diff --git a/src/service/service/service_handler_api/Tools.py b/src/service/service/service_handler_api/Tools.py
index 61ad7976132d2175319fffdeb5199f459c16d14f..ebd16a532c4ef4e74a61fd075afe9298755e26fb 100644
--- a/src/service/service/service_handler_api/Tools.py
+++ b/src/service/service/service_handler_api/Tools.py
@@ -14,6 +14,8 @@
 
 import functools
 from typing import Any, List, Union
+from common.method_wrappers.ServiceExceptions import NotFoundException
+from common.proto.context_pb2 import Device, EndPoint
 
 ACTION_MSG_SET_ENDPOINT      = 'Set EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
 ACTION_MSG_DELETE_ENDPOINT   = 'Delete EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
@@ -40,3 +42,12 @@ check_errors_setconstraint    = functools.partial(_check_errors, ACTION_MSG_SET_
 check_errors_deleteconstraint = functools.partial(_check_errors, ACTION_MSG_DELETE_CONSTRAINT)
 check_errors_setconfig        = functools.partial(_check_errors, ACTION_MSG_SET_CONFIG       )
 check_errors_deleteconfig     = functools.partial(_check_errors, ACTION_MSG_DELETE_CONFIG    )
+
+def get_endpoint_matching(device : Device, endpoint_uuid_or_name : str) -> EndPoint:
+    for endpoint in device.device_endpoints:
+        choices = {endpoint.endpoint_id.endpoint_uuid.uuid, endpoint.name}
+        if endpoint_uuid_or_name in choices: return endpoint
+
+    device_uuid = device.device_id.device_uuid.uuid
+    extra_details = 'Device({:s})'.format(str(device_uuid))
+    raise NotFoundException('Endpoint', endpoint_uuid_or_name, extra_details=extra_details)
diff --git a/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
index bc628c160eaaa9ac282c81bd4c0e02536e88a80c..66259d1f636c712bc41f282b4a5d947c57e01fc4 100644
--- a/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
+++ b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py b/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
index 23df44413c17e66a631988eb6256316badf0d554..63442a6b46d3301ff15ee0d4416468f01d2f61a5 100644
--- a/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
+++ b/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
index f161225192dfe7f9eb0804b9d9bff4e5acba9e21..8a39ed47463d70bf2c2c42cbb9308ba5e072caf4 100644
--- a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
+++ b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,67 +48,64 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
         self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None
     ) -> List[Union[bool, Exception]]:
+        LOGGER.info('[SetEndpoint] endpoints={:s}'.format(str(endpoints)))
+        LOGGER.info('[SetEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
+        LOGGER.info('[SetEndpoint] settings={:s}'.format(str(settings)))
 
         results = []
         for endpoint in endpoints:
+            LOGGER.info('[SetEndpoint] endpoint={:s}'.format(str(endpoint)))
             try:
                 chk_type('endpoint', endpoint, (tuple, list))
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
+                LOGGER.info('[SetEndpoint] endpoint_settings={:s}'.format(str(endpoint_settings)))
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
+                LOGGER.info('[SetEndpoint] json_config_rules={:s}'.format(str(json_config_rules)))
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
                 results.append(e)
 
+        LOGGER.info('[SetEndpoint] results={:s}'.format(str(results)))
         return results
 
     @metered_subclass_method(METRICS_POOL)
     def DeleteEndpoint(
         self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None
     ) -> List[Union[bool, Exception]]:
+        LOGGER.info('[DeleteEndpoint] endpoints={:s}'.format(str(endpoints)))
+        LOGGER.info('[DeleteEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +114,17 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +158,8 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +175,7 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
index 0f5cb6c558c1515b81d011074ecda7e167c47e90..3dc98f71b3f64557782b700220d1d3ab84314b4b 100644
--- a/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
+++ b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py b/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
index fb54a1bc1db3071e88fd26e935c7779c7c2f19ee..a16f8cdfad5524a45c36502610d615f3b5dbbba4 100644
--- a/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
+++ b/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
@@ -12,15 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, Dict, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_type
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 
 LOGGER = logging.getLogger(__name__)
@@ -38,22 +38,8 @@ class MicrowaveServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -61,26 +47,22 @@ class MicrowaveServiceHandler(_ServiceHandler):
     ) -> List[Union[bool, Exception]]:
         LOGGER.info('[SetEndpoint] endpoints={:s}'.format(str(endpoints)))
         LOGGER.info('[SetEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
+        settings = self.__settings_handler.get('/settings')
+        json_settings : Dict = {} if settings is None else settings.value
+        vlan_id = json_settings.get('vlan_id', 121)
 
         results = []
         try:
-            chk_type('endpoints', endpoints, list)
-            if len(endpoints) != 2: raise Exception('len(endpoints) != 2')
-
-            settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
-            if settings is None:
-                raise Exception('Unable to retrieve settings for Service({:s})'.format(str(service_uuid)))
-
-            json_settings : Dict = settings.value
-            vlan_id = json_settings.get('vlan_id', 121)
             # endpoints are retrieved in the following format --> '/endpoints/endpoint[172.26.60.243:9]'
             node_id_src, tp_id_src = check_endpoint(endpoints[0][1], service_uuid)
             node_id_dst, tp_id_dst = check_endpoint(endpoints[1][1], service_uuid)
         
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_set('/services/service[{:s}]'.format(service_uuid), {
                 'uuid'       : service_uuid,
                 'node_id_src': node_id_src,
@@ -89,9 +71,9 @@ class MicrowaveServiceHandler(_ServiceHandler):
                 'tp_id_dst'  : tp_id_dst,
                 'vlan_id'    : vlan_id,
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to SetEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -106,21 +88,21 @@ class MicrowaveServiceHandler(_ServiceHandler):
         LOGGER.info('[DeleteEndpoint] endpoints={:s}'.format(str(endpoints)))
         LOGGER.info('[DeleteEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
 
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) != 2: return []
+
         service_uuid = self.__service.service_id.service_uuid.uuid
 
         results = []
         try:
-            chk_type('endpoints', endpoints, list)
-            if len(endpoints) < 1: raise Exception('len(endpoints) < 1')
-
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_delete('/services/service[{:s}]'.format(service_uuid), {
                 'uuid': service_uuid
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to DeleteEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -154,9 +136,8 @@ class MicrowaveServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -172,8 +153,7 @@ class MicrowaveServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
index 24371203ad599d7ad9a7f66e5ad96874471be00b..d8a4668bbf102fa5b5f8c9e9f542f34b063bc819 100644
--- a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
+++ b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
@@ -12,15 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, Dict, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_type
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 
 LOGGER = logging.getLogger(__name__)
@@ -32,22 +32,8 @@ class TapiServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -59,10 +45,8 @@ class TapiServiceHandler(_ServiceHandler):
         if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
-        if settings is None: raise Exception('Unable to retrieve settings for Service({:s})'.format(str(service_uuid)))
-
-        json_settings : Dict = settings.value
+        settings = self.__settings_handler.get('/settings')
+        json_settings : Dict = {} if settings is None else settings.value
         capacity_value   = json_settings.get('capacity_value',   50.0)
         capacity_unit    = json_settings.get('capacity_unit',    'GHz')
         layer_proto_name = json_settings.get('layer_proto_name', 'PHOTONIC_MEDIA')
@@ -72,7 +56,7 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         try:
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_set('/service[{:s}]'.format(service_uuid), {
                 'uuid'                    : service_uuid,
                 'input_sip'               : endpoints[0][1],
@@ -83,12 +67,12 @@ class TapiServiceHandler(_ServiceHandler):
                 'layer_protocol_qualifier': layer_proto_qual,
                 'direction'               : direction,
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
-            LOGGER.exception('Unable to configure Service({:s})'.format(str(service_uuid)))
+            LOGGER.exception('Unable to SetEndpoint for Service({:s})'.format(str(service_uuid)))
             results.append(e)
 
         return results
@@ -104,14 +88,17 @@ class TapiServiceHandler(_ServiceHandler):
         if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
+
         results = []
         try:
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-            json_config_rule = json_config_rule_delete('/service[{:s}]'.format(service_uuid), {'uuid': service_uuid})
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            json_config_rule = json_config_rule_delete('/service[{:s}]'.format(service_uuid), {
+                'uuid': service_uuid
+            })
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to DeleteEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -145,9 +132,8 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -163,8 +149,7 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/slice/service/SliceServiceServicerImpl.py b/src/slice/service/SliceServiceServicerImpl.py
index d693abd8f3470fe59d5664073c2f3c50f53234e2..413750662410dd936575463d44355c30708aff28 100644
--- a/src/slice/service/SliceServiceServicerImpl.py
+++ b/src/slice/service/SliceServiceServicerImpl.py
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Optional
 import grpc, json, logging #, deepdiff
 from common.proto.context_pb2 import (
     Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum, Slice, SliceId, SliceStatusEnum)
 from common.proto.slice_pb2_grpc import SliceServiceServicer
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.tools.context_queries.InterDomain import is_multi_domain
+from common.tools.context_queries.Slice import get_slice
 from common.tools.grpc.ConfigRules import copy_config_rules
 from common.tools.grpc.Constraints import copy_constraints
 from common.tools.grpc.EndPointIds import copy_endpoint_ids
@@ -37,29 +39,32 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         LOGGER.debug('Servicer Created')
 
     def create_update(self, request : Slice) -> SliceId:
+        # Set slice status to "SERVICESTATUS_PLANNED" to ensure rest of components are aware the slice is
+        # being modified.
         context_client = ContextClient()
-        try:
-            _slice = context_client.GetSlice(request.slice_id)
-            #json_current_slice = grpc_message_to_json(_slice)
-        except: # pylint: disable=bare-except
-            #json_current_slice = {}
-            slice_request = Slice()
-            slice_request.slice_id.CopyFrom(request.slice_id) # pylint: disable=no-member
-            slice_request.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_PLANNED # pylint: disable=no-member
-            context_client.SetSlice(slice_request)
-            _slice = context_client.GetSlice(request.slice_id)
+        slice_ro : Optional[Slice] = get_slice(
+            context_client, request.slice_id.slice_uuid.uuid, request.slice_id.context_id.context_uuid.uuid,
+            rw_copy=False)
+
+        slice_rw = Slice()
+        slice_rw.CopyFrom(request if slice_ro is None else slice_ro)
+        slice_rw.name = request.name
+        slice_rw.slice_owner.CopyFrom(request.slice_owner)                          # pylint: disable=no-member
+        slice_rw.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_PLANNED    # pylint: disable=no-member
 
-        slice_request = Slice()
-        slice_request.CopyFrom(_slice)
+        copy_endpoint_ids(request.slice_endpoint_ids,        slice_rw.slice_endpoint_ids       ) # pylint: disable=no-member
+        copy_constraints (request.slice_constraints,         slice_rw.slice_constraints        ) # pylint: disable=no-member
+        copy_config_rules(request.slice_config.config_rules, slice_rw.slice_config.config_rules) # pylint: disable=no-member
 
-        if len(request.slice_endpoint_ids) < 2:
+        slice_id_with_uuids = context_client.SetSlice(slice_rw)
+
+        if len(slice_rw.slice_endpoint_ids) < 2: # pylint: disable=no-member
             # unable to identify the kind of slice; just update endpoints, constraints and config rules
             # update the slice in database, and return
             # pylint: disable=no-member
-            copy_endpoint_ids(request.slice_endpoint_ids, slice_request.slice_endpoint_ids)
-            copy_constraints(request.slice_constraints, slice_request.slice_constraints)
-            copy_config_rules(request.slice_config.config_rules, slice_request.slice_config.config_rules)
-            return context_client.SetSlice(slice_request)
+            return context_client.SetSlice(slice_rw)
+
+        slice_with_uuids = context_client.GetSlice(slice_id_with_uuids)
 
         #LOGGER.info('json_current_slice = {:s}'.format(str(json_current_slice)))
         #json_updated_slice = grpc_message_to_json(request)
@@ -67,9 +72,9 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         #changes = deepdiff.DeepDiff(json_current_slice, json_updated_slice)
         #LOGGER.info('changes = {:s}'.format(str(changes)))
 
-        if is_multi_domain(context_client, request.slice_endpoint_ids):
+        if is_multi_domain(context_client, slice_with_uuids.slice_endpoint_ids):
             interdomain_client = InterdomainClient()
-            slice_id = interdomain_client.RequestSlice(request)
+            slice_id = interdomain_client.RequestSlice(slice_with_uuids)
             slice_ = context_client.GetSlice(slice_id)
             slice_active = Slice()
             slice_active.CopyFrom(slice_)
@@ -80,8 +85,8 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         # Local domain slice
         service_id = ServiceId()
         # pylint: disable=no-member
-        context_uuid = service_id.context_id.context_uuid.uuid = request.slice_id.context_id.context_uuid.uuid
-        service_uuid = service_id.service_uuid.uuid = request.slice_id.slice_uuid.uuid
+        context_uuid = service_id.context_id.context_uuid.uuid = slice_with_uuids.slice_id.context_id.context_uuid.uuid
+        service_uuid = service_id.service_uuid.uuid = slice_with_uuids.slice_id.slice_uuid.uuid
 
         service_client = ServiceClient()
         try:
@@ -136,13 +141,13 @@ class SliceServiceServicerImpl(SliceServiceServicer):
 
         service_client.UpdateService(service_request)
 
-        copy_endpoint_ids(request.slice_endpoint_ids, slice_request.slice_endpoint_ids)
-        copy_constraints(request.slice_constraints, slice_request.slice_constraints)
-        copy_config_rules(request.slice_config.config_rules, slice_request.slice_config.config_rules)
+        #copy_endpoint_ids(request.slice_endpoint_ids, slice_with_uuids.slice_endpoint_ids)
+        #copy_constraints(request.slice_constraints, slice_with_uuids.slice_constraints)
+        #copy_config_rules(request.slice_config.config_rules, slice_with_uuids.slice_config.config_rules)
 
-        update_service_ids(slice_request.slice_service_ids, context_uuid, service_uuid)
-        context_client.SetSlice(slice_request)
-        slice_id = slice_request.slice_id
+        update_service_ids(slice_with_uuids.slice_service_ids, context_uuid, service_uuid)
+        context_client.SetSlice(slice_with_uuids)
+        slice_id = slice_with_uuids.slice_id
 
         slice_ = context_client.GetSlice(slice_id)
         slice_active = Slice()
@@ -194,14 +199,14 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         else:
             current_slice = Slice()
             current_slice.CopyFrom(_slice)
-            current_slice.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_DEINIT
+            current_slice.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_DEINIT # pylint: disable=no-member
             context_client.SetSlice(current_slice)
 
             service_client = ServiceClient()
             for service_id in _slice.slice_service_ids:
                 current_slice = Slice()
-                current_slice.slice_id.CopyFrom(_slice.slice_id)
-                slice_service_id = current_slice.slice_service_ids.add()
+                current_slice.slice_id.CopyFrom(_slice.slice_id) # pylint: disable=no-member
+                slice_service_id = current_slice.slice_service_ids.add() # pylint: disable=no-member
                 slice_service_id.CopyFrom(service_id)
                 context_client.UnsetSlice(current_slice)
 
diff --git a/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py b/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
index e1273b4e483a06df23d94bdf107005ce7585fb5e..aa4ca045f41ffdc69d2ebf2fcd9b5db99ce45dbe 100644
--- a/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
+++ b/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
@@ -73,7 +73,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase):
             response = requests.get(endpoint, auth=self.auth)
             http_code = response.status_code
         except requests.exceptions.RequestException as e:
-            raise SdnConnectorError(e.message, http_code=503)
+            raise SdnConnectorError(str(e), http_code=503)
 
         if http_code != 200:
             raise SdnConnectorError("Failed while authenticating", http_code=http_code)
diff --git a/src/webui/service/templates/device/home.html b/src/webui/service/templates/device/home.html
index 7b4437ccecd0c4d4948edba862666ee83c01e4cf..ef4345b363d708da6c7b672eba2d95f9d1f9f10d 100644
--- a/src/webui/service/templates/device/home.html
+++ b/src/webui/service/templates/device/home.html
@@ -48,7 +48,7 @@
             <th scope="col">Endpoints</th>
             <th scope="col">Drivers</th>
             <th scope="col">Status</th>
-            <!-- <th scope="col">Configuration</th> -->
+            <th scope="col">Config Rules</th>
             <th scope="col"></th>
           </tr>
         </thead>
@@ -56,40 +56,16 @@
             {% if devices %}
                 {% for device in devices %}
                 <tr>
-                    <td>
-                        {{ device.device_id.device_uuid.uuid }}
-                    </td>
-                    <td>
-                        {{ device.name }}
-                    </td>
-                    <td>
-                        {{ device.device_type }}
-                    </td>
-                    <td>
-                        <ul>
-                            {% for end_point in device.device_endpoints %}
-                            <li>{{ end_point.endpoint_id.endpoint_uuid.uuid }}</li>
-                            {% endfor %}
-                        </ul>
-                    </td>
-                    <td>
-                        <ul>
-                            {% for driver in device.device_drivers %}
-                            <li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li>
-                            {% endfor %}
-                        </ul>
-                    </td>
+                    <td>{{ device.device_id.device_uuid.uuid }}</td>
+                    <td>{{ device.name }}</td>
+                    <td>{{ device.device_type }}</td>
+                    <td>{{ device.device_endpoints | length }}</td>
+                    <td><ul>{% for driver in device.device_drivers %}
+                        <li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li>
+                        {% endfor %}
+                    </ul></td>
                     <td>{{ dose.Name(device.device_operational_status).replace('DEVICEOPERATIONALSTATUS_', '') }}</td>
-                    <!-- <td>
-                        <ul>
-                            {% for config in device.device_config.config_rules %}
-                            <li>
-                                Key: {{ config.resource_key }}<br/>
-                                Value: {{ config.resource_value }}
-                            </li>
-                            {% endfor %}
-                        </ul>
-                    </td> -->
+                    <td>{{ device.device_config.config_rules | length }}</td>
                     <td>
                         <a href="{{ url_for('device.detail', device_uuid=device.device_id.device_uuid.uuid) }}">
                             <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16">
diff --git a/src/webui/service/templates/js/topology.js b/src/webui/service/templates/js/topology.js
index adcabf62cd4cf59bb11fda3584a1e367836e45e1..47c2466148543582acd679093a33cd1fe38a4e62 100644
--- a/src/webui/service/templates/js/topology.js
+++ b/src/webui/service/templates/js/topology.js
@@ -88,9 +88,9 @@ d3.json("{{ url_for('main.topology') }}", function(data) {
         .call(d3.drag().on("start", dragstarted).on("drag", dragged).on("end", dragended));
 
     // node tooltip
-    node.append("title").text(function(n) { return n.name + ' (' + n.id + ')'; });
+    node.append("title").text(function(n) { return n.name; });
     // link tooltip
-    link.append("title").text(function(l) { return l.name + ' (' + l.id + ')'; });
+    link.append("title").text(function(l) { return l.name; });
 
     // link style
     link
diff --git a/src/webui/service/templates/service/detail.html b/src/webui/service/templates/service/detail.html
index 67b240b3d0e79745b839c0103c40f4721e4befad..e8d3a4221f68c5b0ea1eb51dcdc81cf13266c8e8 100644
--- a/src/webui/service/templates/service/detail.html
+++ b/src/webui/service/templates/service/detail.html
@@ -17,7 +17,7 @@
 {% extends 'base.html' %}
 
 {% block content %}
-<h1>Service {{ service.service_id.service_uuid.uuid }}</h1>
+<h1>Service {{ service.name }} ({{ service.service_id.service_uuid.uuid }})</h1>
 
 <div class="row mb-3">
     <div class="col-sm-3">
@@ -45,6 +45,7 @@
     <div class="col-sm-4">
         <b>Context: </b> {{ service.service_id.context_id.context_uuid.uuid }}<br>
         <b>UUID: </b> {{ service.service_id.service_uuid.uuid }}<br>
+        <b>Name: </b> {{ service.name }}<br>
         <b>Type: </b> {{ ste.Name(service.service_type).replace('SERVICETYPE_', '') }}<br>
         <b>Status: </b> {{ sse.Name(service.service_status.service_status).replace('SERVICESTATUS_', '') }}<br>
     </div>
diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html
index c0a01839bb519074526a4ed34669ebfdd3d8b8e4..280685fc537af1b0eafd4d04056f4e4b0ed48e48 100644
--- a/src/webui/service/templates/service/home.html
+++ b/src/webui/service/templates/service/home.html
@@ -43,7 +43,8 @@
     <table class="table table-striped table-hover">
         <thead>
           <tr>
-            <th scope="col">#</th>
+            <th scope="col">UUID</th>
+            <th scope="col">Name</th>
             <th scope="col">Type</th>
             <th scope="col">End points</th>
             <th scope="col">Status</th>
@@ -59,6 +60,9 @@
                             {{ service.service_id.service_uuid.uuid }}
                         <!-- </a> -->
                     </td>
+                    <td>
+                        {{ service.name }}
+                    </td>
                     <td>
                         {{ ste.Name(service.service_type).replace('SERVICETYPE_', '') }}
                     </td>
diff --git a/src/webui/service/templates/slice/detail.html b/src/webui/service/templates/slice/detail.html
index 404dede394fbbc4b30d181c1872c989686c4b17a..4f26c75a50138df4fe1ff0806d250a5906e8cfd3 100644
--- a/src/webui/service/templates/slice/detail.html
+++ b/src/webui/service/templates/slice/detail.html
@@ -17,7 +17,7 @@
 {% extends 'base.html' %}
 
 {% block content %}
-<h1>Slice {{ slice.slice_id.slice_uuid.uuid }} </h1>
+<h1>Slice {{ slice.name }} ({{ slice.slice_id.slice_uuid.uuid }}) </h1>
 
 <div class="row mb-3">
     <div class="col-sm-3">
@@ -46,6 +46,7 @@
     <div class="col-sm-4">
         <b>Context: </b> {{ slice.slice_id.context_id.context_uuid.uuid }}<br>
         <b>UUID: </b> {{ slice.slice_id.slice_uuid.uuid }}<br>
+        <b>Name: </b> {{ slice.name }}<br>
         <b>Owner: </b> {{ slice.slice_owner.owner_uuid.uuid }}<br>
         <b>Status: </b> {{ sse.Name(slice.slice_status.slice_status).replace('SLICESTATUS_', '') }}<br>
     </div>
diff --git a/src/webui/service/templates/slice/home.html b/src/webui/service/templates/slice/home.html
index 46a2b4f1a5b4aceb5e432b7b69563d20258fc152..141234acadfc4dac61070e55ac9b161bbb01e2b2 100644
--- a/src/webui/service/templates/slice/home.html
+++ b/src/webui/service/templates/slice/home.html
@@ -31,20 +31,19 @@
     <table class="table table-striped table-hover">
         <thead>
           <tr>
-            <th scope="col">#</th>
+            <th scope="col">UUID</th>
+            <th scope="col">Name</th>
             <th scope="col">End points</th>
             <th scope="col">Status</th>
             <th scope="col"></th>
-            
           </tr>
         </thead>
         <tbody>
             {% if slices %}
                 {% for slice in slices %}
                 <tr>
-                    <td>
-                        {{ slice.slice_id.slice_uuid.uuid }}
-                    </td>
+                    <td>{{ slice.slice_id.slice_uuid.uuid }}</td>
+                    <td>{{ slice.name }}</td>
                     <td>
                         <ul>
                         {% for i in range(slice.slice_endpoint_ids|length) %}