diff --git a/scripts/run_tests_locally-context.sh b/scripts/run_tests_locally-context.sh
index 9e5ac4b92b5d55509173b23f0896cb108bdd3a1e..a9e601208aa9259219708a5e1ca770232e44faa6 100755
--- a/scripts/run_tests_locally-context.sh
+++ b/scripts/run_tests_locally-context.sh
@@ -22,6 +22,7 @@ RCFILE=$PROJECTDIR/coverage/.coveragerc
 K8S_NAMESPACE="tf-dev"
 K8S_HOSTNAME="kubernetes-master"
 
+kubectl --namespace $K8S_NAMESPACE expose deployment contextservice --port=6379 --name=redis-tests --type=NodePort
 export REDIS_SERVICE_HOST=$(kubectl get node $K8S_HOSTNAME -o 'jsonpath={.status.addresses[?(@.type=="InternalIP")].address}')
 export REDIS_SERVICE_PORT=$(kubectl get service redis-tests --namespace $K8S_NAMESPACE -o 'jsonpath={.spec.ports[?(@.port==6379)].nodePort}')
 
diff --git a/src/common/tools/grpc/ConfigRules.py b/src/common/tools/grpc/ConfigRules.py
new file mode 100644
index 0000000000000000000000000000000000000000..e109cb7a00086da8530c7677967d86e57df1457a
--- /dev/null
+++ b/src/common/tools/grpc/ConfigRules.py
@@ -0,0 +1,63 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# RFC 8466 - L2VPN Service Model (L2SM)
+# Ref: https://datatracker.ietf.org/doc/html/rfc8466
+
+
+import json
+from typing import Any, Dict, Tuple
+from common.proto.context_pb2 import ConfigActionEnum, ConfigRule
+from common.tools.grpc.Tools import grpc_message_to_json_string
+
+def update_config_rule_custom(config_rules, resource_key : str, fields : Dict[str, Tuple[Any, bool]]) -> ConfigRule:
+    # fields: Dict[field_name : str, Tuple[field_value : Any, raise_if_differs : bool]]
+
+    for config_rule in config_rules:
+        if config_rule.WhichOneof('config_rule') != 'custom': continue
+        if config_rule.custom.resource_key != resource_key: continue
+        json_resource_value = json.loads(config_rule.custom.resource_value)
+        break   # found, end loop
+    else:
+        # not found, add it
+        config_rule = config_rules.add()    # pylint: disable=no-member
+        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
+        config_rule.custom.resource_key = resource_key
+        json_resource_value = {}
+
+    for field_name,(field_value, raise_if_differs) in fields.items():
+        if (field_name not in json_resource_value) or not raise_if_differs:
+            # missing or raise_if_differs=False, add/update it
+            json_resource_value[field_name] = field_value
+        elif json_resource_value[field_name] != field_value:
+            # exists, differs, and raise_if_differs=True
+            msg = 'Specified {:s}({:s}) differs from existing value({:s})'
+            raise Exception(msg.format(str(field_name), str(field_value), str(json_resource_value[field_name])))
+
+    config_rule.custom.resource_value = json.dumps(json_resource_value, sort_keys=True)
+    return config_rule
+
+def copy_config_rules(source_config_rules, target_config_rules):
+    for source_config_rule in source_config_rules:
+        config_rule_kind = source_config_rule.WhichOneof('config_rule')
+        if config_rule_kind == 'custom':
+            custom = source_config_rule.custom
+            resource_key = custom.resource_key
+            resource_value = json.loads(custom.resource_value)
+            raise_if_differs = True
+            fields = {name:(value, raise_if_differs) for name,value in resource_value.items()}
+            update_config_rule_custom(target_config_rules, resource_key, fields)
+
+        else:
+            raise NotImplementedError('ConfigRule({:s})'.format(grpc_message_to_json_string(source_config_rule)))
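
For illustration, a minimal usage sketch of update_config_rule_custom (the '/settings' resource key and the field names are hypothetical; it assumes a gRPC message exposing a repeated ConfigRule field such as Service.service_config.config_rules in context.proto):

    from common.proto.context_pb2 import Service
    from common.tools.grpc.ConfigRules import update_config_rule_custom

    service = Service()
    # first call: no 'custom' rule with key '/settings' exists yet, so one is created
    update_config_rule_custom(service.service_config.config_rules, '/settings', {
        'mtu'        : (1500, True),        # raise_if_differs=True: a later, different value raises
        'description': ('sample', False),   # raise_if_differs=False: later values silently overwrite
    })
    # second call: merges the new field value into the existing '/settings' rule
    update_config_rule_custom(service.service_config.config_rules, '/settings', {
        'description': ('sample-updated', False),
    })
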
diff --git a/src/common/tools/grpc/Constraints.py b/src/common/tools/grpc/Constraints.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9dd4f40cbd823752b8cc09936ac48ebe32ec1a5
--- /dev/null
+++ b/src/common/tools/grpc/Constraints.py
@@ -0,0 +1,165 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# RFC 8466 - L2VPN Service Model (L2SM)
+# Ref: https://datatracker.ietf.org/doc/html/rfc8466
+
+
+import json
+from typing import Any, Dict, Optional, Tuple
+from common.proto.context_pb2 import Constraint, EndPointId
+from common.tools.grpc.Tools import grpc_message_to_json_string
+
+def update_constraint_custom(constraints, constraint_type : str, fields : Dict[str, Tuple[Any, bool]]) -> Constraint:
+    # fields: Dict[field_name : str, Tuple[field_value : Any, raise_if_differs : bool]]
+
+    for constraint in constraints:
+        if constraint.WhichOneof('constraint') != 'custom': continue
+        if constraint.custom.constraint_type != constraint_type: continue
+        json_constraint_value = json.loads(constraint.custom.constraint_value)
+        break   # found, end loop
+    else:
+        # not found, add it
+        constraint = constraints.add()      # pylint: disable=no-member
+        constraint.custom.constraint_type = constraint_type
+        json_constraint_value = {}
+
+    for field_name,(field_value, raise_if_differs) in fields.items():
+        if (field_name not in json_constraint_value) or not raise_if_differs:
+            # missing or raise_if_differs=False, add/update it
+            json_constraint_value[field_name] = field_value
+        elif json_constraint_value[field_name] != field_value:
+            # exists, differs, and raise_if_differs=True
+            msg = 'Specified {:s}({:s}) differs from existing value({:s})'
+            raise Exception(msg.format(str(field_name), str(field_value), str(json_constraint_value[field_name])))
+
+    constraint.custom.constraint_value = json.dumps(json_constraint_value, sort_keys=True)
+    return constraint
+
+def update_constraint_endpoint_location(
+    constraints, endpoint_id : EndPointId,
+    region : Optional[str] = None, gps_position : Optional[Tuple[float, float]] = None
+) -> Constraint:
+    # gps_position: (latitude, longitude)
+    if region is not None and gps_position is not None:
+        raise Exception('Only one of region/gps_position can be provided')
+
+    endpoint_uuid = endpoint_id.endpoint_uuid.uuid
+    device_uuid = endpoint_id.device_id.device_uuid.uuid
+    topology_uuid = endpoint_id.topology_id.topology_uuid.uuid
+    context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+    for constraint in constraints:
+        if constraint.WhichOneof('constraint') != 'endpoint_location': continue
+        _endpoint_id = constraint.endpoint_location.endpoint_id
+        if _endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue
+        if _endpoint_id.device_id.device_uuid.uuid != device_uuid: continue
+        if _endpoint_id.topology_id.topology_uuid.uuid != topology_uuid: continue
+        if _endpoint_id.topology_id.context_id.context_uuid.uuid != context_uuid: continue
+        break   # found, end loop
+    else:
+        # not found, add it
+        constraint = constraints.add()      # pylint: disable=no-member
+        _endpoint_id = constraint.endpoint_location.endpoint_id
+        _endpoint_id.endpoint_uuid.uuid = endpoint_uuid
+        _endpoint_id.device_id.device_uuid.uuid = device_uuid
+        _endpoint_id.topology_id.topology_uuid.uuid = topology_uuid
+        _endpoint_id.topology_id.context_id.context_uuid.uuid = context_uuid
+
+    location = constraint.endpoint_location.location
+    if region is not None:
+        location.region = region
+    elif gps_position is not None:
+        location.gps_position.latitude = gps_position[0]
+        location.gps_position.longitude = gps_position[1]
+    return constraint
+
+def update_constraint_endpoint_priority(constraints, endpoint_id : EndPointId, priority : int) -> Constraint:
+    endpoint_uuid = endpoint_id.endpoint_uuid.uuid
+    device_uuid = endpoint_id.device_id.device_uuid.uuid
+    topology_uuid = endpoint_id.topology_id.topology_uuid.uuid
+    context_uuid = endpoint_id.topology_id.context_id.context_uuid.uuid
+
+    for constraint in constraints:
+        if constraint.WhichOneof('constraint') != 'endpoint_priority': continue
+        _endpoint_id = constraint.endpoint_priority.endpoint_id
+        if _endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue
+        if _endpoint_id.device_id.device_uuid.uuid != device_uuid: continue
+        if _endpoint_id.topology_id.topology_uuid.uuid != topology_uuid: continue
+        if _endpoint_id.topology_id.context_id.context_uuid.uuid != context_uuid: continue
+        break   # found, end loop
+    else:
+        # not found, add it
+        constraint = constraints.add()      # pylint: disable=no-member
+        _endpoint_id = constraint.endpoint_priority.endpoint_id
+        _endpoint_id.endpoint_uuid.uuid = endpoint_uuid
+        _endpoint_id.device_id.device_uuid.uuid = device_uuid
+        _endpoint_id.topology_id.topology_uuid.uuid = topology_uuid
+        _endpoint_id.topology_id.context_id.context_uuid.uuid = context_uuid
+
+    constraint.endpoint_priority.priority = priority
+    return constraint
+
+def update_constraint_sla_availability(constraints, num_disjoint_paths : int, all_active : bool) -> Constraint:
+    for constraint in constraints:
+        if constraint.WhichOneof('constraint') != 'sla_availability': continue
+        break   # found, end loop
+    else:
+        # not found, add it
+        constraint = constraints.add()      # pylint: disable=no-member
+
+    constraint.sla_availability.num_disjoint_paths = num_disjoint_paths
+    constraint.sla_availability.all_active = all_active
+    return constraint
+
+
+def copy_constraints(source_constraints, target_constraints):
+    for source_constraint in source_constraints:
+        constraint_kind = source_constraint.WhichOneof('constraint')
+        if constraint_kind == 'custom':
+            custom = source_constraint.custom
+            constraint_type = custom.constraint_type
+            constraint_value = json.loads(custom.constraint_value)
+            raise_if_differs = True
+            fields = {name:(value, raise_if_differs) for name,value in constraint_value.items()}
+            update_constraint_custom(target_constraints, constraint_type, fields)
+
+        elif constraint_kind == 'endpoint_location':
+            endpoint_id = source_constraint.endpoint_location.endpoint_id
+            location = source_constraint.endpoint_location.location
+            location_kind = location.WhichOneof('location')
+            if location_kind == 'region':
+                region = location.region
+                update_constraint_endpoint_location(target_constraints, endpoint_id, region=region)
+            elif location_kind == 'gps_position':
+                gps_position = location.gps_position
+                gps_position = (gps_position.latitude, gps_position.longitude)
+                update_constraint_endpoint_location(target_constraints, endpoint_id, gps_position=gps_position)
+            else:
+                str_constraint = grpc_message_to_json_string(source_constraint)
+                raise NotImplementedError('Constraint({:s}): Location({:s})'.format(str_constraint, constraint_kind))
+
+        elif constraint_kind == 'endpoint_priority':
+            endpoint_id = source_constraint.endpoint_priority.endpoint_id
+            priority = source_constraint.endpoint_priority.priority
+            update_constraint_endpoint_priority(target_constraints, endpoint_id, priority)
+
+        elif constraint_kind == 'sla_availability':
+            sla_availability = source_constraint.sla_availability
+            num_disjoint_paths = sla_availability.num_disjoint_paths
+            all_active = sla_availability.all_active
+            update_constraint_sla_availability(target_constraints, num_disjoint_paths, all_active)
+
+        else:
+            raise NotImplementedError('Constraint({:s})'.format(grpc_message_to_json_string(source_constraint)))
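
A similar sketch for the constraint helpers (values are hypothetical; it assumes a Service message exposing repeated Constraint service_constraints and a pre-built EndPointId):

    from common.proto.context_pb2 import EndPointId, Service
    from common.tools.grpc.Constraints import (
        update_constraint_custom, update_constraint_endpoint_location, update_constraint_sla_availability)

    service = Service()
    endpoint_id = EndPointId()
    endpoint_id.device_id.device_uuid.uuid = 'R1'
    endpoint_id.endpoint_uuid.uuid = 'EP1'

    update_constraint_custom(service.service_constraints, 'latency_ms', {'value': (10.0, True)})
    update_constraint_endpoint_location(service.service_constraints, endpoint_id, region='EU')
    update_constraint_sla_availability(service.service_constraints, num_disjoint_paths=2, all_active=True)
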
diff --git a/src/common/tools/grpc/EndPointIds.py b/src/common/tools/grpc/EndPointIds.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3830d4c809bb6dc065e58bc8af0f1ad56c610f4
--- /dev/null
+++ b/src/common/tools/grpc/EndPointIds.py
@@ -0,0 +1,50 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# RFC 8466 - L2VPN Service Model (L2SM)
+# Ref: https://datatracker.ietf.org/doc/html/rfc8466
+
+
+from typing import Optional
+from common.proto.context_pb2 import EndPointId
+
+def update_endpoint_ids(
+    endpoint_ids, device_uuid : str, endpoint_uuid : str,
+    context_uuid : Optional[str] = None, topology_uuid : Optional[str] = None
+) -> EndPointId:
+    for endpoint_id in endpoint_ids:
+        if endpoint_id.endpoint_uuid.uuid != endpoint_uuid: continue
+        if endpoint_id.device_id.device_uuid.uuid != device_uuid: continue
+        topology_id = endpoint_id.topology_id
+        if topology_uuid is not None and topology_id.topology_uuid.uuid != topology_uuid: continue
+        if context_uuid is not None and topology_id.context_id.context_uuid.uuid != context_uuid: continue
+        break   # found, do nothing
+    else:
+        # not found, add it
+        endpoint_id = endpoint_ids.add()    # pylint: disable=no-member
+        endpoint_id.endpoint_uuid.uuid = endpoint_uuid
+        endpoint_id.device_id.device_uuid.uuid = device_uuid
+        if topology_uuid is not None: endpoint_id.topology_id.topology_uuid.uuid = topology_uuid
+        if context_uuid is not None: endpoint_id.topology_id.context_id.context_uuid.uuid = context_uuid
+    return endpoint_id
+
+def copy_endpoint_ids(source_endpoint_ids, target_endpoint_ids):
+    for source_endpoint_id in source_endpoint_ids:
+        device_uuid = source_endpoint_id.device_id.device_uuid.uuid
+        endpoint_uuid = source_endpoint_id.endpoint_uuid.uuid
+        source_topology_id = source_endpoint_id.topology_id
+        context_uuid = source_topology_id.context_id.context_uuid.uuid
+        topology_uuid = source_topology_id.topology_uuid.uuid
+        update_endpoint_ids(
+            target_endpoint_ids, device_uuid, endpoint_uuid, context_uuid=context_uuid, topology_uuid=topology_uuid)
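
A short sketch of the endpoint-id helper (UUIDs are hypothetical; it assumes a message exposing a repeated EndPointId field such as Service.service_endpoint_ids):

    from common.proto.context_pb2 import Service
    from common.tools.grpc.EndPointIds import update_endpoint_ids

    service = Service()
    # idempotent: the second call finds the existing entry and adds nothing
    update_endpoint_ids(service.service_endpoint_ids, 'R1', 'EP1', context_uuid='admin', topology_uuid='admin')
    update_endpoint_ids(service.service_endpoint_ids, 'R1', 'EP1', context_uuid='admin', topology_uuid='admin')
    assert len(service.service_endpoint_ids) == 1
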
diff --git a/src/common/tools/grpc/ServiceIds.py b/src/common/tools/grpc/ServiceIds.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e1073cab7c4e82e0f21584f2831d94a0b8a23ba
--- /dev/null
+++ b/src/common/tools/grpc/ServiceIds.py
@@ -0,0 +1,37 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# RFC 8466 - L2VPN Service Model (L2SM)
+# Ref: https://datatracker.ietf.org/doc/html/rfc8466
+
+
+from common.proto.context_pb2 import ServiceId
+
+def update_service_ids(service_ids, context_uuid : str, service_uuid : str) -> ServiceId:
+    for service_id in service_ids:
+        if service_id.service_uuid.uuid != service_uuid: continue
+        if service_id.context_id.context_uuid.uuid != context_uuid: continue
+        break   # found, do nothing
+    else:
+        # not found, add it
+        service_id = service_ids.add()    # pylint: disable=no-member
+        service_id.service_uuid.uuid = service_uuid
+        service_id.context_id.context_uuid.uuid = context_uuid
+    return service_id
+
+def copy_service_ids(source_service_ids, target_service_ids):
+    for source_service_id in source_service_ids:
+        context_uuid = source_service_id.context_id.context_uuid.uuid
+        service_uuid = source_service_id.service_uuid.uuid
+        update_service_ids(target_service_ids, context_uuid, service_uuid)
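
And the analogous helper for service ids (UUIDs are hypothetical; it assumes a Slice message exposing repeated ServiceId slice_service_ids):

    from common.proto.context_pb2 import Slice
    from common.tools.grpc.ServiceIds import update_service_ids

    slice_request = Slice()
    update_service_ids(slice_request.slice_service_ids, context_uuid='admin', service_uuid='svc-01')
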
diff --git a/src/context/service/database/ConfigModel.py b/src/context/service/database/ConfigModel.py
index e36fc58cfa2a42425004f6c9766457c5d1c53896..bb2a37467ce3ad451bd29f824a5092ec1ad43cee 100644
--- a/src/context/service/database/ConfigModel.py
+++ b/src/context/service/database/ConfigModel.py
@@ -41,6 +41,11 @@ grpc_to_enum__config_action = functools.partial(
 class ConfigModel(Model): # pylint: disable=abstract-method
     pk = PrimaryKeyField()
 
+    def delete(self) -> None:
+        db_config_rule_pks = self.references(ConfigRuleModel)
+        for pk,_ in db_config_rule_pks: ConfigRuleModel(self.database, pk).delete()
+        super().delete()
+
     def dump(self) -> List[Dict]:
         db_config_rule_pks = self.references(ConfigRuleModel)
         config_rules = [ConfigRuleModel(self.database, pk).dump(include_position=True) for pk,_ in db_config_rule_pks]
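
The delete() override added here is the first instance of a pattern repeated below (Path, Connection, Device, EndPoint, Link, Service and Slice models): each model removes the rows it owns before removing itself, so callers such as the gRPC servicer only delete the top-level object. A minimal sketch of the pattern with hypothetical ParentModel/ChildModel classes (not part of this change):

    from common.orm.fields.ForeignKeyField import ForeignKeyField
    from common.orm.fields.PrimaryKeyField import PrimaryKeyField
    from common.orm.model.Model import Model

    class ParentModel(Model):   # pylint: disable=abstract-method
        pk = PrimaryKeyField()

        def delete(self) -> None:
            # remove owned children first, then the parent row itself
            for db_child_pk,_ in self.references(ChildModel):
                ChildModel(self.database, db_child_pk).delete()
            super().delete()

    class ChildModel(Model):    # pylint: disable=abstract-method
        pk = PrimaryKeyField()
        parent_fk = ForeignKeyField(ParentModel)
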
diff --git a/src/context/service/database/ConnectionModel.py b/src/context/service/database/ConnectionModel.py
index 1d3f093727fc59b06b13e9f39636bbd9da3b010a..4cbed43a40f3538633216f09060f8a2483fe5e1f 100644
--- a/src/context/service/database/ConnectionModel.py
+++ b/src/context/service/database/ConnectionModel.py
@@ -32,6 +32,11 @@ LOGGER = logging.getLogger(__name__)
 class PathModel(Model): # pylint: disable=abstract-method
     pk = PrimaryKeyField()
 
+    def delete(self) -> None:
+        for db_path_hop_pk,_ in self.references(PathHopModel):
+            PathHopModel(self.database, db_path_hop_pk).delete()
+        super().delete()
+
     def dump(self) -> List[Dict]:
         db_path_hop_pks = self.references(PathHopModel)
         path_hops = [PathHopModel(self.database, pk).dump(include_position=True) for pk,_ in db_path_hop_pks]
@@ -56,6 +61,18 @@ class ConnectionModel(Model):
     service_fk = ForeignKeyField(ServiceModel, required=False)
     path_fk = ForeignKeyField(PathModel, required=True)
 
+    def delete(self) -> None:
+        # pylint: disable=import-outside-toplevel
+        from .RelationModels import ConnectionSubServiceModel
+
+        # Do not remove sub-services automatically. They are supported by real services, so the Service component
+        # should handle the proper removal workflow to deconfigure the devices.
+        for db_connection_sub_service_pk,_ in self.references(ConnectionSubServiceModel):
+            ConnectionSubServiceModel(self.database, db_connection_sub_service_pk).delete()
+
+        super().delete()
+        PathModel(self.database, self.path_fk).delete()
+
     def dump_id(self) -> Dict:
         return {
             'connection_uuid': {'uuid': self.connection_uuid},
@@ -108,7 +125,7 @@ def set_path(
 
     str_path_key = connection_uuid if len(path_name) == 0 else key_to_str([connection_uuid, path_name], separator=':')
     result : Tuple[PathModel, bool] = get_or_create_object(database, PathModel, str_path_key)
-    db_path, created = result
+    db_path, created = result # pylint: disable=unused-variable
 
     db_path_hop_pks : Set[str] = set(map(operator.itemgetter(0), db_path.references(PathHopModel)))
     db_objects : List[Tuple[Union[PathModel, PathHopModel], bool]] = [db_path]
@@ -127,7 +144,7 @@ def set_path(
         db_endpoint : EndPointModel = get_object(database, EndPointModel, str_endpoint_key)
 
         result : Tuple[PathHopModel, bool] = set_path_hop(database, db_path, position, db_endpoint)
-        db_path_hop, updated = result
+        db_path_hop, updated = result # pylint: disable=unused-variable
         db_objects.append(db_path_hop)
         db_path_hop_pks.discard(db_path_hop.instance_key)
 
diff --git a/src/context/service/database/ConstraintModel.py b/src/context/service/database/ConstraintModel.py
index 4eca404ca32b0c88136c5c89fed379919c60a33d..a35ec250d8a62a8a2534e9f27ddecac801db6eff 100644
--- a/src/context/service/database/ConstraintModel.py
+++ b/src/context/service/database/ConstraintModel.py
@@ -13,10 +13,14 @@
 # limitations under the License.
 
 import logging, operator
-from typing import Dict, List, Tuple, Union
+from enum import Enum
+from typing import Dict, List, Optional, Tuple, Type, Union
 from common.orm.Database import Database
-from common.orm.HighLevel import get_or_create_object, update_or_create_object
+from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
 from common.orm.backend.Tools import key_to_str
+from common.orm.fields.BooleanField import BooleanField
+from common.orm.fields.EnumeratedField import EnumeratedField
+from common.orm.fields.FloatField import FloatField
 from common.orm.fields.ForeignKeyField import ForeignKeyField
 from common.orm.fields.IntegerField import IntegerField
 from common.orm.fields.PrimaryKeyField import PrimaryKeyField
@@ -24,55 +28,213 @@ from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
 from common.proto.context_pb2 import Constraint
 from common.tools.grpc.Tools import grpc_message_to_json_string
-from context.service.database.Tools import fast_hasher, remove_dict_key
+from .EndPointModel import EndPointModel, get_endpoint
+from .Tools import fast_hasher, remove_dict_key
 
 LOGGER = logging.getLogger(__name__)
 
 class ConstraintsModel(Model): # pylint: disable=abstract-method
     pk = PrimaryKeyField()
 
+    def delete(self) -> None:
+        db_constraint_pks = self.references(ConstraintModel)
+        for pk,_ in db_constraint_pks: ConstraintModel(self.database, pk).delete()
+        super().delete()
+
     def dump(self) -> List[Dict]:
         db_constraint_pks = self.references(ConstraintModel)
         constraints = [ConstraintModel(self.database, pk).dump(include_position=True) for pk,_ in db_constraint_pks]
         constraints = sorted(constraints, key=operator.itemgetter('position'))
         return [remove_dict_key(constraint, 'position') for constraint in constraints]
 
+class ConstraintCustomModel(Model): # pylint: disable=abstract-method
+    constraint_type = StringField(required=True, allow_empty=False)
+    constraint_value = StringField(required=True, allow_empty=False)
+
+    def dump(self) -> Dict: # pylint: disable=arguments-differ
+        return {'custom': {'constraint_type': self.constraint_type, 'constraint_value': self.constraint_value}}
+
+Union_ConstraintEndpoint = Union[
+    'ConstraintEndpointLocationGpsPositionModel', 'ConstraintEndpointLocationRegionModel',
+    'ConstraintEndpointPriorityModel'
+]
+def dump_endpoint_id(endpoint_constraint : Union_ConstraintEndpoint):
+    db_endpoints_pks = list(endpoint_constraint.references(EndPointModel))
+    num_endpoints = len(db_endpoints_pks)
+    if num_endpoints != 1:
+        raise Exception('Wrong number ({:d}) of Endpoints associated with the constraint'.format(num_endpoints))
+    db_endpoint = EndPointModel(endpoint_constraint.database, db_endpoints_pks[0])
+    return db_endpoint.dump_id()
+
+class ConstraintEndpointLocationRegionModel(Model): # pylint: disable=abstract-method
+    endpoint_fk = ForeignKeyField(EndPointModel)
+    region = StringField(required=True, allow_empty=False)
+
+    def dump(self) -> Dict: # pylint: disable=arguments-differ
+        return {'endpoint_location': {'endpoint_id': dump_endpoint_id(self), 'region': self.region}}
+
+class ConstraintEndpointLocationGpsPositionModel(Model): # pylint: disable=abstract-method
+    endpoint_fk = ForeignKeyField(EndPointModel)
+    latitude = FloatField(required=True, min_value=-90.0, max_value=90.0)
+    longitude = FloatField(required=True, min_value=-180.0, max_value=180.0)
+
+    def dump(self) -> Dict: # pylint: disable=arguments-differ
+        gps_position = {'latitude': self.latitude, 'longitude': self.longitude}
+        return {'endpoint_location': {'endpoint_id': dump_endpoint_id(self), 'gps_position': gps_position}}
+
+class ConstraintEndpointPriorityModel(Model): # pylint: disable=abstract-method
+    endpoint_fk = ForeignKeyField(EndPointModel)
+    priority = FloatField(required=True)
+
+    def dump(self) -> Dict: # pylint: disable=arguments-differ
+        return {'endpoint_priority': {'endpoint_id': dump_endpoint_id(self), 'priority': self.priority}}
+
+class ConstraintSlaAvailabilityModel(Model): # pylint: disable=abstract-method
+    num_disjoint_paths = IntegerField(required=True, min_value=1)
+    all_active = BooleanField(required=True)
+
+    def dump(self) -> Dict: # pylint: disable=arguments-differ
+        return {'sla_availability': {'num_disjoint_paths': self.num_disjoint_paths, 'all_active': self.all_active}}
+
+# enum values must match the '<kind>' infix of the corresponding 'constraint_<kind>_fk' field in ConstraintModel
+class ConstraintKindEnum(Enum):
+    CUSTOM                        = 'custom'
+    ENDPOINT_LOCATION_REGION      = 'ep_loc_region'
+    ENDPOINT_LOCATION_GPSPOSITION = 'ep_loc_gpspos'
+    ENDPOINT_PRIORITY             = 'ep_priority'
+    SLA_AVAILABILITY              = 'sla_avail'
+
+Union_SpecificConstraint = Union[
+    ConstraintCustomModel, ConstraintEndpointLocationRegionModel, ConstraintEndpointLocationGpsPositionModel,
+    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel,
+]
+
 class ConstraintModel(Model): # pylint: disable=abstract-method
     pk = PrimaryKeyField()
     constraints_fk = ForeignKeyField(ConstraintsModel)
+    kind = EnumeratedField(ConstraintKindEnum)
     position = IntegerField(min_value=0, required=True)
-    constraint_type = StringField(required=True, allow_empty=False)
-    constraint_value = StringField(required=True, allow_empty=False)
+    constraint_custom_fk        = ForeignKeyField(ConstraintCustomModel, required=False)
+    constraint_ep_loc_region_fk = ForeignKeyField(ConstraintEndpointLocationRegionModel, required=False)
+    constraint_ep_loc_gpspos_fk = ForeignKeyField(ConstraintEndpointLocationGpsPositionModel, required=False)
+    constraint_ep_priority_fk   = ForeignKeyField(ConstraintEndpointPriorityModel, required=False)
+    constraint_sla_avail_fk     = ForeignKeyField(ConstraintSlaAvailabilityModel, required=False)
+
+    def delete(self) -> None:
+        field_name = 'constraint_{:s}_fk'.format(str(self.kind.value))
+        specific_fk_value : Optional[ForeignKeyField] = getattr(self, field_name, None)
+        if specific_fk_value is None:
+            raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
+        specific_fk_class = getattr(ConstraintModel, field_name, None)
+        foreign_model_class : Model = specific_fk_class.foreign_model
+        super().delete()
+        get_object(self.database, foreign_model_class, str(specific_fk_value)).delete()
 
     def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
-        result = {
-            'custom': {
-                'constraint_type': self.constraint_type,
-                'constraint_value': self.constraint_value,
-            },
-        }
+        field_name = 'constraint_{:s}_fk'.format(str(self.kind.value))
+        specific_fk_value : Optional[ForeignKeyField] = getattr(self, field_name, None)
+        if specific_fk_value is None:
+            raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
+        specific_fk_class = getattr(ConstraintModel, field_name, None)
+        foreign_model_class : Model = specific_fk_class.foreign_model
+        constraint : Union_SpecificConstraint = get_object(self.database, foreign_model_class, str(specific_fk_value))
+        result = constraint.dump()
         if include_position: result['position'] = self.position
         return result
 
+Tuple_ConstraintSpecs = Tuple[Type, str, Dict, ConstraintKindEnum]
+def parse_constraint_custom(database : Database, grpc_constraint) -> Tuple_ConstraintSpecs:
+    constraint_class = ConstraintCustomModel
+    str_constraint_id = grpc_constraint.custom.constraint_type
+    constraint_data = {
+        'constraint_type' : grpc_constraint.custom.constraint_type,
+        'constraint_value': grpc_constraint.custom.constraint_value,
+    }
+    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.CUSTOM
+
+def parse_constraint_endpoint_location(database : Database, grpc_constraint) -> Tuple_ConstraintSpecs:
+    grpc_endpoint_id = grpc_constraint.endpoint_location.endpoint_id
+    str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
+
+    str_constraint_id = str_endpoint_key
+    constraint_data = {'endpoint_fk': db_endpoint}
+
+    grpc_location = grpc_constraint.endpoint_location.location
+    location_kind = str(grpc_location.WhichOneof('location'))
+    if location_kind == 'region':
+        constraint_class = ConstraintEndpointLocationRegionModel
+        constraint_data.update({'region': grpc_location.region})
+        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_REGION
+    elif location_kind == 'gps_position':
+        constraint_class = ConstraintEndpointLocationGpsPositionModel
+        gps_position = grpc_location.gps_position
+        constraint_data.update({'latitude': gps_position.latitude, 'longitude': gps_position.longitude})
+        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_GPSPOSITION
+    else:
+        MSG = 'Location kind {:s} in Constraint of kind endpoint_location is not implemented: {:s}'
+        raise NotImplementedError(MSG.format(location_kind, grpc_message_to_json_string(grpc_constraint)))
+
+def parse_constraint_endpoint_priority(database : Database, grpc_constraint) -> Tuple_ConstraintSpecs:
+    grpc_endpoint_id = grpc_constraint.endpoint_priority.endpoint_id
+    str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
+
+    constraint_class = ConstraintEndpointPriorityModel
+    str_constraint_id = str_endpoint_key
+    priority = grpc_constraint.endpoint_priority.priority
+    constraint_data = {'endpoint_fk': db_endpoint, 'priority': priority}
+
+    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_PRIORITY
+
+def parse_constraint_sla_availability(database : Database, grpc_constraint) -> Tuple_ConstraintSpecs:
+    constraint_class = ConstraintSlaAvailabilityModel
+    str_constraint_id = ''
+    constraint_data = {
+        'num_disjoint_paths' : grpc_constraint.sla_availability.num_disjoint_paths,
+        'all_active': grpc_constraint.sla_availability.all_active,
+    }
+    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.SLA_AVAILABILITY
+
+CONSTRAINT_PARSERS = {
+    'custom'            : parse_constraint_custom,
+    'endpoint_location' : parse_constraint_endpoint_location,
+    'endpoint_priority' : parse_constraint_endpoint_priority,
+    'sla_availability'  : parse_constraint_sla_availability,
+}
+
+Union_ConstraintModel = Union[
+    ConstraintCustomModel, ConstraintEndpointLocationGpsPositionModel, ConstraintEndpointLocationRegionModel,
+    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel
+]
+
 def set_constraint(
-    database : Database, db_constraints : ConstraintsModel, grpc_constraint, position : int
-) -> Tuple[Constraint, bool]:
-    constraint_type = str(grpc_constraint.WhichOneof('constraint'))
-    if constraint_type != 'custom':
-        raise NotImplementedError('Constraint of type {:s} is not implemented: {:s}'.format(
-            constraint_type, grpc_message_to_json_string(grpc_constraint)))
-
-    str_constraint_key_hash = fast_hasher(grpc_constraint.custom.constraint_type)
+    database : Database, db_constraints : ConstraintsModel, grpc_constraint : Constraint, position : int
+) -> Tuple[Union_ConstraintModel, bool]:
+    grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
+
+    parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
+    if parser is None:
+        raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
+            grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
+
+    # create specific constraint
+    constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(database, grpc_constraint)
+    str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
     str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
+    result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
+        database, constraint_class, str_constraint_key, constraint_data)
+    db_specific_constraint, updated = result
 
-    result : Tuple[ConstraintModel, bool] = update_or_create_object(database, ConstraintModel, str_constraint_key, {
-        'constraints_fk'  : db_constraints,
-        'position'        : position,
-        'constraint_type' : grpc_constraint.custom.constraint_type,
-        'constraint_value': grpc_constraint.custom.constraint_value,
-    })
-    db_config_rule, updated = result
-    return db_config_rule, updated
+    # create generic constraint
+    constraint_fk_field_name = 'constraint_{:s}_fk'.format(constraint_kind.value)
+    constraint_data = {
+        'constraints_fk': db_constraints, 'position': position, 'kind': constraint_kind,
+        constraint_fk_field_name: db_specific_constraint
+    }
+    result : Tuple[ConstraintModel, bool] = update_or_create_object(
+        database, ConstraintModel, str_constraint_key, constraint_data)
+    db_constraint, updated = result
+
+    return db_constraint, updated
 
 def set_constraints(
     database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
@@ -85,7 +247,8 @@ def set_constraints(
     db_objects = [(db_constraints, created)]
 
     for position,grpc_constraint in enumerate(grpc_constraints):
-        result : Tuple[ConstraintModel, bool] = set_constraint(database, db_constraints, grpc_constraint, position)
+        result : Tuple[ConstraintModel, bool] = set_constraint(
+            database, db_constraints, grpc_constraint, position)
         db_constraint, updated = result
         db_objects.append((db_constraint, updated))
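
With this refactor, each constraint kind is stored in its own specific model and referenced from a generic ConstraintModel row, and set_constraint dispatches through the CONSTRAINT_PARSERS registry. Adding a new kind therefore means a specific model, a ConstraintKindEnum value matching a new constraint_<kind>_fk field, and a registered parser. A purely hypothetical sketch, written as if appended to ConstraintModel.py (the 'latency' oneof, the LATENCY enum value and the constraint_latency_fk field do not exist in this change):

    # hypothetical extension; also requires LATENCY = 'latency' in ConstraintKindEnum and
    # constraint_latency_fk = ForeignKeyField(ConstraintLatencyModel, required=False) in ConstraintModel
    class ConstraintLatencyModel(Model):    # pylint: disable=abstract-method
        e2e_latency_ms = FloatField(required=True, min_value=0.0)

        def dump(self) -> Dict:             # pylint: disable=arguments-differ
            return {'latency': {'e2e_latency_ms': self.e2e_latency_ms}}

    def parse_constraint_latency(database : Database, grpc_constraint) -> Tuple_ConstraintSpecs:
        constraint_data = {'e2e_latency_ms': grpc_constraint.latency.e2e_latency_ms}
        return ConstraintLatencyModel, '', constraint_data, ConstraintKindEnum.LATENCY

    CONSTRAINT_PARSERS['latency'] = parse_constraint_latency
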
 
diff --git a/src/context/service/database/DeviceModel.py b/src/context/service/database/DeviceModel.py
index 0f0201190542397a34b68fa217706c904606ead3..0d42326793b44473d8aef3da2c3e9ce8464bd1c4 100644
--- a/src/context/service/database/DeviceModel.py
+++ b/src/context/service/database/DeviceModel.py
@@ -54,6 +54,24 @@ class DeviceModel(Model):
     device_config_fk = ForeignKeyField(ConfigModel)
     device_operational_status = EnumeratedField(ORM_DeviceOperationalStatusEnum, required=True)
 
+    def delete(self) -> None:
+        # pylint: disable=import-outside-toplevel
+        from .EndPointModel import EndPointModel
+        from .RelationModels import TopologyDeviceModel
+
+        for db_endpoint_pk,_ in self.references(EndPointModel):
+            EndPointModel(self.database, db_endpoint_pk).delete()
+
+        for db_topology_device_pk,_ in self.references(TopologyDeviceModel):
+            TopologyDeviceModel(self.database, db_topology_device_pk).delete()
+
+        for db_driver_pk,_ in self.references(DriverModel):
+            DriverModel(self.database, db_driver_pk).delete()
+
+        super().delete()
+
+        ConfigModel(self.database, self.device_config_fk).delete()
+
     def dump_id(self) -> Dict:
         return {'device_uuid': {'uuid': self.device_uuid}}
 
diff --git a/src/context/service/database/EndPointModel.py b/src/context/service/database/EndPointModel.py
index abeeb1b690b97e47772e3bf38d77016569bf55dc..aeef91b654dfaaaaf14d53f625126632b7303741 100644
--- a/src/context/service/database/EndPointModel.py
+++ b/src/context/service/database/EndPointModel.py
@@ -13,14 +13,16 @@
 # limitations under the License.
 
 import logging
-from typing import Dict, List
+from typing import Dict, List, Optional, Tuple
 from common.orm.Database import Database
+from common.orm.HighLevel import get_object
 from common.orm.backend.Tools import key_to_str
 from common.orm.fields.EnumeratedField import EnumeratedField
 from common.orm.fields.ForeignKeyField import ForeignKeyField
 from common.orm.fields.PrimaryKeyField import PrimaryKeyField
 from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
+from common.proto.context_pb2 import EndPointId
 from .DeviceModel import DeviceModel
 from .KpiSampleType import ORM_KpiSampleTypeEnum, grpc_to_enum__kpi_sample_type
 from .TopologyModel import TopologyModel
@@ -34,6 +36,11 @@ class EndPointModel(Model):
     endpoint_uuid = StringField(required=True, allow_empty=False)
     endpoint_type = StringField()
 
+    def delete(self) -> None:
+        for db_kpi_sample_type_pk,_ in self.references(KpiSampleTypeModel):
+            KpiSampleTypeModel(self.database, db_kpi_sample_type_pk).delete()
+        super().delete()
+
     def dump_id(self) -> Dict:
         device_id = DeviceModel(self.database, self.device_fk).dump_id()
         result = {
@@ -75,3 +82,31 @@ def set_kpi_sample_types(database : Database, db_endpoint : EndPointModel, grpc_
         db_endpoint_kpi_sample_type.endpoint_fk = db_endpoint
         db_endpoint_kpi_sample_type.kpi_sample_type = orm_kpi_sample_type
         db_endpoint_kpi_sample_type.save()
+
+def get_endpoint(
+    database : Database, grpc_endpoint_id : EndPointId,
+    validate_topology_exists : bool = True, validate_device_in_topology : bool = True
+) -> Tuple[str, EndPointModel]:
+    endpoint_uuid                  = grpc_endpoint_id.endpoint_uuid.uuid
+    endpoint_device_uuid           = grpc_endpoint_id.device_id.device_uuid.uuid
+    endpoint_topology_uuid         = grpc_endpoint_id.topology_id.topology_uuid.uuid
+    endpoint_topology_context_uuid = grpc_endpoint_id.topology_id.context_id.context_uuid.uuid
+    str_endpoint_key = key_to_str([endpoint_device_uuid, endpoint_uuid])
+
+    if len(endpoint_topology_context_uuid) > 0 and len(endpoint_topology_uuid) > 0:
+        # check topology exists
+        str_topology_key = key_to_str([endpoint_topology_context_uuid, endpoint_topology_uuid])
+        if validate_topology_exists:
+            from .TopologyModel import TopologyModel
+            get_object(database, TopologyModel, str_topology_key)
+
+        # check device is in topology
+        str_topology_device_key = key_to_str([str_topology_key, endpoint_device_uuid], separator='--')
+        if validate_device_in_topology:
+            from .RelationModels import TopologyDeviceModel
+            get_object(database, TopologyDeviceModel, str_topology_device_key)
+
+        str_endpoint_key = key_to_str([str_endpoint_key, str_topology_key], separator=':')
+
+    db_endpoint : EndPointModel = get_object(database, EndPointModel, str_endpoint_key)
+    return str_endpoint_key, db_endpoint
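
A brief sketch of how get_endpoint is used by the constraint parsers above (UUIDs are hypothetical; 'database' is assumed to be an already-opened common.orm Database instance; the topology and device-in-topology checks only run when the EndPointId carries context/topology UUIDs):

    from common.proto.context_pb2 import EndPointId
    from context.service.database.EndPointModel import get_endpoint

    endpoint_id = EndPointId()
    endpoint_id.device_id.device_uuid.uuid = 'R1'
    endpoint_id.endpoint_uuid.uuid = 'EP1'
    endpoint_id.topology_id.context_id.context_uuid.uuid = 'admin'
    endpoint_id.topology_id.topology_uuid.uuid = 'admin'

    # raises if the topology, the device-to-topology relation, or the endpoint itself is missing;
    # 'database' is an assumed pre-existing common.orm.Database.Database instance
    str_endpoint_key, db_endpoint = get_endpoint(database, endpoint_id)
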
diff --git a/src/context/service/database/LinkModel.py b/src/context/service/database/LinkModel.py
index 742044b9758df297413ad2d0318520c825e8b738..8f1d971c3127371e0d9a1a401d885a02269bd8dd 100644
--- a/src/context/service/database/LinkModel.py
+++ b/src/context/service/database/LinkModel.py
@@ -25,6 +25,18 @@ class LinkModel(Model):
     pk = PrimaryKeyField()
     link_uuid = StringField(required=True, allow_empty=False)
 
+    def delete(self) -> None:
+        #pylint: disable=import-outside-toplevel
+        from .RelationModels import LinkEndPointModel, TopologyLinkModel
+
+        for db_link_endpoint_pk,_ in self.references(LinkEndPointModel):
+            LinkEndPointModel(self.database, db_link_endpoint_pk).delete()
+
+        for db_topology_link_pk,_ in self.references(TopologyLinkModel):
+            TopologyLinkModel(self.database, db_topology_link_pk).delete()
+
+        super().delete()
+
     def dump_id(self) -> Dict:
         return {'link_uuid': {'uuid': self.link_uuid}}
 
diff --git a/src/context/service/database/ServiceModel.py b/src/context/service/database/ServiceModel.py
index cf756af60a8178a9ae2fda2a5fa5ddeebc73912c..8b32d1cc9eeec248d1097f972df93dbd2c0882fa 100644
--- a/src/context/service/database/ServiceModel.py
+++ b/src/context/service/database/ServiceModel.py
@@ -56,6 +56,18 @@ class ServiceModel(Model):
     service_status = EnumeratedField(ORM_ServiceStatusEnum, required=True)
     service_config_fk = ForeignKeyField(ConfigModel)
 
+    def delete(self) -> None:
+        #pylint: disable=import-outside-toplevel
+        from .RelationModels import ServiceEndPointModel
+
+        for db_service_endpoint_pk,_ in self.references(ServiceEndPointModel):
+            ServiceEndPointModel(self.database, db_service_endpoint_pk).delete()
+
+        super().delete()
+
+        ConfigModel(self.database, self.service_config_fk).delete()
+        ConstraintsModel(self.database, self.service_constraints_fk).delete()
+
     def dump_id(self) -> Dict:
         context_id = ContextModel(self.database, self.context_fk).dump_id()
         return {
diff --git a/src/context/service/database/SliceModel.py b/src/context/service/database/SliceModel.py
index 1bc51df2e10d46bb565ef7fbb34524e1e238be5f..bc00ada43758c9c5ffefbb88a87134aa46fbd73a 100644
--- a/src/context/service/database/SliceModel.py
+++ b/src/context/service/database/SliceModel.py
@@ -22,6 +22,7 @@ from common.orm.fields.StringField import StringField
 from common.orm.model.Model import Model
 from common.orm.HighLevel import get_related_objects
 from common.proto.context_pb2 import SliceStatusEnum
+from .ConfigModel import ConfigModel
 from .ConstraintModel import ConstraintsModel
 from .ContextModel import ContextModel
 from .Tools import grpc_to_enum
@@ -44,6 +45,25 @@ class SliceModel(Model):
     slice_uuid = StringField(required=True, allow_empty=False)
     slice_constraints_fk = ForeignKeyField(ConstraintsModel)
     slice_status = EnumeratedField(ORM_SliceStatusEnum, required=True)
+    slice_config_fk = ForeignKeyField(ConfigModel)
+
+    def delete(self) -> None:
+        # pylint: disable=import-outside-toplevel
+        from .RelationModels import SliceEndPointModel, SliceServiceModel, SliceSubSliceModel
+
+        for db_slice_endpoint_pk,_ in self.references(SliceEndPointModel):
+            SliceEndPointModel(self.database, db_slice_endpoint_pk).delete()
+
+        for db_slice_service_pk,_ in self.references(SliceServiceModel):
+            SliceServiceModel(self.database, db_slice_service_pk).delete()
+
+        for db_slice_subslice_pk,_ in self.references(SliceSubSliceModel):
+            SliceSubSliceModel(self.database, db_slice_subslice_pk).delete()
+
+        super().delete()
+
+        ConfigModel(self.database, self.slice_config_fk).delete()
+        ConstraintsModel(self.database, self.slice_constraints_fk).delete()
 
     def dump_id(self) -> Dict:
         context_id = ContextModel(self.database, self.context_fk).dump_id()
@@ -60,6 +80,9 @@ class SliceModel(Model):
     def dump_constraints(self) -> List[Dict]:
         return ConstraintsModel(self.database, self.slice_constraints_fk).dump()
 
+    def dump_config(self) -> List[Dict]:
+        return ConfigModel(self.database, self.slice_config_fk).dump()
+
     def dump_service_ids(self) -> List[Dict]:
         from .RelationModels import SliceServiceModel # pylint: disable=import-outside-toplevel
         db_services = get_related_objects(self, SliceServiceModel, 'service_fk')
@@ -71,8 +94,8 @@ class SliceModel(Model):
         return [db_subslice.dump_id() for db_subslice in sorted(db_subslices, key=operator.attrgetter('pk'))]
 
     def dump(   # pylint: disable=arguments-differ
-            self, include_endpoint_ids=True, include_constraints=True, include_service_ids=True,
-            include_subslice_ids=True
+            self, include_endpoint_ids=True, include_constraints=True, include_config_rules=True,
+            include_service_ids=True, include_subslice_ids=True
         ) -> Dict:
         result = {
             'slice_id': self.dump_id(),
@@ -80,6 +103,7 @@ class SliceModel(Model):
         }
         if include_endpoint_ids: result['slice_endpoint_ids'] = self.dump_endpoint_ids()
         if include_constraints: result['slice_constraints'] = self.dump_constraints()
+        if include_config_rules: result.setdefault('slice_config', {})['config_rules'] = self.dump_config()
         if include_service_ids: result['slice_service_ids'] = self.dump_service_ids()
         if include_subslice_ids: result['slice_subslice_ids'] = self.dump_subslice_ids()
         return result
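
With slice_config_fk in place, SliceModel.dump() also reports the stored config rules under 'slice_config', which is what the updated GetSlice/SetSlice code paths below rely on. A brief hedged sketch ('database' and 'slice_pk' are assumed to exist already):

    # illustrative only: 'database' is an open common.orm Database, 'slice_pk' a known SliceModel key
    db_slice = SliceModel(database, slice_pk)
    slice_dump = db_slice.dump(include_config_rules=True)
    assert 'config_rules' in slice_dump['slice_config']
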
diff --git a/src/context/service/grpc_server/ContextServiceServicerImpl.py b/src/context/service/grpc_server/ContextServiceServicerImpl.py
index 49663069d77b6da3aad989d00e9e5bd108a89f78..4c8f957ecb70765cbd36032fca7bfacc27f9b5ae 100644
--- a/src/context/service/grpc_server/ContextServiceServicerImpl.py
+++ b/src/context/service/grpc_server/ContextServiceServicerImpl.py
@@ -31,23 +31,24 @@ from common.proto.context_pb2 import (
 from common.proto.context_pb2_grpc import ContextServiceServicer
 from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
 from common.rpc_method_wrapper.ServiceExceptions import InvalidArgumentException
-from context.service.database.ConfigModel import ConfigModel, ConfigRuleModel, grpc_config_rules_to_raw, update_config
-from context.service.database.ConnectionModel import ConnectionModel, PathHopModel, PathModel, set_path
-from context.service.database.ConstraintModel import ConstraintModel, ConstraintsModel, set_constraints
+from context.service.database.ConfigModel import grpc_config_rules_to_raw, update_config
+from context.service.database.ConnectionModel import ConnectionModel, set_path
+from context.service.database.ConstraintModel import set_constraints
 from context.service.database.ContextModel import ContextModel
-from context.service.database.DeviceModel import (
-    DeviceModel, DriverModel, grpc_to_enum__device_operational_status, set_drivers)
-from context.service.database.EndPointModel import EndPointModel, KpiSampleTypeModel, set_kpi_sample_types
+from context.service.database.DeviceModel import DeviceModel, grpc_to_enum__device_operational_status, set_drivers
+from context.service.database.EndPointModel import EndPointModel, set_kpi_sample_types
 from context.service.database.Events import notify_event
 from context.service.database.LinkModel import LinkModel
 from context.service.database.RelationModels import (
-    ConnectionSubServiceModel, LinkEndPointModel, ServiceEndPointModel, SliceEndPointModel, SliceServiceModel, SliceSubSliceModel, TopologyDeviceModel, TopologyLinkModel)
+    ConnectionSubServiceModel, LinkEndPointModel, ServiceEndPointModel, SliceEndPointModel, SliceServiceModel,
+    SliceSubSliceModel, TopologyDeviceModel, TopologyLinkModel)
 from context.service.database.ServiceModel import (
     ServiceModel, grpc_to_enum__service_status, grpc_to_enum__service_type)
 from context.service.database.SliceModel import SliceModel, grpc_to_enum__slice_status
 from context.service.database.TopologyModel import TopologyModel
 from .Constants import (
-    CONSUME_TIMEOUT, TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_SLICE, TOPIC_TOPOLOGY)
+    CONSUME_TIMEOUT, TOPIC_CONNECTION, TOPIC_CONTEXT, TOPIC_DEVICE, TOPIC_LINK, TOPIC_SERVICE, TOPIC_SLICE,
+    TOPIC_TOPOLOGY)
 
 LOGGER = logging.getLogger(__name__)
 
@@ -336,25 +337,7 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             if not found: return Empty()
 
             dict_device_id = db_device.dump_id()
-
-            for db_endpoint_pk,_ in db_device.references(EndPointModel):
-                db_endpoint = EndPointModel(self.database, db_endpoint_pk)
-                for db_kpi_sample_type_pk,_ in db_endpoint.references(KpiSampleTypeModel):
-                    KpiSampleTypeModel(self.database, db_kpi_sample_type_pk).delete()
-                db_endpoint.delete()
-
-            for db_topology_device_pk,_ in db_device.references(TopologyDeviceModel):
-                TopologyDeviceModel(self.database, db_topology_device_pk).delete()
-
-            for db_driver_pk,_ in db_device.references(DriverModel):
-                DriverModel(self.database, db_driver_pk).delete()
-
-            db_config = ConfigModel(self.database, db_device.device_config_fk)
-            for db_config_rule_pk,_ in db_config.references(ConfigRuleModel):
-                ConfigRuleModel(self.database, db_config_rule_pk).delete()
-
             db_device.delete()
-            db_config.delete()
 
             event_type = EventTypeEnum.EVENTTYPE_REMOVE
             notify_event(self.messagebroker, TOPIC_DEVICE, event_type, {'device_id': dict_device_id})
@@ -443,14 +426,8 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             if not found: return Empty()
 
             dict_link_id = db_link.dump_id()
-
-            for db_link_endpoint_pk,_ in db_link.references(LinkEndPointModel):
-                LinkEndPointModel(self.database, db_link_endpoint_pk).delete()
-
-            for db_topology_link_pk,_ in db_link.references(TopologyLinkModel):
-                TopologyLinkModel(self.database, db_topology_link_pk).delete()
-
             db_link.delete()
+
             event_type = EventTypeEnum.EVENTTYPE_REMOVE
             notify_event(self.messagebroker, TOPIC_LINK, event_type, {'link_id': dict_link_id})
             return Empty()
@@ -557,21 +534,7 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             if not found: return Empty()
 
             dict_service_id = db_service.dump_id()
-
-            for db_service_endpoint_pk,_ in db_service.references(ServiceEndPointModel):
-                ServiceEndPointModel(self.database, db_service_endpoint_pk).delete()
-
-            db_config = ConfigModel(self.database, db_service.service_config_fk)
-            for db_config_rule_pk,_ in db_config.references(ConfigRuleModel):
-                ConfigRuleModel(self.database, db_config_rule_pk).delete()
-
-            db_constraints = ConstraintsModel(self.database, db_service.service_constraints_fk)
-            for db_constraint_pk,_ in db_constraints.references(ConstraintModel):
-                ConstraintModel(self.database, db_constraint_pk).delete()
-
             db_service.delete()
-            db_config.delete()
-            db_constraints.delete()
 
             event_type = EventTypeEnum.EVENTTYPE_REMOVE
             notify_event(self.messagebroker, TOPIC_SERVICE, event_type, {'service_id': dict_service_id})
@@ -607,8 +570,8 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             str_key = key_to_str([request.context_id.context_uuid.uuid, request.slice_uuid.uuid])
             db_slice : SliceModel = get_object(self.database, SliceModel, str_key)
             return Slice(**db_slice.dump(
-                include_endpoint_ids=True, include_constraints=True, include_service_ids=True,
-                include_subslice_ids=True))
+                include_endpoint_ids=True, include_constraints=True, include_config_rules=True,
+                include_service_ids=True, include_subslice_ids=True))
 
     @safe_and_metered_rpc_method(METRICS, LOGGER)
     def SetSlice(self, request: Slice, context : grpc.ServicerContext) -> SliceId:
@@ -632,11 +595,16 @@ class ContextServiceServicerImpl(ContextServiceServicer):
                 self.database, str_slice_key, 'constraints', request.slice_constraints)
             db_constraints = constraints_result[0][0]
 
+            config_rules = grpc_config_rules_to_raw(request.slice_config.config_rules)
+            running_config_result = update_config(self.database, str_slice_key, 'running', config_rules)
+            db_running_config = running_config_result[0][0]
+
             result : Tuple[SliceModel, bool] = update_or_create_object(self.database, SliceModel, str_slice_key, {
                 'context_fk'          : db_context,
                 'slice_uuid'          : slice_uuid,
                 'slice_constraints_fk': db_constraints,
                 'slice_status'        : grpc_to_enum__slice_status(request.slice_status.slice_status),
+                'slice_config_fk'     : db_running_config,
             })
             db_slice, updated = result
 
@@ -698,22 +666,7 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             if not found: return Empty()
 
             dict_slice_id = db_slice.dump_id()
-
-            for db_slice_endpoint_pk,_ in db_slice.references(SliceEndPointModel):
-                SliceEndPointModel(self.database, db_slice_endpoint_pk).delete()
-
-            db_constraints = ConstraintsModel(self.database, db_slice.slice_constraints_fk)
-            for db_constraint_pk,_ in db_constraints.references(ConstraintModel):
-                ConstraintModel(self.database, db_constraint_pk).delete()
-
-            for db_slice_service_pk,_ in db_slice.references(SliceServiceModel):
-                SliceServiceModel(self.database, db_slice_service_pk).delete()
-
-            for db_slice_subslice_pk,_ in db_slice.references(SliceSubSliceModel):
-                SliceSubSliceModel(self.database, db_slice_subslice_pk).delete()
-
             db_slice.delete()
-            db_constraints.delete()
 
             event_type = EventTypeEnum.EVENTTYPE_REMOVE
             notify_event(self.messagebroker, TOPIC_SLICE, event_type, {'slice_id': dict_slice_id})
@@ -798,20 +751,7 @@ class ContextServiceServicerImpl(ContextServiceServicer):
             if not found: return Empty()
 
             dict_connection_id = db_connection.dump_id()
-
-            db_path = PathModel(self.database, db_connection.path_fk)
-            for db_path_hop_pk,_ in db_path.references(PathHopModel):
-                PathHopModel(self.database, db_path_hop_pk).delete()
-
-            # Do not remove sub-services automatically. They are supported by real services, so Service component should
-            # deal with the correct removal workflow to deconfigure the devices.
-            for db_connection_sub_service_pk,_ in db_connection.references(ConnectionSubServiceModel):
-                db_connection_sub_service : ConnectionSubServiceModel = get_object(
-                    self.database, ConnectionSubServiceModel, db_connection_sub_service_pk)
-                db_connection_sub_service.delete()
-
             db_connection.delete()
-            db_path.delete()
 
             event_type = EventTypeEnum.EVENTTYPE_REMOVE
             notify_event(self.messagebroker, TOPIC_CONNECTION, event_type, {'connection_id': dict_connection_id})
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
index efdd1f8b96a09b31b7d144b676ddd355152fc003..b46c9468c56974be5c987dbbc284daae337d3c7b 100644
--- a/src/context/tests/test_unitary.py
+++ b/src/context/tests/test_unitary.py
@@ -846,7 +846,7 @@ def test_grpc_service(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 84
+    assert len(db_entries) == 89
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client_grpc.GetService(ServiceId(**SERVICE_R1_R2_ID))
@@ -1042,7 +1042,7 @@ def test_grpc_connection(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 137
+    assert len(db_entries) == 150
 
     # ----- Create the object ------------------------------------------------------------------------------------------
     with pytest.raises(grpc.RpcError) as e:
@@ -1082,7 +1082,7 @@ def test_grpc_connection(
     for db_entry in db_entries:
         LOGGER.info('  [{:>4s}] {:40s} :: {:s}'.format(*db_entry)) # pragma: no cover
     LOGGER.info('-----------------------------------------------------------')
-    assert len(db_entries) == 153
+    assert len(db_entries) == 166
 
     # ----- Get when the object exists ---------------------------------------------------------------------------------
     response = context_client_grpc.GetConnection(ConnectionId(**CONNECTION_R1_R3_ID))