From 95f27fa082e4660025790c22be467b7c27081d00 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 12:35:12 +0000
Subject: [PATCH 01/31] Common MessageBroker - NATS:

- added support for username and password
- added support for direct URI
---
 .../backend/nats/NatsBackend.py               | 24 ++++++++++++-------
 1 file changed, 15 insertions(+), 9 deletions(-)

diff --git a/src/common/message_broker/backend/nats/NatsBackend.py b/src/common/message_broker/backend/nats/NatsBackend.py
index 6c644a0a8..4a6d5e9cd 100644
--- a/src/common/message_broker/backend/nats/NatsBackend.py
+++ b/src/common/message_broker/backend/nats/NatsBackend.py
@@ -19,18 +19,24 @@ from common.message_broker.Message import Message
 from .._Backend import _Backend
 from .NatsBackendThread import NatsBackendThread
 
-DEFAULT_NATS_URI = 'nats://127.0.0.1:4222'
-#NATS_URI_TEMPLATE = 'nats://{:s}:{:s}@nats.{:s}.svc.cluster.local:{:s}' # with authentication
-NATS_URI_TEMPLATE = 'nats://nats.{:s}.svc.cluster.local:{:s}'
+NATS_URI_TEMPLATE_AUTH = 'nats://{:s}:{:s}@nats.{:s}.svc.cluster.local:{:s}'
+NATS_URI_TEMPLATE_NOAUTH = 'nats://nats.{:s}.svc.cluster.local:{:s}'
 
 class NatsBackend(_Backend):
     def __init__(self, **settings) -> None: # pylint: disable=super-init-not-called
-        nats_namespace   = get_setting('NATS_NAMESPACE', settings=settings)
-        nats_client_port = get_setting('NATS_CLIENT_PORT', settings=settings)
-        if nats_namespace is None or nats_client_port is None:
-            nats_uri = get_setting('NATS_URI', settings=settings, default=DEFAULT_NATS_URI)
-        else:
-            nats_uri = NATS_URI_TEMPLATE.format(nats_namespace, nats_client_port)
+        nats_uri = get_setting('NATS_URI', settings=settings, default=None)
+        if nats_uri is None:
+            nats_namespace   = get_setting('NATS_NAMESPACE', settings=settings)
+            nats_client_port = get_setting('NATS_CLIENT_PORT', settings=settings)
+            nats_username    = get_setting('NATS_USERNAME', settings=settings, default=None)
+            nats_password    = get_setting('NATS_PASSWORD', settings=settings, default=None)
+            if nats_username is None or nats_password is None:
+                nats_uri = NATS_URI_TEMPLATE_NOAUTH.format(
+                    nats_namespace, nats_client_port)
+            else:
+                nats_uri = NATS_URI_TEMPLATE_AUTH.format(
+                    nats_username, nats_password, nats_namespace, nats_client_port)
+
         self._terminate = threading.Event()
         self._nats_backend_thread = NatsBackendThread(nats_uri)
         self._nats_backend_thread.start()
-- 
GitLab


From 227bdc14cbb7f519f446d1680d0e03c1454ac875 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 12:35:32 +0000
Subject: [PATCH 02/31] Context component:

- added support for direct CockroachDB URI
---
 src/context/service/database/Engine.py | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/src/context/service/database/Engine.py b/src/context/service/database/Engine.py
index a37ec0c1e..46c1b8c25 100644
--- a/src/context/service/database/Engine.py
+++ b/src/context/service/database/Engine.py
@@ -24,15 +24,16 @@ CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@cockroachdb-public.{:s}.svc.cluster
 class Engine:
     @staticmethod
     def get_engine() -> sqlalchemy.engine.Engine:
-        CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
-        CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
-        CRDB_DATABASE  = get_setting('CRDB_DATABASE')
-        CRDB_USERNAME  = get_setting('CRDB_USERNAME')
-        CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
-        CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
-
-        crdb_uri = CRDB_URI_TEMPLATE.format(
-            CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
+        crdb_uri = get_setting('CRDB_URI', default=None)
+        if crdb_uri is None:
+            CRDB_NAMESPACE = get_setting('CRDB_NAMESPACE')
+            CRDB_SQL_PORT  = get_setting('CRDB_SQL_PORT')
+            CRDB_DATABASE  = get_setting('CRDB_DATABASE')
+            CRDB_USERNAME  = get_setting('CRDB_USERNAME')
+            CRDB_PASSWORD  = get_setting('CRDB_PASSWORD')
+            CRDB_SSLMODE   = get_setting('CRDB_SSLMODE')
+            crdb_uri = CRDB_URI_TEMPLATE.format(
+                CRDB_USERNAME, CRDB_PASSWORD, CRDB_NAMESPACE, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
 
         try:
             engine = sqlalchemy.create_engine(
-- 
GitLab


From 87d0e798d170c08e2986eae361fcbbf448bfee5f Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 12:44:44 +0000
Subject: [PATCH 03/31] Device component - TAPI Driver:

- added basic support for HTTPS
- added optional support for authentication
---
 .../service/drivers/transport_api/Tools.py    | 21 ++++++++++++-------
 .../transport_api/TransportApiDriver.py       | 19 +++++++++++------
 2 files changed, 27 insertions(+), 13 deletions(-)

diff --git a/src/device/service/drivers/transport_api/Tools.py b/src/device/service/drivers/transport_api/Tools.py
index 898929419..6b1be361b 100644
--- a/src/device/service/drivers/transport_api/Tools.py
+++ b/src/device/service/drivers/transport_api/Tools.py
@@ -13,6 +13,8 @@
 # limitations under the License.
 
 import json, logging, requests
+from requests.auth import HTTPBasicAuth
+from typing import Optional
 from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
 
 LOGGER = logging.getLogger(__name__)
@@ -28,11 +30,13 @@ def find_key(resource, key):
     return json.loads(resource[1])[key]
 
 
-def config_getter(root_url, resource_key, timeout):
+def config_getter(
+    root_url : str, resource_key : str, auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None
+):
     url = '{:s}/restconf/data/tapi-common:context'.format(root_url)
     result = []
     try:
-        response = requests.get(url, timeout=timeout)
+        response = requests.get(url, timeout=timeout, verify=False, auth=auth)
     except requests.exceptions.Timeout:
         LOGGER.exception('Timeout connecting {:s}'.format(url))
     except Exception as e:  # pylint: disable=broad-except
@@ -63,8 +67,10 @@ def config_getter(root_url, resource_key, timeout):
     return result
 
 def create_connectivity_service(
-    root_url, timeout, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name,
-    layer_protocol_qualifier):
+    root_url, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name,
+    layer_protocol_qualifier,
+    auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None
+):
 
     url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context'.format(root_url)
     headers = {'content-type': 'application/json'}
@@ -105,7 +111,8 @@ def create_connectivity_service(
     results = []
     try:
         LOGGER.info('Connectivity service {:s}: {:s}'.format(str(uuid), str(data)))
-        response = requests.post(url=url, data=json.dumps(data), timeout=timeout, headers=headers)
+        response = requests.post(
+            url=url, data=json.dumps(data), timeout=timeout, headers=headers, verify=False, auth=auth)
         LOGGER.info('TAPI response: {:s}'.format(str(response)))
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception creating ConnectivityService(uuid={:s}, data={:s})'.format(str(uuid), str(data)))
@@ -117,12 +124,12 @@ def create_connectivity_service(
         results.append(response.status_code in HTTP_OK_CODES)
     return results
 
-def delete_connectivity_service(root_url, timeout, uuid):
+def delete_connectivity_service(root_url, uuid, auth : Optional[HTTPBasicAuth] = None, timeout : Optional[int] = None):
     url = '{:s}/restconf/data/tapi-common:context/tapi-connectivity:connectivity-context/connectivity-service={:s}'
     url = url.format(root_url, uuid)
     results = []
     try:
-        response = requests.delete(url=url, timeout=timeout)
+        response = requests.delete(url=url, timeout=timeout, verify=False, auth=auth)
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception deleting ConnectivityService(uuid={:s})'.format(str(uuid)))
         results.append(e)
diff --git a/src/device/service/drivers/transport_api/TransportApiDriver.py b/src/device/service/drivers/transport_api/TransportApiDriver.py
index 71d7aa336..874d4aeb6 100644
--- a/src/device/service/drivers/transport_api/TransportApiDriver.py
+++ b/src/device/service/drivers/transport_api/TransportApiDriver.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging, requests, threading
+from requests.auth import HTTPBasicAuth
 from typing import Any, Iterator, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
 from common.type_checkers.Checkers import chk_string, chk_type
@@ -29,7 +30,11 @@ class TransportApiDriver(_Driver):
         self.__lock = threading.Lock()
         self.__started = threading.Event()
         self.__terminate = threading.Event()
-        self.__tapi_root = 'http://' + address + ':' + str(port)
+        username = settings.get('username')
+        password = settings.get('password')
+        self.__auth = HTTPBasicAuth(username, password) if username is not None and password is not None else None
+        scheme = settings.get('scheme', 'http')
+        self.__tapi_root = '{:s}://{:s}:{:d}'.format(scheme, address, int(port))
         self.__timeout = int(settings.get('timeout', 120))
 
     def Connect(self) -> bool:
@@ -37,7 +42,7 @@ class TransportApiDriver(_Driver):
         with self.__lock:
             if self.__started.is_set(): return True
             try:
-                requests.get(url, timeout=self.__timeout)
+                requests.get(url, timeout=self.__timeout, verify=False, auth=self.__auth)
             except requests.exceptions.Timeout:
                 LOGGER.exception('Timeout connecting {:s}'.format(str(self.__tapi_root)))
                 return False
@@ -67,7 +72,8 @@ class TransportApiDriver(_Driver):
             for i, resource_key in enumerate(resource_keys):
                 str_resource_name = 'resource_key[#{:d}]'.format(i)
                 chk_string(str_resource_name, resource_key, allow_empty=False)
-                results.extend(config_getter(self.__tapi_root, resource_key, self.__timeout))
+                results.extend(config_getter(
+                    self.__tapi_root, resource_key, timeout=self.__timeout, auth=self.__auth))
         return results
 
     @metered_subclass_method(METRICS_POOL)
@@ -89,8 +95,8 @@ class TransportApiDriver(_Driver):
                 direction = find_key(resource, 'direction')
 
                 data = create_connectivity_service(
-                    self.__tapi_root, self.__timeout, uuid, input_sip, output_sip, direction, capacity_value,
-                    capacity_unit, layer_protocol_name, layer_protocol_qualifier)
+                    self.__tapi_root, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit,
+                    layer_protocol_name, layer_protocol_qualifier, timeout=self.__timeout, auth=self.__auth)
                 results.extend(data)
         return results
 
@@ -102,7 +108,8 @@ class TransportApiDriver(_Driver):
             for resource in resources:
                 LOGGER.info('resource = {:s}'.format(str(resource)))
                 uuid = find_key(resource, 'uuid')
-                results.extend(delete_connectivity_service(self.__tapi_root, self.__timeout, uuid))
+                results.extend(delete_connectivity_service(
+                    self.__tapi_root, uuid, timeout=self.__timeout, auth=self.__auth))
         return results
 
     @metered_subclass_method(METRICS_POOL)
-- 
GitLab


From 2887024257d5f06cd86a5e046fe5612cde9751de Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 14:48:05 +0000
Subject: [PATCH 04/31] Deploy CockroachDB:

- moved Mgmt WebUI to port 8081 to prevent collisions with TAPI server used in hackfest
---
 deploy/crdb.sh | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/deploy/crdb.sh b/deploy/crdb.sh
index 76aa07370..598980ac8 100755
--- a/deploy/crdb.sh
+++ b/deploy/crdb.sh
@@ -111,7 +111,7 @@ function crdb_deploy_single() {
     echo
 
     echo "CockroachDB Port Mapping"
-    echo ">>> Expose CockroachDB SQL port (26257)"
+    echo ">>> Expose CockroachDB SQL port (26257->26257)"
     CRDB_SQL_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="sql")].port}')
     PATCH='{"data": {"'${CRDB_SQL_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_SQL_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
@@ -122,12 +122,13 @@ function crdb_deploy_single() {
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
     echo
 
-    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080)"
+    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080->8081)"
+    CRDB_GUI_PORT_EXT="8081"
     CRDB_GUI_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="http")].port}')
-    PATCH='{"data": {"'${CRDB_GUI_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
+    PATCH='{"data": {"'${CRDB_GUI_PORT_EXT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
 
-    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT}', "hostPort": '${CRDB_GUI_PORT}'}'
+    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT_EXT}', "hostPort": '${CRDB_GUI_PORT_EXT}'}'
     CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
     PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
@@ -275,12 +276,13 @@ function crdb_deploy_cluster() {
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
     echo
 
-    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080)"
+    echo ">>> Expose CockroachDB HTTP Mgmt GUI port (8080->8081)"
+    CRDB_GUI_PORT_EXT="8081"
     CRDB_GUI_PORT=$(kubectl --namespace ${CRDB_NAMESPACE} get service cockroachdb-public -o 'jsonpath={.spec.ports[?(@.name=="http")].port}')
-    PATCH='{"data": {"'${CRDB_GUI_PORT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
+    PATCH='{"data": {"'${CRDB_GUI_PORT_EXT}'": "'${CRDB_NAMESPACE}'/cockroachdb-public:'${CRDB_GUI_PORT}'"}}'
     kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
 
-    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT}', "hostPort": '${CRDB_GUI_PORT}'}'
+    PORT_MAP='{"containerPort": '${CRDB_GUI_PORT_EXT}', "hostPort": '${CRDB_GUI_PORT_EXT}'}'
     CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
     PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
     kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
-- 
GitLab


From 6103d18dcb6efd26ef84f512a59bdfce65b263c5 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 14:48:29 +0000
Subject: [PATCH 05/31] Common method-wrappers:

- added missing "raise" statement when something goes wrong
---
 src/common/method_wrappers/Decorator.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/common/method_wrappers/Decorator.py b/src/common/method_wrappers/Decorator.py
index f918b8458..558e5c17a 100644
--- a/src/common/method_wrappers/Decorator.py
+++ b/src/common/method_wrappers/Decorator.py
@@ -169,6 +169,7 @@ def metered_subclass_method(metrics_pool : MetricsPool):
                 raise
             except Exception:           # pylint: disable=broad-except
                 counter_failed.inc()
+                raise
 
         return inner_wrapper
     return outer_wrapper
-- 
GitLab


From d8aae80fab3d569831785613fc89dc24f20167be Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 14:49:13 +0000
Subject: [PATCH 06/31] Device component - TAPI Driver:

- corrected parsing of endpoints to improve flexibility in retrieved fields
---
 .../service/drivers/transport_api/Tools.py    | 50 +++++++++++--------
 1 file changed, 30 insertions(+), 20 deletions(-)

diff --git a/src/device/service/drivers/transport_api/Tools.py b/src/device/service/drivers/transport_api/Tools.py
index 6b1be361b..e54e6497a 100644
--- a/src/device/service/drivers/transport_api/Tools.py
+++ b/src/device/service/drivers/transport_api/Tools.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json, logging, requests
+import json, logging, operator, requests
 from requests.auth import HTTPBasicAuth
 from typing import Optional
 from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
@@ -39,30 +39,40 @@ def config_getter(
         response = requests.get(url, timeout=timeout, verify=False, auth=auth)
     except requests.exceptions.Timeout:
         LOGGER.exception('Timeout connecting {:s}'.format(url))
+        return result
     except Exception as e:  # pylint: disable=broad-except
         LOGGER.exception('Exception retrieving {:s}'.format(resource_key))
         result.append((resource_key, e))
-    else:
+        return result
+
+    try:
         context = json.loads(response.content)
+    except Exception as e:  # pylint: disable=broad-except
+        LOGGER.warning('Unable to decode reply: {:s}'.format(str(response.content)))
+        result.append((resource_key, e))
+        return result
+
+    if resource_key != RESOURCE_ENDPOINTS: return result
+
+    if 'tapi-common:context' in context:
+        context = context['tapi-common:context']
+    elif 'context' in context:
+        context = context['context']
 
-        if resource_key == RESOURCE_ENDPOINTS:
-            if 'tapi-common:context' in context:
-                context = context['tapi-common:context']
-            elif 'context' in context:
-                context = context['context']
-            for sip in context['service-interface-point']:
-                layer_protocol_name = sip.get('layer-protocol-name', '?')
-                supportable_spectrum = sip.get('tapi-photonic-media:media-channel-service-interface-point-spec', {})
-                supportable_spectrum = supportable_spectrum.get('mc-pool', {})
-                supportable_spectrum = supportable_spectrum.get('supportable-spectrum', [])
-                supportable_spectrum = supportable_spectrum[0] if len(supportable_spectrum) == 1 else {}
-                grid_type = supportable_spectrum.get('frequency-constraint', {}).get('grid-type')
-                granularity = supportable_spectrum.get('frequency-constraint', {}).get('adjustment-granularity')
-                direction = sip.get('direction', '?')
-                endpoint_type = ':'.join([layer_protocol_name, grid_type, granularity, direction])
-                endpoint_url = '/endpoints/endpoint[{:s}]'.format(sip['uuid'])
-                endpoint_data = {'uuid': sip['uuid'], 'type': endpoint_type}
-                result.append((endpoint_url, endpoint_data))
+    for sip in context['service-interface-point']:
+        layer_protocol_name = sip.get('layer-protocol-name', '?')
+        supportable_spectrum = sip.get('tapi-photonic-media:media-channel-service-interface-point-spec', {})
+        supportable_spectrum = supportable_spectrum.get('mc-pool', {})
+        supportable_spectrum = supportable_spectrum.get('supportable-spectrum', [])
+        supportable_spectrum = supportable_spectrum[0] if len(supportable_spectrum) == 1 else {}
+        grid_type = supportable_spectrum.get('frequency-constraint', {}).get('grid-type')
+        granularity = supportable_spectrum.get('frequency-constraint', {}).get('adjustment-granularity')
+        direction = sip.get('direction', '?')
+        endpoint_type = [layer_protocol_name, grid_type, granularity, direction]
+        str_endpoint_type = ':'.join(filter(lambda i: operator.is_not(i, None), endpoint_type))
+        endpoint_url = '/endpoints/endpoint[{:s}]'.format(sip['uuid'])
+        endpoint_data = {'uuid': sip['uuid'], 'type': str_endpoint_type}
+        result.append((endpoint_url, endpoint_data))
 
     return result
 
-- 
GitLab


From 9ccc623ac7713d74b3f79d0592ae6569d471047c Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 17:06:04 +0000
Subject: [PATCH 07/31] PathComp component:

- Frontend: ensure links have exactly 2 endpoints
- Backend: allow larger number of endpoints per node
---
 src/pathcomp/backend/pathComp_tools.h                  | 6 +++---
 src/pathcomp/frontend/service/algorithms/_Algorithm.py | 1 +
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/pathcomp/backend/pathComp_tools.h b/src/pathcomp/backend/pathComp_tools.h
index adbbf30c4..118781eb0 100644
--- a/src/pathcomp/backend/pathComp_tools.h
+++ b/src/pathcomp/backend/pathComp_tools.h
@@ -121,7 +121,7 @@ struct map_nodes_t {
 };
 
 #define MAX_NUM_VERTICES				20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
-#define MAX_NUM_EDGES					20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
+#define MAX_NUM_EDGES					40 // 100 # LGR: reduced from 100 to 40 to divide by 2.5 the memory used
 // Structures for the graph composition
 struct targetNodes_t {
 	// remote / targeted node
@@ -154,7 +154,7 @@ struct context_t {
 ////////////////////////////////////////////////////
 // Structure for the Set of Contexts
 ///////////////////////////////////////////////////
-#define MAX_NUMBER_CONTEXT		5 // 100 # LGR: reduced from 100 to 5 to divide by 20 the memory used
+#define MAX_NUMBER_CONTEXT		1 // 100 # LGR: reduced from 100 to 1 to divide by 100 the memory used
 struct contextSet_t {
 	struct context_t contextList[MAX_NUMBER_CONTEXT];
 	gint num_context_set;
@@ -251,7 +251,7 @@ struct endPoint_t {
 // Structure for the device contents
 ///////////////////////////////////////////////////////////////////
 #define MAX_DEV_TYPE_SIZE				128
-#define MAX_DEV_ENDPOINT_LENGTH			10
+#define MAX_DEV_ENDPOINT_LENGTH			40	// 10 # LGR: controllers might have large number of endpoints
 struct device_t {
 	gchar deviceId[UUID_CHAR_LENGTH]; // device ID using UUID (128 bits)
 
diff --git a/src/pathcomp/frontend/service/algorithms/_Algorithm.py b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
index 5c49a1fec..bf19ed3e1 100644
--- a/src/pathcomp/frontend/service/algorithms/_Algorithm.py
+++ b/src/pathcomp/frontend/service/algorithms/_Algorithm.py
@@ -68,6 +68,7 @@ class _Algorithm:
         if isinstance(grpc_links, LinkList): grpc_links = grpc_links.links
         for grpc_link in grpc_links:
             json_link = compose_link(grpc_link)
+            if len(json_link['link_endpoint_ids']) != 2: continue
             self.link_list.append(json_link)
 
             link_uuid = json_link['link_Id']
-- 
GitLab


From 941df77d4728ecd5e0b5f880de823781cb12aa08 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 17:07:34 +0000
Subject: [PATCH 08/31] Service component:

- store service in UpdateService to get UUIDs from given endpoint names and store constraints
- code cleanup
---
 .../service/ServiceServiceServicerImpl.py     | 31 ++++++++++++-------
 1 file changed, 19 insertions(+), 12 deletions(-)

diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index bf1520270..50f645e42 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -22,9 +22,9 @@ from common.proto.service_pb2_grpc import ServiceServiceServicer
 from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
 from context.client.ContextClient import ContextClient
 from pathcomp.frontend.client.PathCompClient import PathCompClient
-from .tools.ContextGetters import get_service
 from .service_handler_api.ServiceHandlerFactory import ServiceHandlerFactory
 from .task_scheduler.TaskScheduler import TasksScheduler
+from .tools.ContextGetters import get_service
 
 LOGGER = logging.getLogger(__name__)
 
@@ -40,10 +40,6 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
     def CreateService(self, request : Service, context : grpc.ServicerContext) -> ServiceId:
         LOGGER.info('[CreateService] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
 
-        service_id = request.service_id
-        service_uuid = service_id.service_uuid.uuid
-        service_context_uuid = service_id.context_id.context_uuid.uuid
-
         if len(request.service_endpoint_ids) > 0:
             unexpected_endpoints = []
             for service_endpoint_id in request.service_endpoint_ids:
@@ -97,8 +93,18 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         _service : Optional[Service] = get_service(context_client, request.service_id)
         service = Service()
         service.CopyFrom(request if _service is None else _service)
-        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
-        context_client.SetService(service)
+        service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED     # pylint: disable=no-member
+
+        del service.service_endpoint_ids[:]                                     # pylint: disable=no-member
+        for service_endpoint_id in request.service_endpoint_ids:
+            service.service_endpoint_ids.add().CopyFrom(service_endpoint_id)    # pylint: disable=no-member
+
+        del service.service_constraints[:]                                      # pylint: disable=no-member
+        for service_constraint in request.service_constraints:
+            service.service_constraints.add().CopyFrom(service_constraint)      # pylint: disable=no-member
+
+        service_id_with_uuids = context_client.SetService(service)
+        service_with_uuids = context_client.GetService(service_id_with_uuids)
 
         num_disjoint_paths = None
         for constraint in request.service_constraints:
@@ -107,14 +113,14 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
                 break
 
         tasks_scheduler = TasksScheduler(self.service_handler_factory)
-        if len(request.service_endpoint_ids) >= (2 if num_disjoint_paths is None else 4):
+        if len(service_with_uuids.service_endpoint_ids) >= (2 if num_disjoint_paths is None else 4):
             pathcomp_request = PathCompRequest()
-            pathcomp_request.services.append(request)
+            pathcomp_request.services.append(service_with_uuids)    # pylint: disable=no-member
 
             if num_disjoint_paths is None:
-                pathcomp_request.shortest_path.Clear()
+                pathcomp_request.shortest_path.Clear()              # pylint: disable=no-member
             else:
-                pathcomp_request.k_disjoint_path.num_disjoint = num_disjoint_paths
+                pathcomp_request.k_disjoint_path.num_disjoint = num_disjoint_paths  # pylint: disable=no-member
 
             LOGGER.info('pathcomp_request={:s}'.format(grpc_message_to_json_string(pathcomp_request)))
             pathcomp = PathCompClient()
@@ -128,7 +134,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
             tasks_scheduler.compose_from_pathcompreply(pathcomp_reply, is_delete=False)
 
         tasks_scheduler.execute_all()
-        return request.service_id
+        return service_with_uuids.service_id
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def DeleteService(self, request : ServiceId, context : grpc.ServicerContext) -> Empty:
@@ -142,6 +148,7 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         if _service is None: raise Exception('Service({:s}) not found'.format(grpc_message_to_json_string(request)))
         service = Service()
         service.CopyFrom(_service)
+        # pylint: disable=no-member
         service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PENDING_REMOVAL
         context_client.SetService(service)
 
-- 
GitLab


From 2645c398feef1019f5a0ab2741d7ffe7729b63f4 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 17:09:25 +0000
Subject: [PATCH 09/31] WebUI component:

- Device List: show number of endpoints instead of detail
- Device List: show number of rules
- Remove UUIDs from topology elements
- Service List: add service name
- Service Details: add service name
- Slice List: add slice name
- Slice Details: add slice name
---
 src/webui/service/templates/device/home.html  | 44 +++++--------------
 src/webui/service/templates/js/topology.js    |  4 +-
 .../service/templates/service/detail.html     |  3 +-
 src/webui/service/templates/service/home.html |  6 ++-
 src/webui/service/templates/slice/detail.html |  3 +-
 src/webui/service/templates/slice/home.html   |  9 ++--
 6 files changed, 25 insertions(+), 44 deletions(-)

diff --git a/src/webui/service/templates/device/home.html b/src/webui/service/templates/device/home.html
index 7b4437cce..ef4345b36 100644
--- a/src/webui/service/templates/device/home.html
+++ b/src/webui/service/templates/device/home.html
@@ -48,7 +48,7 @@
             <th scope="col">Endpoints</th>
             <th scope="col">Drivers</th>
             <th scope="col">Status</th>
-            <!-- <th scope="col">Configuration</th> -->
+            <th scope="col">Config Rules</th>
             <th scope="col"></th>
           </tr>
         </thead>
@@ -56,40 +56,16 @@
             {% if devices %}
                 {% for device in devices %}
                 <tr>
-                    <td>
-                        {{ device.device_id.device_uuid.uuid }}
-                    </td>
-                    <td>
-                        {{ device.name }}
-                    </td>
-                    <td>
-                        {{ device.device_type }}
-                    </td>
-                    <td>
-                        <ul>
-                            {% for end_point in device.device_endpoints %}
-                            <li>{{ end_point.endpoint_id.endpoint_uuid.uuid }}</li>
-                            {% endfor %}
-                        </ul>
-                    </td>
-                    <td>
-                        <ul>
-                            {% for driver in device.device_drivers %}
-                            <li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li>
-                            {% endfor %}
-                        </ul>
-                    </td>
+                    <td>{{ device.device_id.device_uuid.uuid }}</td>
+                    <td>{{ device.name }}</td>
+                    <td>{{ device.device_type }}</td>
+                    <td>{{ device.device_endpoints | length }}</td>
+                    <td><ul>{% for driver in device.device_drivers %}
+                        <li>{{ dde.Name(driver).replace('DEVICEDRIVER_', '').replace('UNDEFINED', 'EMULATED') }}</li>
+                        {% endfor %}
+                    </ul></td>
                     <td>{{ dose.Name(device.device_operational_status).replace('DEVICEOPERATIONALSTATUS_', '') }}</td>
-                    <!-- <td>
-                        <ul>
-                            {% for config in device.device_config.config_rules %}
-                            <li>
-                                Key: {{ config.resource_key }}<br/>
-                                Value: {{ config.resource_value }}
-                            </li>
-                            {% endfor %}
-                        </ul>
-                    </td> -->
+                    <td>{{ device.device_config.config_rules | length }}</td>
                     <td>
                         <a href="{{ url_for('device.detail', device_uuid=device.device_id.device_uuid.uuid) }}">
                             <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-eye" viewBox="0 0 16 16">
diff --git a/src/webui/service/templates/js/topology.js b/src/webui/service/templates/js/topology.js
index adcabf62c..47c246614 100644
--- a/src/webui/service/templates/js/topology.js
+++ b/src/webui/service/templates/js/topology.js
@@ -88,9 +88,9 @@ d3.json("{{ url_for('main.topology') }}", function(data) {
         .call(d3.drag().on("start", dragstarted).on("drag", dragged).on("end", dragended));
 
     // node tooltip
-    node.append("title").text(function(n) { return n.name + ' (' + n.id + ')'; });
+    node.append("title").text(function(n) { return n.name; });
     // link tooltip
-    link.append("title").text(function(l) { return l.name + ' (' + l.id + ')'; });
+    link.append("title").text(function(l) { return l.name; });
 
     // link style
     link
diff --git a/src/webui/service/templates/service/detail.html b/src/webui/service/templates/service/detail.html
index 67b240b3d..e8d3a4221 100644
--- a/src/webui/service/templates/service/detail.html
+++ b/src/webui/service/templates/service/detail.html
@@ -17,7 +17,7 @@
 {% extends 'base.html' %}
 
 {% block content %}
-<h1>Service {{ service.service_id.service_uuid.uuid }}</h1>
+<h1>Service {{ service.name }} ({{ service.service_id.service_uuid.uuid }})</h1>
 
 <div class="row mb-3">
     <div class="col-sm-3">
@@ -45,6 +45,7 @@
     <div class="col-sm-4">
         <b>Context: </b> {{ service.service_id.context_id.context_uuid.uuid }}<br>
         <b>UUID: </b> {{ service.service_id.service_uuid.uuid }}<br>
+        <b>Name: </b> {{ service.name }}<br>
         <b>Type: </b> {{ ste.Name(service.service_type).replace('SERVICETYPE_', '') }}<br>
         <b>Status: </b> {{ sse.Name(service.service_status.service_status).replace('SERVICESTATUS_', '') }}<br>
     </div>
diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html
index c0a01839b..280685fc5 100644
--- a/src/webui/service/templates/service/home.html
+++ b/src/webui/service/templates/service/home.html
@@ -43,7 +43,8 @@
     <table class="table table-striped table-hover">
         <thead>
           <tr>
-            <th scope="col">#</th>
+            <th scope="col">UUID</th>
+            <th scope="col">Name</th>
             <th scope="col">Type</th>
             <th scope="col">End points</th>
             <th scope="col">Status</th>
@@ -59,6 +60,9 @@
                             {{ service.service_id.service_uuid.uuid }}
                         <!-- </a> -->
                     </td>
+                    <td>
+                        {{ service.name }}
+                    </td>
                     <td>
                         {{ ste.Name(service.service_type).replace('SERVICETYPE_', '') }}
                     </td>
diff --git a/src/webui/service/templates/slice/detail.html b/src/webui/service/templates/slice/detail.html
index 404dede39..4f26c75a5 100644
--- a/src/webui/service/templates/slice/detail.html
+++ b/src/webui/service/templates/slice/detail.html
@@ -17,7 +17,7 @@
 {% extends 'base.html' %}
 
 {% block content %}
-<h1>Slice {{ slice.slice_id.slice_uuid.uuid }} </h1>
+<h1>Slice {{ slice.name }} ({{ slice.slice_id.slice_uuid.uuid }}) </h1>
 
 <div class="row mb-3">
     <div class="col-sm-3">
@@ -46,6 +46,7 @@
     <div class="col-sm-4">
         <b>Context: </b> {{ slice.slice_id.context_id.context_uuid.uuid }}<br>
         <b>UUID: </b> {{ slice.slice_id.slice_uuid.uuid }}<br>
+        <b>Name: </b> {{ slice.name }}<br>
         <b>Owner: </b> {{ slice.slice_owner.owner_uuid.uuid }}<br>
         <b>Status: </b> {{ sse.Name(slice.slice_status.slice_status).replace('SLICESTATUS_', '') }}<br>
     </div>
diff --git a/src/webui/service/templates/slice/home.html b/src/webui/service/templates/slice/home.html
index 46a2b4f1a..141234aca 100644
--- a/src/webui/service/templates/slice/home.html
+++ b/src/webui/service/templates/slice/home.html
@@ -31,20 +31,19 @@
     <table class="table table-striped table-hover">
         <thead>
           <tr>
-            <th scope="col">#</th>
+            <th scope="col">UUID</th>
+            <th scope="col">Name</th>
             <th scope="col">End points</th>
             <th scope="col">Status</th>
             <th scope="col"></th>
-            
           </tr>
         </thead>
         <tbody>
             {% if slices %}
                 {% for slice in slices %}
                 <tr>
-                    <td>
-                        {{ slice.slice_id.slice_uuid.uuid }}
-                    </td>
+                    <td>{{ slice.slice_id.slice_uuid.uuid }}</td>
+                    <td>{{ slice.name }}</td>
                     <td>
                         <ul>
                         {% for i in range(slice.slice_endpoint_ids|length) %}
-- 
GitLab


From c241ced146db9b3b716bf5d21b6c394329a8fdd3 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Fri, 27 Jan 2023 18:39:35 +0000
Subject: [PATCH 10/31] Monitoring component:

- added checkpoints in CI/CD pipeline since it gets stuck for unknown reasons
---
 src/monitoring/.gitlab-ci.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml
index 4a981cba2..a083134bc 100644
--- a/src/monitoring/.gitlab-ci.yml
+++ b/src/monitoring/.gitlab-ci.yml
@@ -60,13 +60,20 @@ unit_test monitoring:
     - sleep 30
     - docker ps -a
     - docker logs $IMAGE_NAME
+    - echo "checkpoint-1"
     - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
+    - echo "checkpoint-2"
     - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
+    - echo "checkpoint-3"
   coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
+    - echo "checkpoint-4"
     - docker rm -f $IMAGE_NAME
+    - echo "checkpoint-5"
     - docker rm -f  questdb
+    - echo "checkpoint-6"
     - docker network rm teraflowbridge
+    - echo "checkpoint-7"
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
-- 
GitLab


From 780e2197f0fc7596cab5914b1bba919b84e6c159 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 14:48:19 +0000
Subject: [PATCH 11/31] Monitoring component:

- added log verbosity in CI/CD pipeline since it gets stuck for unknown reasons
---
 src/monitoring/.gitlab-ci.yml        |  2 +-
 src/monitoring/tests/test_unitary.py | 15 +++++++++++++++
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml
index a083134bc..75cba475c 100644
--- a/src/monitoring/.gitlab-ci.yml
+++ b/src/monitoring/.gitlab-ci.yml
@@ -61,7 +61,7 @@ unit_test monitoring:
     - docker ps -a
     - docker logs $IMAGE_NAME
     - echo "checkpoint-1"
-    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO -o log_cli=true --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
     - echo "checkpoint-2"
     - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
     - echo "checkpoint-3"
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
index e70827cbc..b52e719ec 100644
--- a/src/monitoring/tests/test_unitary.py
+++ b/src/monitoring/tests/test_unitary.py
@@ -92,15 +92,25 @@ class MockContextService(GenericGrpcService):
 
 @pytest.fixture(scope='session')
 def context_service():
+    LOGGER.info('Initializing MockContextService...')
     _service = MockContextService(MOCKSERVICE_PORT)
     _service.start()
+    
+    LOGGER.info('Yielding MockContextService...')
     yield _service
+
+    LOGGER.info('Terminating MockContextService...')
     _service.stop()
 
 @pytest.fixture(scope='session')
 def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument
+    LOGGER.info('Initializing ContextClient...')
     _client = ContextClient()
+    
+    LOGGER.info('Yielding ContextClient...')
     yield _client
+
+    LOGGER.info('Closing ContextClient...')
     _client.close()
 
 @pytest.fixture(scope='session')
@@ -120,8 +130,13 @@ def device_service(context_service : MockContextService): # pylint: disable=rede
 
 @pytest.fixture(scope='session')
 def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
+    LOGGER.info('Initializing DeviceClient...')
     _client = DeviceClient()
+
+    LOGGER.info('Yielding DeviceClient...')
     yield _client
+
+    LOGGER.info('Closing DeviceClient...')
     _client.close()
 
 # This fixture will be requested by test cases and last during testing session
-- 
GitLab


From 65f210b0d24e7c7f3f5ab0f28a39e932d2b3ed25 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 15:04:17 +0000
Subject: [PATCH 12/31] Monitoring component:

- added log verbosity in CI/CD pipeline since it gets stuck for unknown reasons
---
 src/monitoring/service/EventTools.py | 10 +++++++++-
 src/monitoring/tests/test_unitary.py | 18 ++++++++++++++++++
 2 files changed, 27 insertions(+), 1 deletion(-)

diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index 189e78ce6..c881be163 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -15,7 +15,7 @@
 import threading
 from queue import Queue
 
-import grpc
+import grpc, logging
 
 from common.method_wrappers.ServiceExceptions import ServiceException
 from context.client.ContextClient import ContextClient
@@ -26,6 +26,8 @@ from monitoring.client.MonitoringClient import MonitoringClient
 from monitoring.service.MonitoringServiceServicerImpl import LOGGER
 from common.proto import monitoring_pb2
 
+LOGGER = logging.getLogger(__name__)
+
 class EventsDeviceCollector:
     def __init__(self) -> None: # pylint: disable=redefined-outer-name
         self._events_queue = Queue()
@@ -46,12 +48,15 @@ class EventsDeviceCollector:
             return False
 
     def _collect(self, events_stream):
+        LOGGER.warning('[_collect] begin')
         try:
             for event in events_stream:
                 self._events_queue.put_nowait(event)
         except grpc.RpcError as e:
             if e.code() != grpc.StatusCode.CANCELLED: # pylint: disable=no-member
+                LOGGER.warning('[_collect] raise')
                 raise # pragma: no cover
+        LOGGER.warning('[_collect] end')
 
     def start(self):
         try:
@@ -63,8 +68,11 @@ class EventsDeviceCollector:
         return self._events_queue.get(block=block, timeout=timeout)
 
     def stop(self):
+        LOGGER.warning('[stop] begin')
         self._device_stream.cancel()
+        LOGGER.warning('[stop] joining')
         self._device_thread.join()
+        LOGGER.warning('[stop] end')
 
     def listen_events(self):
         try:
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
index b52e719ec..4f0508ce5 100644
--- a/src/monitoring/tests/test_unitary.py
+++ b/src/monitoring/tests/test_unitary.py
@@ -102,6 +102,8 @@ def context_service():
     LOGGER.info('Terminating MockContextService...')
     _service.stop()
 
+    LOGGER.info('Terminated MockContextService...')
+
 @pytest.fixture(scope='session')
 def context_client(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.info('Initializing ContextClient...')
@@ -113,6 +115,8 @@ def context_client(context_service : MockContextService): # pylint: disable=rede
     LOGGER.info('Closing ContextClient...')
     _client.close()
 
+    LOGGER.info('Closed ContextClient...')
+
 @pytest.fixture(scope='session')
 def device_service(context_service : MockContextService): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.info('Initializing DeviceService...')
@@ -128,6 +132,8 @@ def device_service(context_service : MockContextService): # pylint: disable=rede
     LOGGER.info('Terminating DeviceService...')
     _service.stop()
 
+    LOGGER.info('Terminated DeviceService...')
+
 @pytest.fixture(scope='session')
 def device_client(device_service : DeviceService): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.info('Initializing DeviceClient...')
@@ -139,6 +145,8 @@ def device_client(device_service : DeviceService): # pylint: disable=redefined-o
     LOGGER.info('Closing DeviceClient...')
     _client.close()
 
+    LOGGER.info('Closed DeviceClient...')
+
 # This fixture will be requested by test cases and last during testing session
 @pytest.fixture(scope='session')
 def monitoring_service(
@@ -156,6 +164,8 @@ def monitoring_service(
     LOGGER.info('Terminating MonitoringService...')
     _service.stop()
 
+    LOGGER.info('Terminated MonitoringService...')
+
 # This fixture will be requested by test cases and last during testing session.
 # The client requires the server, so client fixture has the server as dependency.
 @pytest.fixture(scope='session')
@@ -170,6 +180,8 @@ def monitoring_client(monitoring_service : MonitoringService): # pylint: disable
     LOGGER.info('Closing MonitoringClient...')
     _client.close()
 
+    LOGGER.info('Closed MonitoringClient...')
+
 @pytest.fixture(scope='session')
 def management_db():
     _management_db = ManagementDBTools.ManagementDB('monitoring.db')
@@ -536,6 +548,8 @@ def test_events_tools(
     device_client.DeleteDevice(response)
     events_collector.stop()
 
+    LOGGER.warning('test_events_tools end')
+
 
 def test_get_device_events(
         context_client : ContextClient,                 # pylint: disable=redefined-outer-name,unused-argument
@@ -564,6 +578,8 @@ def test_get_device_events(
     device_client.DeleteDevice(response)
     events_collector.stop()
 
+    LOGGER.warning('test_get_device_events end')
+
 def test_listen_events(
         context_client : ContextClient,                 # pylint: disable=redefined-outer-name,unused-argument
         device_client : DeviceClient,                   # pylint: disable=redefined-outer-name
@@ -589,3 +605,5 @@ def test_listen_events(
 
     device_client.DeleteDevice(response)
     events_collector.stop()
+
+    LOGGER.warning('test_listen_events end')
-- 
GitLab


From 5a1efbe9312d3b176292588bfb158a7c5a4693f5 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 15:20:54 +0000
Subject: [PATCH 13/31] Monitoring component:

- added log verbosity in CI/CD pipeline since it gets stuck for unknown reasons
---
 src/monitoring/service/EventTools.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index c881be163..9ebe0774c 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -75,12 +75,15 @@ class EventsDeviceCollector:
         LOGGER.warning('[stop] end')
 
     def listen_events(self):
+        LOGGER.warning('[listen_events] begin')
         try:
             kpi_id_list = []
 
             while not self._events_queue.empty():
                 # LOGGER.info('getting Kpi by KpiID')
+                LOGGER.warning('[listen_events] waiting event')
                 event = self.get_event(block=True)
+                LOGGER.warning('[listen_events] event received')
                 if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
                     device = self._context_client.GetDevice(event.device_id)
                     for j,end_point in enumerate(device.device_endpoints):
@@ -98,6 +101,8 @@ class EventsDeviceCollector:
 
                             kpi_id = self._monitoring_client.SetKpi(kpi_descriptor)
                             kpi_id_list.append(kpi_id)
+
+            LOGGER.warning('[listen_events] return')
             return kpi_id_list
         except ServiceException as e:
             LOGGER.exception('ListenEvents exception')
-- 
GitLab


From 13f6b4385da2103c70c7f77e74093d69042614b4 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 16:19:37 +0000
Subject: [PATCH 14/31] Monitoring component:

- added log verbosity in CI/CD pipeline since it gets stuck for unknown reasons
---
 src/monitoring/service/EventTools.py | 60 ++++++++++++++--------------
 src/monitoring/service/__main__.py   |  6 ++-
 2 files changed, 34 insertions(+), 32 deletions(-)

diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index 9ebe0774c..95350ae70 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -12,25 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import threading
-from queue import Queue
-
-import grpc, logging
-
+import grpc, logging, queue, threading
 from common.method_wrappers.ServiceExceptions import ServiceException
-from context.client.ContextClient import ContextClient
-
+from common.proto import monitoring_pb2
 from common.proto.context_pb2 import Empty, EventTypeEnum
-
+from context.client.ContextClient import ContextClient
 from monitoring.client.MonitoringClient import MonitoringClient
 from monitoring.service.MonitoringServiceServicerImpl import LOGGER
-from common.proto import monitoring_pb2
 
 LOGGER = logging.getLogger(__name__)
 
 class EventsDeviceCollector:
     def __init__(self) -> None: # pylint: disable=redefined-outer-name
-        self._events_queue = Queue()
+        self._events_queue = queue.Queue()
 
         self._context_client_grpc = ContextClient()
         self._device_stream     = self._context_client_grpc.GetDeviceEvents(Empty())
@@ -79,28 +73,32 @@ class EventsDeviceCollector:
         try:
             kpi_id_list = []
 
-            while not self._events_queue.empty():
+            while True:
                 # LOGGER.info('getting Kpi by KpiID')
-                LOGGER.warning('[listen_events] waiting event')
-                event = self.get_event(block=True)
-                LOGGER.warning('[listen_events] event received')
-                if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
-                    device = self._context_client.GetDevice(event.device_id)
-                    for j,end_point in enumerate(device.device_endpoints):
-                        #for i, value in enumerate(kpi_sample_types_pb2.KpiSampleType.values()):
-                        for i, value in enumerate(end_point.kpi_sample_types):
-                            #if value == kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_UNKNOWN: continue
-
-                            kpi_descriptor = monitoring_pb2.KpiDescriptor()
-
-                            kpi_descriptor.kpi_description                      = device.device_type
-                            kpi_descriptor.kpi_sample_type                      = value
-                            #kpi_descriptor.service_id.service_uuid.uuid         = ""
-                            kpi_descriptor.device_id.CopyFrom(device.device_id)
-                            kpi_descriptor.endpoint_id.CopyFrom(end_point.endpoint_id)
-
-                            kpi_id = self._monitoring_client.SetKpi(kpi_descriptor)
-                            kpi_id_list.append(kpi_id)
+                try:
+                    LOGGER.warning('[listen_events] waiting event')
+                    event = self.get_event(block=True, timeout=0.5)
+                    LOGGER.warning('[listen_events] event received')
+
+                    if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
+                        device = self._context_client.GetDevice(event.device_id)
+                        for j,end_point in enumerate(device.device_endpoints):
+                            #for i, value in enumerate(kpi_sample_types_pb2.KpiSampleType.values()):
+                            for i, value in enumerate(end_point.kpi_sample_types):
+                                #if value == kpi_sample_types_pb2.KpiSampleType.KPISAMPLETYPE_UNKNOWN: continue
+
+                                kpi_descriptor = monitoring_pb2.KpiDescriptor()
+
+                                kpi_descriptor.kpi_description                      = device.device_type
+                                kpi_descriptor.kpi_sample_type                      = value
+                                #kpi_descriptor.service_id.service_uuid.uuid         = ""
+                                kpi_descriptor.device_id.CopyFrom(device.device_id)
+                                kpi_descriptor.endpoint_id.CopyFrom(end_point.endpoint_id)
+
+                                kpi_id = self._monitoring_client.SetKpi(kpi_descriptor)
+                                kpi_id_list.append(kpi_id)
+                except queue.Empty:
+                    break
 
             LOGGER.warning('[listen_events] return')
             return kpi_id_list
diff --git a/src/monitoring/service/__main__.py b/src/monitoring/service/__main__.py
index 3334a860c..78764ea64 100644
--- a/src/monitoring/service/__main__.py
+++ b/src/monitoring/service/__main__.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, signal, sys, threading
+import logging, signal, sys, threading, time
 from prometheus_client import start_http_server
 from common.Constants import ServiceNameEnum
 from common.Settings import (
@@ -35,6 +35,8 @@ def start_monitoring():
     events_collector = EventsDeviceCollector()
     events_collector.start()
 
+    # TODO: redesign this method to be more clear and clean
+
     # Iterate while terminate is not set
     while not terminate.is_set():
         list_new_kpi_ids = events_collector.listen_events()
@@ -48,6 +50,8 @@ def start_monitoring():
                 monitor_kpi_request.monitoring_window_s = 86400
                 monitor_kpi_request.sampling_rate_s = 30
                 events_collector._monitoring_client.MonitorKpi(monitor_kpi_request)
+        
+        time.sleep(0.5) # let other tasks run; do not overload CPU
     else:
         # Terminate is set, looping terminates
         LOGGER.warning("Stopping execution...")
-- 
GitLab


From 7860c44cd608fb0db4e9a4268db39d1ad36f6158 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 17:07:13 +0000
Subject: [PATCH 15/31] Monitoring component:

- added explicit terminate in Context Mock
- added missing KpiId in GetStreamKpi test
---
 src/monitoring/tests/Messages.py     | 11 +++++++++++
 src/monitoring/tests/test_unitary.py |  6 ++++--
 2 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/src/monitoring/tests/Messages.py b/src/monitoring/tests/Messages.py
index f15cb5ec2..5530c720c 100644
--- a/src/monitoring/tests/Messages.py
+++ b/src/monitoring/tests/Messages.py
@@ -55,6 +55,17 @@ def create_kpi_request_c():
     _create_kpi_request.connection_id.connection_uuid.uuid = 'CON3'  # pylint: disable=maybe-no-member
     return _create_kpi_request
 
+def create_kpi_request_d():
+    _create_kpi_request                                = monitoring_pb2.KpiDescriptor()
+    _create_kpi_request.kpi_description                = 'KPI Description Test'
+    _create_kpi_request.kpi_sample_type                = KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED
+    _create_kpi_request.device_id.device_uuid.uuid     = 'DEV4'     # pylint: disable=maybe-no-member
+    _create_kpi_request.service_id.service_uuid.uuid   = 'SERV4'    # pylint: disable=maybe-no-member
+    _create_kpi_request.slice_id.slice_uuid.uuid       = 'SLC4'  # pylint: disable=maybe-no-member
+    _create_kpi_request.endpoint_id.endpoint_uuid.uuid = 'END4'     # pylint: disable=maybe-no-member
+    _create_kpi_request.connection_id.connection_uuid.uuid = 'CON4'  # pylint: disable=maybe-no-member
+    return _create_kpi_request
+
 def monitor_kpi_request(kpi_uuid, monitoring_window_s, sampling_rate_s):
     _monitor_kpi_request                     = monitoring_pb2.MonitorKpiRequest()
     _monitor_kpi_request.kpi_id.kpi_id.uuid  = kpi_uuid   # pylint: disable=maybe-no-member
diff --git a/src/monitoring/tests/test_unitary.py b/src/monitoring/tests/test_unitary.py
index 4f0508ce5..d892b98b8 100644
--- a/src/monitoring/tests/test_unitary.py
+++ b/src/monitoring/tests/test_unitary.py
@@ -47,7 +47,7 @@ from monitoring.service.EventTools import EventsDeviceCollector
 from monitoring.service.MetricsDBTools import MetricsDB
 from monitoring.service.MonitoringService import MonitoringService
 #from monitoring.service.SubscriptionManager import SubscriptionManager
-from monitoring.tests.Messages import create_kpi_request, include_kpi_request, monitor_kpi_request, \
+from monitoring.tests.Messages import create_kpi_request, create_kpi_request_d, include_kpi_request, monitor_kpi_request, \
     create_kpi_request_c, kpi_query, subs_descriptor, alarm_descriptor, alarm_subscription #, create_kpi_request_b
 from monitoring.tests.Objects import DEVICE_DEV1, DEVICE_DEV1_CONNECT_RULES, DEVICE_DEV1_UUID
 
@@ -100,6 +100,7 @@ def context_service():
     yield _service
 
     LOGGER.info('Terminating MockContextService...')
+    _service.context_servicer.msg_broker.terminate()
     _service.stop()
 
     LOGGER.info('Terminated MockContextService...')
@@ -400,7 +401,8 @@ def test_delete_alarm(monitoring_client): # pylint: disable=redefined-outer-name
 # Test case that makes use of client fixture to test server's GetStreamKpi method
 def test_get_stream_kpi(monitoring_client): # pylint: disable=redefined-outer-name
     LOGGER.warning('test_getstream_kpi begin')
-    response = monitoring_client.GetStreamKpi(monitoring_pb2.Kpi())
+    _kpi_id = monitoring_client.SetKpi(create_kpi_request_d())
+    response = monitoring_client.GetStreamKpi(_kpi_id)
     LOGGER.debug(str(response))
     assert isinstance(response, _MultiThreadedRendezvous)
 
-- 
GitLab


From ebbbc1340b441097194fdee4e2b1e70b2215b040 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 17:26:41 +0000
Subject: [PATCH 16/31] Monitoring component:

- removed checkpoints in CI/CD pipeline
- removed log messages in classes
---
 src/monitoring/.gitlab-ci.yml        |  9 +--------
 src/monitoring/service/EventTools.py | 10 ----------
 2 files changed, 1 insertion(+), 18 deletions(-)

diff --git a/src/monitoring/.gitlab-ci.yml b/src/monitoring/.gitlab-ci.yml
index 75cba475c..4a981cba2 100644
--- a/src/monitoring/.gitlab-ci.yml
+++ b/src/monitoring/.gitlab-ci.yml
@@ -60,20 +60,13 @@ unit_test monitoring:
     - sleep 30
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - echo "checkpoint-1"
-    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO -o log_cli=true --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
-    - echo "checkpoint-2"
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
     - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
-    - echo "checkpoint-3"
   coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
-    - echo "checkpoint-4"
     - docker rm -f $IMAGE_NAME
-    - echo "checkpoint-5"
     - docker rm -f  questdb
-    - echo "checkpoint-6"
     - docker network rm teraflowbridge
-    - echo "checkpoint-7"
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py
index 95350ae70..221a0ddbf 100644
--- a/src/monitoring/service/EventTools.py
+++ b/src/monitoring/service/EventTools.py
@@ -42,15 +42,12 @@ class EventsDeviceCollector:
             return False
 
     def _collect(self, events_stream):
-        LOGGER.warning('[_collect] begin')
         try:
             for event in events_stream:
                 self._events_queue.put_nowait(event)
         except grpc.RpcError as e:
             if e.code() != grpc.StatusCode.CANCELLED: # pylint: disable=no-member
-                LOGGER.warning('[_collect] raise')
                 raise # pragma: no cover
-        LOGGER.warning('[_collect] end')
 
     def start(self):
         try:
@@ -62,23 +59,17 @@ class EventsDeviceCollector:
         return self._events_queue.get(block=block, timeout=timeout)
 
     def stop(self):
-        LOGGER.warning('[stop] begin')
         self._device_stream.cancel()
-        LOGGER.warning('[stop] joining')
         self._device_thread.join()
-        LOGGER.warning('[stop] end')
 
     def listen_events(self):
-        LOGGER.warning('[listen_events] begin')
         try:
             kpi_id_list = []
 
             while True:
                 # LOGGER.info('getting Kpi by KpiID')
                 try:
-                    LOGGER.warning('[listen_events] waiting event')
                     event = self.get_event(block=True, timeout=0.5)
-                    LOGGER.warning('[listen_events] event received')
 
                     if event.event.event_type == EventTypeEnum.EVENTTYPE_CREATE:
                         device = self._context_client.GetDevice(event.device_id)
@@ -100,7 +91,6 @@ class EventsDeviceCollector:
                 except queue.Empty:
                     break
 
-            LOGGER.warning('[listen_events] return')
             return kpi_id_list
         except ServiceException as e:
             LOGGER.exception('ListenEvents exception')
-- 
GitLab


From 3b11d31e3d82fcfd51e2dc7858d3e7f57f29e42a Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 18:33:39 +0000
Subject: [PATCH 17/31] Context component:

- ConfigRules: improved management and added support for ACL rules
- Constraints: improved management
---
 src/context/service/database/ConfigRule.py | 256 ++++++++-------------
 src/context/service/database/Constraint.py |   7 +-
 src/context/service/database/Device.py     |   9 +-
 src/context/service/database/Service.py    |  10 +-
 src/context/service/database/Slice.py      |  12 +-
 5 files changed, 114 insertions(+), 180 deletions(-)

diff --git a/src/context/service/database/ConfigRule.py b/src/context/service/database/ConfigRule.py
index 5443e178c..e35f246b6 100644
--- a/src/context/service/database/ConfigRule.py
+++ b/src/context/service/database/ConfigRule.py
@@ -12,16 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import datetime, logging
+import datetime, json, logging
 from sqlalchemy import delete
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Set
 from common.proto.context_pb2 import ConfigRule
 from common.tools.grpc.Tools import grpc_message_to_json_string
-from .models.enums.ConfigAction import grpc_to_enum__config_action
+from .models.enums.ConfigAction import ORM_ConfigActionEnum, grpc_to_enum__config_action
 from .models.ConfigRuleModel import ConfigRuleKindEnum, ConfigRuleModel
-from .uuids._Builder import get_uuid_random
+from .uuids._Builder import get_uuid_from_string
+from .uuids.EndPoint import endpoint_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
@@ -31,173 +32,106 @@ def compose_config_rules_data(
 ) -> List[Dict]:
     dict_config_rules : List[Dict] = list()
     for position,config_rule in enumerate(config_rules):
-        configrule_uuid = get_uuid_random()
         str_kind = config_rule.WhichOneof('config_rule')
+        kind = ConfigRuleKindEnum._member_map_.get(str_kind.upper()) # pylint: disable=no-member
         dict_config_rule = {
-            'configrule_uuid': configrule_uuid,
-            'position'       : position,
-            'kind'           : ConfigRuleKindEnum._member_map_.get(str_kind.upper()), # pylint: disable=no-member
-            'action'         : grpc_to_enum__config_action(config_rule.action),
-            'data'           : grpc_message_to_json_string(getattr(config_rule, str_kind, {})),
-            'created_at'     : now,
-            'updated_at'     : now,
+            'position'  : position,
+            'kind'      : kind,
+            'action'    : grpc_to_enum__config_action(config_rule.action),
+            'data'      : grpc_message_to_json_string(getattr(config_rule, str_kind, {})),
+            'created_at': now,
+            'updated_at': now,
         }
-        if device_uuid  is not None: dict_config_rule['device_uuid' ] = device_uuid
-        if service_uuid is not None: dict_config_rule['service_uuid'] = service_uuid
-        if slice_uuid   is not None: dict_config_rule['slice_uuid'  ] = slice_uuid
+
+        parent_uuid = None
+        if device_uuid is not None:
+            dict_config_rule['device_uuid'] = device_uuid
+            parent_uuid = device_uuid
+        elif service_uuid is not None:
+            dict_config_rule['service_uuid'] = service_uuid
+            parent_uuid = service_uuid
+        elif slice_uuid is not None:
+            dict_config_rule['slice_uuid'] = slice_uuid
+            parent_uuid = slice_uuid
+        else:
+            MSG = 'Parent for ConfigRule({:s}) cannot be identified '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = grpc_message_to_json_string(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
+        
+        configrule_name = None
+        if kind == ConfigRuleKindEnum.CUSTOM:
+            configrule_name = config_rule.custom.resource_key
+        elif kind == ConfigRuleKindEnum.ACL:
+            endpoint_uuid = endpoint_get_uuid(config_rule.acl.endpoint_id, allow_random=False)
+            rule_set_name = config_rule.acl.rule_set.name
+            configrule_name = '{:s}/{:s}'.format(endpoint_uuid, rule_set_name)
+        else:
+            MSG = 'Name for ConfigRule({:s}) cannot be inferred '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = grpc_message_to_json_string(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
+
+        configrule_uuid = get_uuid_from_string(configrule_name, prefix_for_name=parent_uuid)
+        dict_config_rule['configrule_uuid'] = configrule_uuid
+
         dict_config_rules.append(dict_config_rule)
     return dict_config_rules
 
 def upsert_config_rules(
     session : Session, config_rules : List[Dict],
     device_uuid : Optional[str] = None, service_uuid : Optional[str] = None, slice_uuid : Optional[str] = None,
-) -> List[bool]:
-    # TODO: do not delete all rules; just add-remove as needed
-    stmt = delete(ConfigRuleModel)
-    if device_uuid  is not None: stmt = stmt.where(ConfigRuleModel.device_uuid  == device_uuid )
-    if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
-    if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
-    session.execute(stmt)
+) -> bool:
+    uuids_to_delete : Set[str] = set()
+    uuids_to_upsert : Dict[str, int] = dict()
+    rules_to_upsert : List[Dict] = list()
+    for config_rule in config_rules:
+        if config_rule['action'] == ORM_ConfigActionEnum.SET:
+            configrule_uuid = config_rule['configrule_uuid']
+            position = uuids_to_upsert.get(configrule_uuid)
+            if position is None:
+                # if not added, add it
+                rules_to_upsert.append(config_rule)
+                uuids_to_upsert[config_rule['configrule_uuid']] = len(rules_to_upsert) - 1
+            else:
+                # if already added, update occurrence
+                rules_to_upsert[position] = config_rule
+        elif config_rule['action'] == ORM_ConfigActionEnum.DELETE:
+            uuids_to_delete.add(config_rule['configrule_uuid'])
+        else:
+            MSG = 'Action for ConfigRule({:s}) is not supported '+\
+                  '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
+            str_config_rule = json.dumps(config_rule)
+            raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
+
+    #LOGGER.warning('uuids_to_delete={:s}'.format(str(uuids_to_delete)))
+    #LOGGER.warning('rules_to_upsert={:s}'.format(str(rules_to_upsert)))
 
-    configrule_updates = []
-    if len(config_rules) > 0:
-        stmt = insert(ConfigRuleModel).values(config_rules)
-        #stmt = stmt.on_conflict_do_update(
-        #    index_elements=[ConfigRuleModel.configrule_uuid],
-        #    set_=dict(
-        #        updated_at = stmt.excluded.updated_at,
-        #    )
-        #)
+    delete_affected = False
+    upsert_affected = False
+
+    if len(uuids_to_delete) > 0:
+        stmt = delete(ConfigRuleModel)
+        if device_uuid  is not None: stmt = stmt.where(ConfigRuleModel.device_uuid  == device_uuid )
+        if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
+        if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
+        stmt = stmt.where(ConfigRuleModel.configrule_uuid.in_(uuids_to_delete))
+        configrule_deletes = session.execute(stmt)#.fetchall()
+        delete_affected = int(configrule_deletes.rowcount) > 0
+
+    if len(rules_to_upsert) > 0:
+        stmt = insert(ConfigRuleModel).values(rules_to_upsert)
+        stmt = stmt.on_conflict_do_update(
+            index_elements=[ConfigRuleModel.configrule_uuid],
+            set_=dict(
+                position   = stmt.excluded.position,
+                action     = stmt.excluded.action,
+                data       = stmt.excluded.data,
+                updated_at = stmt.excluded.updated_at,
+            )
+        )
         stmt = stmt.returning(ConfigRuleModel.created_at, ConfigRuleModel.updated_at)
         configrule_updates = session.execute(stmt).fetchall()
+        upsert_affected = any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
 
-    return configrule_updates
-
-#Union_SpecificConfigRule = Union[
-#    ConfigRuleCustomModel, ConfigRuleAclModel
-#]
-#
-#def set_config_rule(
-#    database : Database, db_config : ConfigModel, position : int, resource_key : str, resource_value : str,
-#): # -> Tuple[ConfigRuleModel, bool]:
-#
-#    str_rule_key_hash = fast_hasher(resource_key)
-#    str_config_rule_key = key_to_str([db_config.config_uuid, str_rule_key_hash], separator=':')
-#
-#    data = {'config_fk': db_config, 'position': position, 'action': ORM_ConfigActionEnum.SET, 'key': resource_key,
-#            'value': resource_value}
-#    to_add = ConfigRuleModel(**data)
-#
-#    result = database.create_or_update(to_add)
-#    return result
-#Tuple_ConfigRuleSpecs = Tuple[Type, str, Dict, ConfigRuleKindEnum]
-#
-#def parse_config_rule_custom(database : Database, grpc_config_rule) -> Tuple_ConfigRuleSpecs:
-#    config_rule_class = ConfigRuleCustomModel
-#    str_config_rule_id = grpc_config_rule.custom.resource_key
-#    config_rule_data = {
-#        'key'  : grpc_config_rule.custom.resource_key,
-#        'value': grpc_config_rule.custom.resource_value,
-#    }
-#    return config_rule_class, str_config_rule_id, config_rule_data, ConfigRuleKindEnum.CUSTOM
-#
-#def parse_config_rule_acl(database : Database, grpc_config_rule) -> Tuple_ConfigRuleSpecs:
-#    config_rule_class = ConfigRuleAclModel
-#    grpc_endpoint_id = grpc_config_rule.acl.endpoint_id
-#    grpc_rule_set = grpc_config_rule.acl.rule_set
-#    device_uuid = grpc_endpoint_id.device_id.device_uuid.uuid
-#    endpoint_uuid = grpc_endpoint_id.endpoint_uuid.uuid
-#    str_endpoint_key = '/'.join([device_uuid, endpoint_uuid])
-#    #str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#    str_config_rule_id = ':'.join([str_endpoint_key, grpc_rule_set.name])
-#    config_rule_data = {
-#        #'endpoint_fk': db_endpoint,
-#        'endpoint_id': grpc_message_to_json_string(grpc_endpoint_id),
-#        'acl_data': grpc_message_to_json_string(grpc_rule_set),
-#    }
-#    return config_rule_class, str_config_rule_id, config_rule_data, ConfigRuleKindEnum.ACL
-#
-#CONFIGRULE_PARSERS = {
-#    'custom': parse_config_rule_custom,
-#    'acl'   : parse_config_rule_acl,
-#}
-#
-#Union_ConfigRuleModel = Union[
-#    ConfigRuleCustomModel, ConfigRuleAclModel,
-#]
-#
-#def set_config_rule(
-#    database : Database, db_config : ConfigModel, grpc_config_rule : ConfigRule, position : int
-#) -> Tuple[Union_ConfigRuleModel, bool]:
-#    grpc_config_rule_kind = str(grpc_config_rule.WhichOneof('config_rule'))
-#    parser = CONFIGRULE_PARSERS.get(grpc_config_rule_kind)
-#    if parser is None:
-#        raise NotImplementedError('ConfigRule of kind {:s} is not implemented: {:s}'.format(
-#            grpc_config_rule_kind, grpc_message_to_json_string(grpc_config_rule)))
-#
-#    # create specific ConfigRule
-#    config_rule_class, str_config_rule_id, config_rule_data, config_rule_kind = parser(database, grpc_config_rule)
-#    str_config_rule_key_hash = fast_hasher(':'.join([config_rule_kind.value, str_config_rule_id]))
-#    str_config_rule_key = key_to_str([db_config.pk, str_config_rule_key_hash], separator=':')
-#    result : Tuple[Union_ConfigRuleModel, bool] = update_or_create_object(
-#        database, config_rule_class, str_config_rule_key, config_rule_data)
-#    db_specific_config_rule, updated = result
-#
-#    # create generic ConfigRule
-#    config_rule_fk_field_name = 'config_rule_{:s}_fk'.format(config_rule_kind.value)
-#    config_rule_data = {
-#        'config_fk': db_config, 'kind': config_rule_kind, 'position': position,
-#        'action': ORM_ConfigActionEnum.SET,
-#        config_rule_fk_field_name: db_specific_config_rule
-#    }
-#    result : Tuple[ConfigRuleModel, bool] = update_or_create_object(
-#        database, ConfigRuleModel, str_config_rule_key, config_rule_data)
-#    db_config_rule, updated = result
-#
-#    return db_config_rule, updated
-#
-#def delete_config_rule(
-#    database : Database, db_config : ConfigModel, grpc_config_rule : ConfigRule
-#) -> None:
-#    grpc_config_rule_kind = str(grpc_config_rule.WhichOneof('config_rule'))
-#    parser = CONFIGRULE_PARSERS.get(grpc_config_rule_kind)
-#    if parser is None:
-#        raise NotImplementedError('ConfigRule of kind {:s} is not implemented: {:s}'.format(
-#            grpc_config_rule_kind, grpc_message_to_json_string(grpc_config_rule)))
-#
-#    # delete generic config rules; self deletes specific config rule
-#    _, str_config_rule_id, _, config_rule_kind = parser(database, grpc_config_rule)
-#    str_config_rule_key_hash = fast_hasher(':'.join([config_rule_kind.value, str_config_rule_id]))
-#    str_config_rule_key = key_to_str([db_config.pk, str_config_rule_key_hash], separator=':')
-#    db_config_rule : Optional[ConfigRuleModel] = get_object(
-#        database, ConfigRuleModel, str_config_rule_key, raise_if_not_found=False)
-#    if db_config_rule is None: return
-#    db_config_rule.delete()
-#
-#def update_config(
-#    database : Database, db_parent_pk : str, config_name : str, grpc_config_rules
-#) -> List[Tuple[Union[ConfigModel, ConfigRuleModel], bool]]:
-#
-#    str_config_key = key_to_str([config_name, db_parent_pk], separator=':')
-#    result : Tuple[ConfigModel, bool] = get_or_create_object(database, ConfigModel, str_config_key)
-#    db_config, created = result
-#
-#    db_objects = [(db_config, created)]
-#
-#    for position,grpc_config_rule in enumerate(grpc_config_rules):
-#        action = grpc_to_enum__config_action(grpc_config_rule.action)
-#
-#        if action == ORM_ConfigActionEnum.SET:
-#            result : Tuple[ConfigRuleModel, bool] = set_config_rule(
-#                database, db_config, grpc_config_rule, position)
-#            db_config_rule, updated = result
-#            db_objects.append((db_config_rule, updated))
-#        elif action == ORM_ConfigActionEnum.DELETE:
-#            delete_config_rule(database, db_config, grpc_config_rule)
-#        else:
-#            msg = 'Unsupported Action({:s}) for ConfigRule({:s})'
-#            str_action = str(ConfigActionEnum.Name(action))
-#            str_config_rule = grpc_message_to_json_string(grpc_config_rule)
-#            raise AttributeError(msg.format(str_action, str_config_rule))
-#
-#    return db_objects
+    return delete_affected or upsert_affected
diff --git a/src/context/service/database/Constraint.py b/src/context/service/database/Constraint.py
index 2880c05a8..82629b25c 100644
--- a/src/context/service/database/Constraint.py
+++ b/src/context/service/database/Constraint.py
@@ -47,14 +47,14 @@ def compose_constraints_data(
 def upsert_constraints(
     session : Session, constraints : List[Dict],
     service_uuid : Optional[str] = None, slice_uuid : Optional[str] = None
-) -> List[bool]:
+) -> bool:
     # TODO: do not delete all constraints; just add-remove as needed
     stmt = delete(ConstraintModel)
     if service_uuid is not None: stmt = stmt.where(ConstraintModel.service_uuid == service_uuid)
     if slice_uuid   is not None: stmt = stmt.where(ConstraintModel.slice_uuid   == slice_uuid  )
     session.execute(stmt)
 
-    constraint_updates = []
+    changed = False
     if len(constraints) > 0:
         stmt = insert(ConstraintModel).values(constraints)
         #stmt = stmt.on_conflict_do_update(
@@ -65,8 +65,9 @@ def upsert_constraints(
         #)
         stmt = stmt.returning(ConstraintModel.created_at, ConstraintModel.updated_at)
         constraint_updates = session.execute(stmt).fetchall()
+        changed = any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
 
-    return constraint_updates
+    return changed
 
 
 #    def set_constraint(self, db_constraints: ConstraintsModel, grpc_constraint: Constraint, position: int
diff --git a/src/context/service/database/Device.py b/src/context/service/database/Device.py
index 07d1c7606..cde8751b4 100644
--- a/src/context/service/database/Device.py
+++ b/src/context/service/database/Device.py
@@ -20,6 +20,7 @@ from sqlalchemy_cockroachdb import run_transaction
 from typing import Dict, List, Optional, Set, Tuple
 from common.method_wrappers.ServiceExceptions import InvalidArgumentException, NotFoundException
 from common.proto.context_pb2 import Device, DeviceId
+from common.tools.grpc.Tools import grpc_message_to_json_string
 from common.tools.object_factory.Device import json_device_id
 from .models.DeviceModel import DeviceModel
 from .models.EndPointModel import EndPointModel
@@ -136,6 +137,7 @@ def device_set(db_engine : Engine, request : Device) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        updated_endpoints = False
         if len(endpoints_data) > 0:
             stmt = insert(EndPointModel).values(endpoints_data)
             stmt = stmt.on_conflict_do_update(
@@ -149,17 +151,16 @@ def device_set(db_engine : Engine, request : Device) -> Tuple[Dict, bool]:
             )
             stmt = stmt.returning(EndPointModel.created_at, EndPointModel.updated_at)
             endpoint_updates = session.execute(stmt).fetchall()
-            updated = updated or any([(updated_at > created_at) for created_at,updated_at in endpoint_updates])
+            updated_endpoints = any([(updated_at > created_at) for created_at,updated_at in endpoint_updates])
 
         if len(related_topologies) > 0:
             session.execute(insert(TopologyDeviceModel).values(related_topologies).on_conflict_do_nothing(
                 index_elements=[TopologyDeviceModel.topology_uuid, TopologyDeviceModel.device_uuid]
             ))
 
-        configrule_updates = upsert_config_rules(session, config_rules, device_uuid=device_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
+        changed_config_rules = upsert_config_rules(session, config_rules, device_uuid=device_uuid)
 
-        return updated
+        return updated or updated_endpoints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_device_id(device_uuid),updated
diff --git a/src/context/service/database/Service.py b/src/context/service/database/Service.py
index 76a830535..9b9e9a621 100644
--- a/src/context/service/database/Service.py
+++ b/src/context/service/database/Service.py
@@ -118,6 +118,7 @@ def service_set(db_engine : Engine, request : Service) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        # TODO: check if endpoints are changed
         if len(service_endpoints_data) > 0:
             stmt = insert(ServiceEndPointModel).values(service_endpoints_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -125,13 +126,10 @@ def service_set(db_engine : Engine, request : Service) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
-        constraint_updates = upsert_constraints(session, constraints, service_uuid=service_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
+        changed_constraints = upsert_constraints(session, constraints, service_uuid=service_uuid)
+        changed_config_rules = upsert_config_rules(session, config_rules, service_uuid=service_uuid)
 
-        configrule_updates = upsert_config_rules(session, config_rules, service_uuid=service_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
-
-        return updated
+        return updated or changed_constraints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_service_id(service_uuid, json_context_id(context_uuid)),updated
diff --git a/src/context/service/database/Slice.py b/src/context/service/database/Slice.py
index 84bfff343..113af9aa4 100644
--- a/src/context/service/database/Slice.py
+++ b/src/context/service/database/Slice.py
@@ -136,6 +136,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
         created_at,updated_at = session.execute(stmt).fetchone()
         updated = updated_at > created_at
 
+        # TODO: check if endpoints are changed
         if len(slice_endpoints_data) > 0:
             stmt = insert(SliceEndPointModel).values(slice_endpoints_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -143,6 +144,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
+        # TODO: check if services are changed
         if len(slice_services_data) > 0:
             stmt = insert(SliceServiceModel).values(slice_services_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -150,6 +152,7 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
+        # TODO: check if subslices are changed
         if len(slice_subslices_data) > 0:
             stmt = insert(SliceSubSliceModel).values(slice_subslices_data)
             stmt = stmt.on_conflict_do_nothing(
@@ -157,13 +160,10 @@ def slice_set(db_engine : Engine, request : Slice) -> Tuple[Dict, bool]:
             )
             session.execute(stmt)
 
-        constraint_updates = upsert_constraints(session, constraints, slice_uuid=slice_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
+        changed_constraints = upsert_constraints(session, constraints, slice_uuid=slice_uuid)
+        changed_config_rules = upsert_config_rules(session, config_rules, slice_uuid=slice_uuid)
 
-        configrule_updates = upsert_config_rules(session, config_rules, slice_uuid=slice_uuid)
-        updated = updated or any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
-
-        return updated
+        return updated or changed_constraints or changed_config_rules
 
     updated = run_transaction(sessionmaker(bind=db_engine), callback)
     return json_slice_id(slice_uuid, json_context_id(context_uuid)),updated
-- 
GitLab


From 83d3a5f69d222c21c079bacf865297f56b207b83 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 18:35:33 +0000
Subject: [PATCH 18/31] Device component:

- Improved update of ConfigRules changed in Context
---
 src/device/service/DeviceServiceServicerImpl.py |  6 +-----
 src/device/service/Tools.py                     | 10 ++++------
 2 files changed, 5 insertions(+), 11 deletions(-)

diff --git a/src/device/service/DeviceServiceServicerImpl.py b/src/device/service/DeviceServiceServicerImpl.py
index 179b7795b..5b537609b 100644
--- a/src/device/service/DeviceServiceServicerImpl.py
+++ b/src/device/service/DeviceServiceServicerImpl.py
@@ -121,11 +121,7 @@ class DeviceServiceServicerImpl(DeviceServiceServicer):
                 for error in errors: LOGGER.error(error)
                 raise OperationFailedException('ConfigureDevice', extra_details=errors)
 
-            # Rules updated by configure_rules() and deconfigure_rules() methods.
-            # Code to be removed soon if not needed.
-            del device.device_config.config_rules[:]
-            populate_config_rules(device, driver)
-
+            # Note: Rules are updated by configure_rules() and deconfigure_rules() methods.
             device_id = context_client.SetDevice(device)
             return device_id
         finally:
diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
index d2cd0b481..b4dc66dde 100644
--- a/src/device/service/Tools.py
+++ b/src/device/service/Tools.py
@@ -107,7 +107,7 @@ def populate_endpoints(device : Device, driver : _Driver, monitoring_loops : Mon
     return errors
 
 def _raw_config_rules_to_grpc(
-    device_uuid : str, device_config : DeviceConfig, error_template : str, default_config_action : ConfigActionEnum,
+    device_uuid : str, device_config : DeviceConfig, error_template : str, config_action : ConfigActionEnum,
     raw_config_rules : List[Tuple[str, Union[Any, Exception, None]]]
 ) -> List[str]:
     errors : List[str] = list()
@@ -118,7 +118,7 @@ def _raw_config_rules_to_grpc(
             continue
 
         config_rule = device_config.config_rules.add()
-        config_rule.action = default_config_action
+        config_rule.action = config_action
         config_rule.custom.resource_key = resource_key
         config_rule.custom.resource_value = \
             resource_value if isinstance(resource_value, str) else json.dumps(resource_value, sort_keys=True)
@@ -177,9 +177,8 @@ def configure_rules(device : Device, driver : _Driver, resources_to_set : List[T
         for (resource_key, resource_value), result in zip(resources_to_set, results_setconfig)
     ]
 
-    device_config = DeviceConfig() # ignored; added at the end of ConfigureDevice
     return _raw_config_rules_to_grpc(
-        device_uuid, device_config, ERROR_SET, ConfigActionEnum.CONFIGACTION_SET, results_setconfig)
+        device_uuid, device.device_config, ERROR_SET, ConfigActionEnum.CONFIGACTION_SET, results_setconfig)
 
 def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : List[Tuple[str, Any]]) -> List[str]:
     device_uuid = device.device_id.device_uuid.uuid
@@ -190,9 +189,8 @@ def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : L
         for (resource_key, resource_value), result in zip(resources_to_delete, results_deleteconfig)
     ]
 
-    device_config = DeviceConfig() # ignored; added at the end of ConfigureDevice
     return _raw_config_rules_to_grpc(
-        device_uuid, device_config, ERROR_DELETE, ConfigActionEnum.CONFIGACTION_DELETE, results_deleteconfig)
+        device_uuid, device.device_config, ERROR_DELETE, ConfigActionEnum.CONFIGACTION_DELETE, results_deleteconfig)
 
 def subscribe_kpi(request : MonitoringSettings, driver : _Driver, monitoring_loops : MonitoringLoops) -> List[str]:
     kpi_uuid = request.kpi_id.kpi_id.uuid
-- 
GitLab


From 2799209f23ff8675221e0a2ced35a37df0e588bd Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 18:38:55 +0000
Subject: [PATCH 19/31] Service component:

- Update ConfigRules in Context when UpdateService is executed
- Defined SettingsHandler class to help in retrieving settings
- Updated ServiceHandlers to use the new SettingsHandler class
---
 .../service/ServiceServiceServicerImpl.py     | 16 ++--
 .../service_handler_api/SettingsHandler.py    | 88 +++++++++++++++++++
 .../service/service_handler_api/Tools.py      | 11 +++
 .../L2NMEmulatedServiceHandler.py             | 61 +++++--------
 .../L2NMOpenConfigServiceHandler.py           | 61 +++++--------
 .../L3NMEmulatedServiceHandler.py             | 72 +++++++--------
 .../L3NMOpenConfigServiceHandler.py           | 61 +++++--------
 .../microwave/MicrowaveServiceHandler.py      | 68 +++++---------
 .../tapi_tapi/TapiServiceHandler.py           | 61 +++++--------
 9 files changed, 259 insertions(+), 240 deletions(-)
 create mode 100644 src/service/service/service_handler_api/SettingsHandler.py

diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index 50f645e42..87eda2678 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -95,13 +95,17 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         service.CopyFrom(request if _service is None else _service)
         service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED     # pylint: disable=no-member
 
-        del service.service_endpoint_ids[:]                                     # pylint: disable=no-member
-        for service_endpoint_id in request.service_endpoint_ids:
-            service.service_endpoint_ids.add().CopyFrom(service_endpoint_id)    # pylint: disable=no-member
+        del service.service_endpoint_ids[:] # pylint: disable=no-member
+        for endpoint_id in request.service_endpoint_ids:
+            service.service_endpoint_ids.add().CopyFrom(endpoint_id)    # pylint: disable=no-member
 
-        del service.service_constraints[:]                                      # pylint: disable=no-member
-        for service_constraint in request.service_constraints:
-            service.service_constraints.add().CopyFrom(service_constraint)      # pylint: disable=no-member
+        del service.service_constraints[:]  # pylint: disable=no-member
+        for constraint in request.service_constraints:
+            service.service_constraints.add().CopyFrom(constraint)  # pylint: disable=no-member
+
+        del service.service_config.config_rules[:]  # pylint: disable=no-member
+        for config_rule in request.service_config.config_rules:
+            service.service_config.config_rules.add().CopyFrom(config_rule) # pylint: disable=no-member
 
         service_id_with_uuids = context_client.SetService(service)
         service_with_uuids = context_client.GetService(service_id_with_uuids)
diff --git a/src/service/service/service_handler_api/SettingsHandler.py b/src/service/service/service_handler_api/SettingsHandler.py
new file mode 100644
index 000000000..4df24cee0
--- /dev/null
+++ b/src/service/service/service_handler_api/SettingsHandler.py
@@ -0,0 +1,88 @@
+# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import anytree, json, logging
+from typing import Any, List, Optional, Tuple, Union
+from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, Device, EndPoint, ServiceConfig
+from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
+from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+
+LOGGER = logging.getLogger(__name__)
+
+class SettingsHandler:
+    def __init__(self, service_config : ServiceConfig, **settings) -> None:
+        self.__resolver = anytree.Resolver(pathattr='name')
+        self.__config = TreeNode('.')
+        for config_rule in service_config.config_rules:
+            self.update_config_rule(config_rule)
+
+    @staticmethod
+    def _config_rule_to_raw(config_rule : ConfigRule) -> Optional[Tuple[int, str, Any]]:
+        action = config_rule.action
+        kind = config_rule.WhichOneof('config_rule')
+        if kind == 'custom':
+            key_or_path = config_rule.custom.resource_key
+            value = config_rule.custom.resource_value
+            try:
+                value = json.loads(value)
+            except: # pylint: disable=bare-except
+                pass
+        elif kind == 'acl':
+            device_uuid = config_rule.acl.endpoint_id.device_id.device_uuid.uuid
+            endpoint_uuid = config_rule.acl.endpoint_id.endpoint_uuid.uuid
+            acl_ruleset_name = config_rule.acl.rule_set.name
+            ACL_KEY_TEMPLATE = '/device[{:s}]/endpoint[{:s}]/acl_ruleset[{:s}]'
+            key_or_path = ACL_KEY_TEMPLATE.format(device_uuid, endpoint_uuid, acl_ruleset_name)
+            value = grpc_message_to_json(config_rule.acl)
+        else:
+            MSG = 'Unsupported Kind({:s}) in ConfigRule({:s})'
+            LOGGER.warning(MSG.format(str(kind), grpc_message_to_json_string(config_rule)))
+            return None
+
+        return action, key_or_path, value
+
+    def get(self, key_or_path : Union[str, List[str]], default : Optional[Any] = None) -> Optional[TreeNode]:
+        return get_subnode(self.__resolver, self.__config, key_or_path, default=default)
+
+    def get_endpoint_settings(self, device : Device, endpoint : EndPoint) -> Optional[TreeNode]:
+        device_keys   = device.device_id.device_uuid.uuid,       device.name
+        endpoint_keys = endpoint.endpoint_id.endpoint_uuid.uuid, endpoint.name
+
+        for device_key in device_keys:
+            for endpoint_key in endpoint_keys:
+                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_key, endpoint_key)
+                endpoint_settings = self.get(endpoint_settings_uri)
+                if endpoint_settings is not None: return endpoint_settings
+
+        return None
+
+    def set(self, key_or_path : Union[str, List[str]], value : Any) -> None:
+        set_subnode_value(self.__resolver, self.__config, key_or_path, value)
+
+    def delete(self, key_or_path : Union[str, List[str]]) -> None:
+        delete_subnode(self.__resolver, self.__config, key_or_path)
+
+    def update_config_rule(self, config_rule : ConfigRule) -> None:
+        raw_data = SettingsHandler._config_rule_to_raw(config_rule)
+        if raw_data is None: return
+        action, key_or_path, value = raw_data
+
+        if action == ConfigActionEnum.CONFIGACTION_SET:
+            self.set(key_or_path, value)
+        elif action == ConfigActionEnum.CONFIGACTION_DELETE:
+            self.delete(key_or_path)
+        else:
+            MSG = 'Unsupported Action({:s}) in ConfigRule({:s})'
+            LOGGER.warning(MSG.format(str(action), grpc_message_to_json_string(config_rule)))
+            return
diff --git a/src/service/service/service_handler_api/Tools.py b/src/service/service/service_handler_api/Tools.py
index 61ad79761..ebd16a532 100644
--- a/src/service/service/service_handler_api/Tools.py
+++ b/src/service/service/service_handler_api/Tools.py
@@ -14,6 +14,8 @@
 
 import functools
 from typing import Any, List, Union
+from common.method_wrappers.ServiceExceptions import NotFoundException
+from common.proto.context_pb2 import Device, EndPoint
 
 ACTION_MSG_SET_ENDPOINT      = 'Set EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
 ACTION_MSG_DELETE_ENDPOINT   = 'Delete EndPoint(device_uuid={:s}, endpoint_uuid={:s}, topology_uuid={:s})'
@@ -40,3 +42,12 @@ check_errors_setconstraint    = functools.partial(_check_errors, ACTION_MSG_SET_
 check_errors_deleteconstraint = functools.partial(_check_errors, ACTION_MSG_DELETE_CONSTRAINT)
 check_errors_setconfig        = functools.partial(_check_errors, ACTION_MSG_SET_CONFIG       )
 check_errors_deleteconfig     = functools.partial(_check_errors, ACTION_MSG_DELETE_CONFIG    )
+
+def get_endpoint_matching(device : Device, endpoint_uuid_or_name : str) -> EndPoint:
+    for endpoint in device.device_endpoints:
+        choices = {endpoint.endpoint_id.endpoint_uuid.uuid, endpoint.name}
+        if endpoint_uuid_or_name in choices: return endpoint
+
+    device_uuid = device.device_id.device_uuid.uuid
+    extra_details = 'Device({:s})'.format(str(device_uuid))
+    raise NotFoundException('Endpoint', endpoint_uuid_or_name, extra_details=extra_details)
diff --git a/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
index bc628c160..66259d1f6 100644
--- a/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
+++ b/src/service/service/service_handlers/l2nm_emulated/L2NMEmulatedServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L2NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py b/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
index 23df44413..63442a6b4 100644
--- a/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
+++ b/src/service/service/service_handlers/l2nm_openconfig/L2NMOpenConfigServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L2NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
index f16122519..8a39ed474 100644
--- a/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
+++ b/src/service/service/service_handlers/l3nm_emulated/L3NMEmulatedServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,67 +48,64 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
         self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None
     ) -> List[Union[bool, Exception]]:
+        LOGGER.info('[SetEndpoint] endpoints={:s}'.format(str(endpoints)))
+        LOGGER.info('[SetEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
+        LOGGER.info('[SetEndpoint] settings={:s}'.format(str(settings)))
 
         results = []
         for endpoint in endpoints:
+            LOGGER.info('[SetEndpoint] endpoint={:s}'.format(str(endpoint)))
             try:
                 chk_type('endpoint', endpoint, (tuple, list))
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
+                LOGGER.info('[SetEndpoint] endpoint_settings={:s}'.format(str(endpoint_settings)))
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
+                LOGGER.info('[SetEndpoint] json_config_rules={:s}'.format(str(json_config_rules)))
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
                 results.append(e)
 
+        LOGGER.info('[SetEndpoint] results={:s}'.format(str(results)))
         return results
 
     @metered_subclass_method(METRICS_POOL)
     def DeleteEndpoint(
         self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None
     ) -> List[Union[bool, Exception]]:
+        LOGGER.info('[DeleteEndpoint] endpoints={:s}'.format(str(endpoints)))
+        LOGGER.info('[DeleteEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +114,17 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +158,8 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +175,7 @@ class L3NMEmulatedServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
index 0f5cb6c55..3dc98f71b 100644
--- a/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
+++ b/src/service/service/service_handlers/l3nm_openconfig/L3NMOpenConfigServiceHandler.py
@@ -12,14 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_length, chk_type
+from service.service.service_handler_api.Tools import get_endpoint_matching
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 from .ConfigRules import setup_config_rules, teardown_config_rules
 
@@ -47,22 +48,8 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -72,7 +59,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -81,17 +68,17 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = setup_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetEndpoint({:s})'.format(str(endpoint)))
@@ -107,7 +94,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         if len(endpoints) == 0: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
+        settings = self.__settings_handler.get('/settings')
 
         results = []
         for endpoint in endpoints:
@@ -116,17 +103,17 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
                 chk_length('endpoint', endpoint, min_length=2, max_length=3)
                 device_uuid, endpoint_uuid = endpoint[0:2] # ignore topology_uuid by now
 
-                endpoint_settings_uri = '/device[{:s}]/endpoint[{:s}]/settings'.format(device_uuid, endpoint_uuid)
-                endpoint_settings : TreeNode = get_subnode(self.__resolver, self.__config, endpoint_settings_uri, None)
+                device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+                endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
+                endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
 
                 json_config_rules = teardown_config_rules(
                     service_uuid, connection_uuid, device_uuid, endpoint_uuid, settings, endpoint_settings)
 
-                device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-                del device.device_config.config_rules[:]
+                del device_obj.device_config.config_rules[:]
                 for json_config_rule in json_config_rules:
-                    device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-                self.__task_executor.configure_device(device)
+                    device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+                self.__task_executor.configure_device(device_obj)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteEndpoint({:s})'.format(str(endpoint)))
@@ -160,9 +147,8 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -178,8 +164,7 @@ class L3NMOpenConfigServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py b/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
index fb54a1bc1..a16f8cdfa 100644
--- a/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
+++ b/src/service/service/service_handlers/microwave/MicrowaveServiceHandler.py
@@ -12,15 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, Dict, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_type
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 
 LOGGER = logging.getLogger(__name__)
@@ -38,22 +38,8 @@ class MicrowaveServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -61,26 +47,22 @@ class MicrowaveServiceHandler(_ServiceHandler):
     ) -> List[Union[bool, Exception]]:
         LOGGER.info('[SetEndpoint] endpoints={:s}'.format(str(endpoints)))
         LOGGER.info('[SetEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
+        settings = self.__settings_handler.get('/settings')
+        json_settings : Dict = {} if settings is None else settings.value
+        vlan_id = json_settings.get('vlan_id', 121)
 
         results = []
         try:
-            chk_type('endpoints', endpoints, list)
-            if len(endpoints) != 2: raise Exception('len(endpoints) != 2')
-
-            settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
-            if settings is None:
-                raise Exception('Unable to retrieve settings for Service({:s})'.format(str(service_uuid)))
-
-            json_settings : Dict = settings.value
-            vlan_id = json_settings.get('vlan_id', 121)
             # endpoints are retrieved in the following format --> '/endpoints/endpoint[172.26.60.243:9]'
             node_id_src, tp_id_src = check_endpoint(endpoints[0][1], service_uuid)
             node_id_dst, tp_id_dst = check_endpoint(endpoints[1][1], service_uuid)
         
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_set('/services/service[{:s}]'.format(service_uuid), {
                 'uuid'       : service_uuid,
                 'node_id_src': node_id_src,
@@ -89,9 +71,9 @@ class MicrowaveServiceHandler(_ServiceHandler):
                 'tp_id_dst'  : tp_id_dst,
                 'vlan_id'    : vlan_id,
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to SetEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -106,21 +88,21 @@ class MicrowaveServiceHandler(_ServiceHandler):
         LOGGER.info('[DeleteEndpoint] endpoints={:s}'.format(str(endpoints)))
         LOGGER.info('[DeleteEndpoint] connection_uuid={:s}'.format(str(connection_uuid)))
 
+        chk_type('endpoints', endpoints, list)
+        if len(endpoints) != 2: return []
+
         service_uuid = self.__service.service_id.service_uuid.uuid
 
         results = []
         try:
-            chk_type('endpoints', endpoints, list)
-            if len(endpoints) < 1: raise Exception('len(endpoints) < 1')
-
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_delete('/services/service[{:s}]'.format(service_uuid), {
                 'uuid': service_uuid
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to DeleteEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -154,9 +136,8 @@ class MicrowaveServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -172,8 +153,7 @@ class MicrowaveServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
diff --git a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
index 24371203a..d8a4668bb 100644
--- a/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
+++ b/src/service/service/service_handlers/tapi_tapi/TapiServiceHandler.py
@@ -12,15 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import anytree, json, logging
+import json, logging
 from typing import Any, Dict, List, Optional, Tuple, Union
 from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, DeviceId, Service
 from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_type
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
-from service.service.service_handler_api.AnyTreeTools import TreeNode, delete_subnode, get_subnode, set_subnode_value
+from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.task_scheduler.TaskExecutor import TaskExecutor
 
 LOGGER = logging.getLogger(__name__)
@@ -32,22 +32,8 @@ class TapiServiceHandler(_ServiceHandler):
         self, service : Service, task_executor : TaskExecutor, **settings
     ) -> None:
         self.__service = service
-        self.__task_executor = task_executor # pylint: disable=unused-private-member
-        self.__resolver = anytree.Resolver(pathattr='name')
-        self.__config = TreeNode('.')
-        for config_rule in service.service_config.config_rules:
-            action = config_rule.action
-            if config_rule.WhichOneof('config_rule') != 'custom': continue
-            resource_key = config_rule.custom.resource_key
-            resource_value = config_rule.custom.resource_value
-            if action == ConfigActionEnum.CONFIGACTION_SET:
-                try:
-                    resource_value = json.loads(resource_value)
-                except: # pylint: disable=bare-except
-                    pass
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
-            elif action == ConfigActionEnum.CONFIGACTION_DELETE:
-                delete_subnode(self.__resolver, self.__config, resource_key)
+        self.__task_executor = task_executor
+        self.__settings_handler = SettingsHandler(service.service_config, **settings)
 
     @metered_subclass_method(METRICS_POOL)
     def SetEndpoint(
@@ -59,10 +45,8 @@ class TapiServiceHandler(_ServiceHandler):
         if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
-        settings : TreeNode = get_subnode(self.__resolver, self.__config, '/settings', None)
-        if settings is None: raise Exception('Unable to retrieve settings for Service({:s})'.format(str(service_uuid)))
-
-        json_settings : Dict = settings.value
+        settings = self.__settings_handler.get('/settings')
+        json_settings : Dict = {} if settings is None else settings.value
         capacity_value   = json_settings.get('capacity_value',   50.0)
         capacity_unit    = json_settings.get('capacity_unit',    'GHz')
         layer_proto_name = json_settings.get('layer_proto_name', 'PHOTONIC_MEDIA')
@@ -72,7 +56,7 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         try:
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             json_config_rule = json_config_rule_set('/service[{:s}]'.format(service_uuid), {
                 'uuid'                    : service_uuid,
                 'input_sip'               : endpoints[0][1],
@@ -83,12 +67,12 @@ class TapiServiceHandler(_ServiceHandler):
                 'layer_protocol_qualifier': layer_proto_qual,
                 'direction'               : direction,
             })
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
-            LOGGER.exception('Unable to configure Service({:s})'.format(str(service_uuid)))
+            LOGGER.exception('Unable to SetEndpoint for Service({:s})'.format(str(service_uuid)))
             results.append(e)
 
         return results
@@ -104,14 +88,17 @@ class TapiServiceHandler(_ServiceHandler):
         if len(endpoints) != 2: return []
 
         service_uuid = self.__service.service_id.service_uuid.uuid
+
         results = []
         try:
             device_uuid = endpoints[0][0]
-            device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
-            json_config_rule = json_config_rule_delete('/service[{:s}]'.format(service_uuid), {'uuid': service_uuid})
-            del device.device_config.config_rules[:]
-            device.device_config.config_rules.append(ConfigRule(**json_config_rule))
-            self.__task_executor.configure_device(device)
+            device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
+            json_config_rule = json_config_rule_delete('/service[{:s}]'.format(service_uuid), {
+                'uuid': service_uuid
+            })
+            del device_obj.device_config.config_rules[:]
+            device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule))
+            self.__task_executor.configure_device(device_obj)
             results.append(True)
         except Exception as e: # pylint: disable=broad-except
             LOGGER.exception('Unable to DeleteEndpoint for Service({:s})'.format(str(service_uuid)))
@@ -145,9 +132,8 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, resource_value = resource
-                resource_value = json.loads(resource_value)
-                set_subnode_value(self.__resolver, self.__config, resource_key, resource_value)
+                resource_value = json.loads(resource[1])
+                self.__settings_handler.set(resource[0], resource_value)
                 results.append(True)
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource)))
@@ -163,8 +149,7 @@ class TapiServiceHandler(_ServiceHandler):
         results = []
         for resource in resources:
             try:
-                resource_key, _ = resource
-                delete_subnode(self.__resolver, self.__config, resource_key)
+                self.__settings_handler.delete(resource[0])
             except Exception as e: # pylint: disable=broad-except
                 LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource)))
                 results.append(e)
-- 
GitLab


From 39436a6b026221b8e774dfb06fd0958a5d67c027 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sat, 28 Jan 2023 23:49:55 +0000
Subject: [PATCH 20/31] Device component:

- testing CI pipeline, added log messages
---
 src/device/tests/test_unitary_emulated.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/device/tests/test_unitary_emulated.py b/src/device/tests/test_unitary_emulated.py
index 8a1b30a6e..1af1ad861 100644
--- a/src/device/tests/test_unitary_emulated.py
+++ b/src/device/tests/test_unitary_emulated.py
@@ -156,8 +156,8 @@ def test_device_emulated_configure(
         for config_rule in device_data.device_config.config_rules
         if config_rule.WhichOneof('config_rule') == 'custom'
     ]
-    #LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
-    #    '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
+    LOGGER.info('device_data.device_config.config_rules = \n{:s}'.format(
+        '\n'.join(['{:s} {:s} = {:s}'.format(*config_rule) for config_rule in config_rules])))
     RESULTING_CONFIG_ENDPOINTS = {cr['custom']['resource_key']:cr for cr in copy.deepcopy(DEVICE_EMU_CONFIG_ENDPOINTS)}
     for endpoint_cooked in DEVICE_EMU_ENDPOINTS_COOKED:
         values = json.loads(RESULTING_CONFIG_ENDPOINTS[endpoint_cooked[0]]['custom']['resource_value'])
@@ -168,14 +168,14 @@ def test_device_emulated_configure(
         config_rule = (
             ConfigActionEnum.Name(config_rule['action']), config_rule['custom']['resource_key'],
             json.loads(json.dumps(config_rule['custom']['resource_value'])))
-        #LOGGER.info('config_rule: {:s} {:s} = {:s}'.format(*config_rule))
+        LOGGER.info('A config_rule: {:s} {:s} = {:s}'.format(*config_rule))
         assert config_rule in config_rules
     for config_rule in DEVICE_EMU_CONFIG_ADDRESSES:
         assert 'custom' in config_rule
         config_rule = (
             ConfigActionEnum.Name(config_rule['action']), config_rule['custom']['resource_key'],
             json.loads(json.dumps(config_rule['custom']['resource_value'])))
-        #LOGGER.info('config_rule: {:s} {:s} = {:s}'.format(*config_rule))
+        LOGGER.info('B config_rule: {:s} {:s} = {:s}'.format(*config_rule))
         assert config_rule in config_rules
 
     # Try to reconfigure...
-- 
GitLab


From ba9ab833b0f3a7bbf5989992b95c984f9aff63d5 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 08:36:44 +0000
Subject: [PATCH 21/31] Common:

- updated method to update config rules and support action updates
---
 src/common/tools/grpc/ConfigRules.py | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/common/tools/grpc/ConfigRules.py b/src/common/tools/grpc/ConfigRules.py
index e109cb7a0..4a7e588e4 100644
--- a/src/common/tools/grpc/ConfigRules.py
+++ b/src/common/tools/grpc/ConfigRules.py
@@ -18,24 +18,31 @@
 
 import json
 from typing import Any, Dict, Tuple
-from common.proto.context_pb2 import ConfigActionEnum, ConfigRule
+from common.proto.context_pb2 import ConfigActionEnum
 from common.tools.grpc.Tools import grpc_message_to_json_string
 
-def update_config_rule_custom(config_rules, resource_key : str, fields : Dict[str, Tuple[Any, bool]]) -> ConfigRule:
+def update_config_rule_custom(
+    config_rules, resource_key : str, fields : Dict[str, Tuple[Any, bool]],
+    new_action : ConfigActionEnum = ConfigActionEnum.CONFIGACTION_SET
+) -> None:
     # fields: Dict[field_name : str, Tuple[field_value : Any, raise_if_differs : bool]]
 
+    # TODO: add support for ACL config rules
+
     for config_rule in config_rules:
-        if config_rule.WhichOneof('config_rule') != 'custom': continue
+        kind = config_rule.WhichOneof('config_rule')
+        if kind != 'custom': continue
         if config_rule.custom.resource_key != resource_key: continue
         json_resource_value = json.loads(config_rule.custom.resource_value)
         break   # found, end loop
     else:
         # not found, add it
         config_rule = config_rules.add()    # pylint: disable=no-member
-        config_rule.action = ConfigActionEnum.CONFIGACTION_SET
         config_rule.custom.resource_key = resource_key
         json_resource_value = {}
 
+    config_rule.action = new_action
+
     for field_name,(field_value, raise_if_differs) in fields.items():
         if (field_name not in json_resource_value) or not raise_if_differs:
             # missing or raise_if_differs=False, add/update it
-- 
GitLab


From 1dbb33d24ec195086a3e4329e9f988c85ef314ff Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 08:37:27 +0000
Subject: [PATCH 22/31] Device component:

- implemented support to compose resulting rules considering previous values
---
 src/device/service/Tools.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
index b4dc66dde..28b5391a7 100644
--- a/src/device/service/Tools.py
+++ b/src/device/service/Tools.py
@@ -19,6 +19,7 @@ from common.method_wrappers.ServiceExceptions import InvalidArgumentException
 from common.proto.context_pb2 import ConfigActionEnum, Device, DeviceConfig
 from common.proto.device_pb2 import MonitoringSettings
 from common.proto.kpi_sample_types_pb2 import KpiSampleType
+from common.tools.grpc.ConfigRules import update_config_rule_custom
 from common.tools.grpc.Tools import grpc_message_to_json
 from .driver_api._Driver import _Driver, RESOURCE_ENDPOINTS
 from .monitoring.MonitoringLoops import MonitoringLoops
@@ -117,11 +118,8 @@ def _raw_config_rules_to_grpc(
             errors.append(error_template.format(device_uuid, str(resource_key), str(resource_value)))
             continue
 
-        config_rule = device_config.config_rules.add()
-        config_rule.action = config_action
-        config_rule.custom.resource_key = resource_key
-        config_rule.custom.resource_value = \
-            resource_value if isinstance(resource_value, str) else json.dumps(resource_value, sort_keys=True)
+        resource_value = json.loads(resource_value) if isinstance(resource_value, str) else resource_value
+        update_config_rule_custom(device_config.config_rules, resource_key, resource_value, new_action=config_action)
 
     return errors
 
-- 
GitLab


From c2a789ad113490eceae631d7e735358eebfc5055 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 09:07:56 +0000
Subject: [PATCH 23/31] Device component:

- corrected parameters for composing config rules considering previous values
- corrected unitary test for deconfigure
---
 src/device/service/Tools.py               | 1 +
 src/device/tests/test_unitary_emulated.py | 6 +++++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
index 28b5391a7..970a35e37 100644
--- a/src/device/service/Tools.py
+++ b/src/device/service/Tools.py
@@ -119,6 +119,7 @@ def _raw_config_rules_to_grpc(
             continue
 
         resource_value = json.loads(resource_value) if isinstance(resource_value, str) else resource_value
+        resource_value = {field_name : (field_value, False) for field_name,field_value in resource_value.items()}
         update_config_rule_custom(device_config.config_rules, resource_key, resource_value, new_action=config_action)
 
     return errors
diff --git a/src/device/tests/test_unitary_emulated.py b/src/device/tests/test_unitary_emulated.py
index 1af1ad861..0e5f8e22d 100644
--- a/src/device/tests/test_unitary_emulated.py
+++ b/src/device/tests/test_unitary_emulated.py
@@ -379,7 +379,11 @@ def test_device_emulated_deconfigure(
     for config_rule in config_rules:
         assert config_rule.WhichOneof('config_rule') == 'custom'
         if config_rule.custom.resource_key.startswith('/endpoints/endpoint'): continue
-        config_rule_value = json.loads(config_rule.custom.resource_value)
+        if config_rule.custom.resource_key.startswith('_connect/'): continue
+        try:
+            config_rule_value = json.loads(config_rule.custom.resource_value)
+        except: # pylint: disable=bare-except
+            config_rule_value = config_rule.custom.resource_value
         if isinstance(config_rule_value, str) and config_rule_value.startswith('do_sampling (trigger:'): continue
         clean_config_rules.append(config_rule)
     LOGGER.info('clean_config_rules = {:s}'.format(str(clean_config_rules)))
-- 
GitLab


From 7ad1d40fd71c613948c9379363b21fed27caf807 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 09:08:48 +0000
Subject: [PATCH 24/31] Context component:

- added logs to config rule management for debug purposes
---
 src/context/service/database/ConfigRule.py | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/src/context/service/database/ConfigRule.py b/src/context/service/database/ConfigRule.py
index e35f246b6..0f0e9668c 100644
--- a/src/context/service/database/ConfigRule.py
+++ b/src/context/service/database/ConfigRule.py
@@ -14,6 +14,7 @@
 
 import datetime, json, logging
 from sqlalchemy import delete
+from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
 from typing import Dict, List, Optional, Set
@@ -104,8 +105,11 @@ def upsert_config_rules(
             str_config_rule = json.dumps(config_rule)
             raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
 
-    #LOGGER.warning('uuids_to_delete={:s}'.format(str(uuids_to_delete)))
-    #LOGGER.warning('rules_to_upsert={:s}'.format(str(rules_to_upsert)))
+    LOGGER.warning('device_uuid={:s}'.format(str(device_uuid)))
+    LOGGER.warning('service_uuid={:s}'.format(str(service_uuid)))
+    LOGGER.warning('slice_uuid={:s}'.format(str(slice_uuid)))
+    LOGGER.warning('uuids_to_delete={:s}'.format(str(uuids_to_delete)))
+    LOGGER.warning('rules_to_upsert={:s}'.format(str(rules_to_upsert)))
 
     delete_affected = False
     upsert_affected = False
@@ -116,7 +120,14 @@ def upsert_config_rules(
         if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
         if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
         stmt = stmt.where(ConfigRuleModel.configrule_uuid.in_(uuids_to_delete))
-        configrule_deletes = session.execute(stmt)#.fetchall()
+
+        str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        LOGGER.warning('raw delete stmt={:s}'.format(str(str_stmt)))
+
+        configrule_deletes = session.execute(stmt)
+
+        LOGGER.warning('configrule_deletes.rowcount={:s}'.format(str(configrule_deletes.rowcount)))
+
         delete_affected = int(configrule_deletes.rowcount) > 0
 
     if len(rules_to_upsert) > 0:
-- 
GitLab


From 3aeb2e5cb6b1f47858fd743c4ea3342df59c96c6 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 10:03:08 +0000
Subject: [PATCH 25/31] Hackfest MockOSM:

- updated default IP address
- corrected error retrieval message
---
 hackfest/mock_osm/WimconnectorIETFL2VPN.py | 2 +-
 hackfest/mock_osm/__main__.py              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/hackfest/mock_osm/WimconnectorIETFL2VPN.py b/hackfest/mock_osm/WimconnectorIETFL2VPN.py
index e1273b4e4..aa4ca045f 100644
--- a/hackfest/mock_osm/WimconnectorIETFL2VPN.py
+++ b/hackfest/mock_osm/WimconnectorIETFL2VPN.py
@@ -73,7 +73,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase):
             response = requests.get(endpoint, auth=self.auth)
             http_code = response.status_code
         except requests.exceptions.RequestException as e:
-            raise SdnConnectorError(e.message, http_code=503)
+            raise SdnConnectorError(e.response, http_code=503)
 
         if http_code != 200:
             raise SdnConnectorError("Failed while authenticating", http_code=http_code)
diff --git a/hackfest/mock_osm/__main__.py b/hackfest/mock_osm/__main__.py
index e76616eab..410feda84 100644
--- a/hackfest/mock_osm/__main__.py
+++ b/hackfest/mock_osm/__main__.py
@@ -18,7 +18,7 @@ from .MockOSM import MockOSM
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 
-WIM_URL = 'http://10.0.2.15:80'
+WIM_URL = 'http://10.0.2.10:80'
 WIM_USERNAME = 'admin'
 WIM_PASSWORD = 'admin'
 
-- 
GitLab


From 5ec2b2b6ee99c0401f6407cc75219b18348b0e70 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 10:03:21 +0000
Subject: [PATCH 26/31] MockOSM:

- corrected error retrieval message
---
 src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py b/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
index e1273b4e4..aa4ca045f 100644
--- a/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
+++ b/src/tests/tools/mock_osm/WimconnectorIETFL2VPN.py
@@ -73,7 +73,7 @@ class WimconnectorIETFL2VPN(SdnConnectorBase):
             response = requests.get(endpoint, auth=self.auth)
             http_code = response.status_code
         except requests.exceptions.RequestException as e:
-            raise SdnConnectorError(e.message, http_code=503)
+            raise SdnConnectorError(str(e), http_code=503)
 
         if http_code != 200:
             raise SdnConnectorError("Failed while authenticating", http_code=http_code)
-- 
GitLab


From 37da83e221ceaa473f4997d6f7ab175c4529eed1 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 15:14:01 +0000
Subject: [PATCH 27/31] Hackfest TAPI server:

- updated methods to properly retrieve/store connectivity services
---
 .../tapi_connectivity_controller.py           | 38 ++++++++++++++++++-
 1 file changed, 36 insertions(+), 2 deletions(-)

diff --git a/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py b/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
index 640f5a72d..dc0118b7a 100644
--- a/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
+++ b/hackfest/tapi/server/tapi_server/controllers/tapi_connectivity_controller.py
@@ -2559,8 +2559,42 @@ def data_tapi_commoncontext_tapi_connectivityconnectivity_context_post(body=None
     :rtype: None
     """
     if connexion.request.is_json:
-        body = TapiConnectivityConnectivityContextWrapper.from_dict(connexion.request.get_json())  # noqa: E501
-    return 'do some magic!'
+        #body = TapiConnectivityConnectivityContextWrapper.from_dict(connexion.request.get_json())  # noqa: E501
+        raw_body = connexion.request.get_json()
+        if "tapi-connectivity:connectivity-service" in raw_body:
+            raw_body["connectivity-service"] = raw_body.pop("tapi-connectivity:connectivity-service")
+        if isinstance(raw_body["connectivity-service"], list) and len(raw_body["connectivity-service"]) > 0:
+            raw_body["connectivity-service"] = raw_body["connectivity-service"][0]
+
+        connectivity_service = raw_body["connectivity-service"]
+        if "connectivity-constraint" in connectivity_service:
+            connectivity_constraint = connectivity_service.pop("connectivity-constraint")
+            if "requested-capacity" in connectivity_constraint:
+                connectivity_service["requested-capacity"] = connectivity_constraint.pop("requested-capacity")
+            if "connectivity-direction" in connectivity_constraint:
+                connectivity_service["connectivity-direction"] = connectivity_constraint.pop("connectivity-direction")
+
+        body = TapiConnectivityConnectivityServiceWrapper.from_dict(raw_body)  # noqa: E501
+
+    connection = TapiConnectivityConnection(
+        uuid=body.connectivity_service.uuid,
+        connection_end_point=[
+            TapiConnectivityConnectionEndPointRef(
+                node_edge_point_uuid="node-1-port-3", connection_end_point_uuid="cep13"),
+            TapiConnectivityConnectionEndPointRef(
+                node_edge_point_uuid="node-3-port-2", connection_end_point_uuid="cep32"),
+        ]
+    )
+    connection_ref = TapiConnectivityConnectionRef(connection.uuid)
+    body.connectivity_service.connection = [ connection_ref ]
+
+    if database.context.connectivity_context is None:
+        database.context.connectivity_context = TapiConnectivityConnectivityContext(
+            connectivity_service=[], connection=[]
+        )
+
+    database.context.connectivity_context.connection.append(connection)
+    database.context.connectivity_context.connectivity_service.append(body.connectivity_service)
 
 
 def data_tapi_commoncontext_tapi_connectivityconnectivity_context_put(body=None):  # noqa: E501
-- 
GitLab


From 812f09fe91029ecc56f6e10b9cd482e13b06d183 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 15:21:10 +0000
Subject: [PATCH 28/31] Context component:

- corrected ConstraintKinds in ConstraintModel
- corrected UUID composition in ConfigRules to prevent collisions
- corrected classification of config rules to upsert/delete
- removed unneeded logs
---
 src/context/service/database/ConfigRule.py    |  50 +--
 src/context/service/database/Constraint.py    | 167 ++++----
 .../database/models/ConstraintModel.py        | 364 +-----------------
 3 files changed, 112 insertions(+), 469 deletions(-)

diff --git a/src/context/service/database/ConfigRule.py b/src/context/service/database/ConfigRule.py
index 0f0e9668c..7e4a383c7 100644
--- a/src/context/service/database/ConfigRule.py
+++ b/src/context/service/database/ConfigRule.py
@@ -14,7 +14,7 @@
 
 import datetime, json, logging
 from sqlalchemy import delete
-from sqlalchemy.dialects import postgresql
+#from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
 from typing import Dict, List, Optional, Set
@@ -44,29 +44,29 @@ def compose_config_rules_data(
             'updated_at': now,
         }
 
-        parent_uuid = None
+        parent_kind,parent_uuid = '',None
         if device_uuid is not None:
             dict_config_rule['device_uuid'] = device_uuid
-            parent_uuid = device_uuid
+            parent_kind,parent_uuid = 'device',device_uuid
         elif service_uuid is not None:
             dict_config_rule['service_uuid'] = service_uuid
-            parent_uuid = service_uuid
+            parent_kind,parent_uuid = 'service',service_uuid
         elif slice_uuid is not None:
             dict_config_rule['slice_uuid'] = slice_uuid
-            parent_uuid = slice_uuid
+            parent_kind,parent_uuid = 'slice',slice_uuid
         else:
             MSG = 'Parent for ConfigRule({:s}) cannot be identified '+\
                   '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
             str_config_rule = grpc_message_to_json_string(config_rule)
             raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
-        
+
         configrule_name = None
         if kind == ConfigRuleKindEnum.CUSTOM:
-            configrule_name = config_rule.custom.resource_key
+            configrule_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, config_rule.custom.resource_key)
         elif kind == ConfigRuleKindEnum.ACL:
-            endpoint_uuid = endpoint_get_uuid(config_rule.acl.endpoint_id, allow_random=False)
+            _, _, endpoint_uuid = endpoint_get_uuid(config_rule.acl.endpoint_id, allow_random=False)
             rule_set_name = config_rule.acl.rule_set.name
-            configrule_name = '{:s}/{:s}'.format(endpoint_uuid, rule_set_name)
+            configrule_name = '{:s}:{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid, rule_set_name)
         else:
             MSG = 'Name for ConfigRule({:s}) cannot be inferred '+\
                   '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
@@ -87,49 +87,39 @@ def upsert_config_rules(
     uuids_to_upsert : Dict[str, int] = dict()
     rules_to_upsert : List[Dict] = list()
     for config_rule in config_rules:
-        if config_rule['action'] == ORM_ConfigActionEnum.SET:
-            configrule_uuid = config_rule['configrule_uuid']
+        configrule_uuid = config_rule['configrule_uuid']
+        configrule_action = config_rule['action']
+        if configrule_action == ORM_ConfigActionEnum.SET:
             position = uuids_to_upsert.get(configrule_uuid)
             if position is None:
                 # if not added, add it
                 rules_to_upsert.append(config_rule)
-                uuids_to_upsert[config_rule['configrule_uuid']] = len(rules_to_upsert) - 1
+                uuids_to_upsert[configrule_uuid] = len(rules_to_upsert) - 1
             else:
                 # if already added, update occurrence
                 rules_to_upsert[position] = config_rule
-        elif config_rule['action'] == ORM_ConfigActionEnum.DELETE:
-            uuids_to_delete.add(config_rule['configrule_uuid'])
+        elif configrule_action == ORM_ConfigActionEnum.DELETE:
+            uuids_to_delete.add(configrule_uuid)
         else:
             MSG = 'Action for ConfigRule({:s}) is not supported '+\
                   '(device_uuid={:s}, service_uuid={:s}, slice_uuid={:s})'
             str_config_rule = json.dumps(config_rule)
             raise Exception(MSG.format(str_config_rule, str(device_uuid), str(service_uuid), str(slice_uuid)))
 
-    LOGGER.warning('device_uuid={:s}'.format(str(device_uuid)))
-    LOGGER.warning('service_uuid={:s}'.format(str(service_uuid)))
-    LOGGER.warning('slice_uuid={:s}'.format(str(slice_uuid)))
-    LOGGER.warning('uuids_to_delete={:s}'.format(str(uuids_to_delete)))
-    LOGGER.warning('rules_to_upsert={:s}'.format(str(rules_to_upsert)))
-
     delete_affected = False
-    upsert_affected = False
-
     if len(uuids_to_delete) > 0:
         stmt = delete(ConfigRuleModel)
         if device_uuid  is not None: stmt = stmt.where(ConfigRuleModel.device_uuid  == device_uuid )
         if service_uuid is not None: stmt = stmt.where(ConfigRuleModel.service_uuid == service_uuid)
         if slice_uuid   is not None: stmt = stmt.where(ConfigRuleModel.slice_uuid   == slice_uuid  )
         stmt = stmt.where(ConfigRuleModel.configrule_uuid.in_(uuids_to_delete))
-
-        str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
-        LOGGER.warning('raw delete stmt={:s}'.format(str(str_stmt)))
-
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('delete stmt={:s}'.format(str(str_stmt)))
         configrule_deletes = session.execute(stmt)
-
-        LOGGER.warning('configrule_deletes.rowcount={:s}'.format(str(configrule_deletes.rowcount)))
-
+        #LOGGER.warning('configrule_deletes.rowcount={:s}'.format(str(configrule_deletes.rowcount)))
         delete_affected = int(configrule_deletes.rowcount) > 0
 
+    upsert_affected = False
     if len(rules_to_upsert) > 0:
         stmt = insert(ConfigRuleModel).values(rules_to_upsert)
         stmt = stmt.on_conflict_do_update(
@@ -142,6 +132,8 @@ def upsert_config_rules(
             )
         )
         stmt = stmt.returning(ConfigRuleModel.created_at, ConfigRuleModel.updated_at)
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('upsert stmt={:s}'.format(str(str_stmt)))
         configrule_updates = session.execute(stmt).fetchall()
         upsert_affected = any([(updated_at > created_at) for created_at,updated_at in configrule_updates])
 
diff --git a/src/context/service/database/Constraint.py b/src/context/service/database/Constraint.py
index 82629b25c..cf6e11ac9 100644
--- a/src/context/service/database/Constraint.py
+++ b/src/context/service/database/Constraint.py
@@ -14,13 +14,15 @@
 
 import datetime, logging
 from sqlalchemy import delete
+#from sqlalchemy.dialects import postgresql
 from sqlalchemy.dialects.postgresql import insert
 from sqlalchemy.orm import Session
 from typing import Dict, List, Optional
 from common.proto.context_pb2 import Constraint
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from .models.ConstraintModel import ConstraintKindEnum, ConstraintModel
-from .uuids._Builder import get_uuid_random
+from .uuids._Builder import get_uuid_from_string
+from .uuids.EndPoint import endpoint_get_uuid
 
 LOGGER = logging.getLogger(__name__)
 
@@ -31,16 +33,50 @@ def compose_constraints_data(
     dict_constraints : List[Dict] = list()
     for position,constraint in enumerate(constraints):
         str_kind = constraint.WhichOneof('constraint')
+        kind = ConstraintKindEnum._member_map_.get(str_kind.upper()) # pylint: disable=no-member
         dict_constraint = {
-            'constraint_uuid': get_uuid_random(),
-            'position'       : position,
-            'kind'           : ConstraintKindEnum._member_map_.get(str_kind.upper()), # pylint: disable=no-member
-            'data'           : grpc_message_to_json_string(getattr(constraint, str_kind, {})),
-            'created_at'     : now,
-            'updated_at'     : now,
+            'position'  : position,
+            'kind'      : kind,
+            'data'      : grpc_message_to_json_string(getattr(constraint, str_kind, {})),
+            'created_at': now,
+            'updated_at': now,
         }
-        if service_uuid is not None: dict_constraint['service_uuid'] = service_uuid
-        if slice_uuid   is not None: dict_constraint['slice_uuid'  ] = slice_uuid
+
+        parent_kind,parent_uuid = '',None
+        if service_uuid is not None:
+            dict_constraint['service_uuid'] = service_uuid
+            parent_kind,parent_uuid = 'service',service_uuid
+        elif slice_uuid is not None:
+            dict_constraint['slice_uuid'] = slice_uuid
+            parent_kind,parent_uuid = 'slice',slice_uuid
+        else:
+            MSG = 'Parent for Constraint({:s}) cannot be identified (service_uuid={:s}, slice_uuid={:s})'
+            str_constraint = grpc_message_to_json_string(constraint)
+            raise Exception(MSG.format(str_constraint, str(service_uuid), str(slice_uuid)))
+
+        constraint_name = None
+        if kind == ConstraintKindEnum.CUSTOM:
+            constraint_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, constraint.custom.constraint_type)
+        elif kind == ConstraintKindEnum.ENDPOINT_LOCATION:
+            _, _, endpoint_uuid = endpoint_get_uuid(constraint.endpoint_location.endpoint_id, allow_random=False)
+            location_kind = constraint.endpoint_location.location.WhichOneof('location')
+            constraint_name = '{:s}:{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid, location_kind)
+        elif kind == ConstraintKindEnum.ENDPOINT_PRIORITY:
+            _, _, endpoint_uuid = endpoint_get_uuid(constraint.endpoint_priority.endpoint_id, allow_random=False)
+            constraint_name = '{:s}:{:s}:{:s}'.format(parent_kind, kind.value, endpoint_uuid)
+        elif kind in {
+            ConstraintKindEnum.SCHEDULE, ConstraintKindEnum.SLA_CAPACITY, ConstraintKindEnum.SLA_LATENCY,
+            ConstraintKindEnum.SLA_AVAILABILITY, ConstraintKindEnum.SLA_ISOLATION_LEVEL
+        }:
+            constraint_name = '{:s}:{:s}:'.format(parent_kind, kind.value)
+        else:
+            MSG = 'Name for Constraint({:s}) cannot be inferred (service_uuid={:s}, slice_uuid={:s})'
+            str_constraint = grpc_message_to_json_string(constraint)
+            raise Exception(MSG.format(str_constraint, str(service_uuid), str(slice_uuid)))
+
+        constraint_uuid = get_uuid_from_string(constraint_name, prefix_for_name=parent_uuid)
+        dict_constraint['constraint_uuid'] = constraint_uuid
+
         dict_constraints.append(dict_constraint)
     return dict_constraints
 
@@ -48,84 +84,47 @@ def upsert_constraints(
     session : Session, constraints : List[Dict],
     service_uuid : Optional[str] = None, slice_uuid : Optional[str] = None
 ) -> bool:
-    # TODO: do not delete all constraints; just add-remove as needed
-    stmt = delete(ConstraintModel)
-    if service_uuid is not None: stmt = stmt.where(ConstraintModel.service_uuid == service_uuid)
-    if slice_uuid   is not None: stmt = stmt.where(ConstraintModel.slice_uuid   == slice_uuid  )
-    session.execute(stmt)
+    uuids_to_upsert : Dict[str, int] = dict()
+    rules_to_upsert : List[Dict] = list()
+    for constraint in constraints:
+        constraint_uuid = constraint['constraint_uuid']
+        position = uuids_to_upsert.get(constraint_uuid)
+        if position is None:
+            # if not added, add it
+            rules_to_upsert.append(constraint)
+            uuids_to_upsert[constraint_uuid] = len(rules_to_upsert) - 1
+        else:
+            # if already added, update occurrence
+            rules_to_upsert[position] = constraint
+
+    # Delete all constraints not in uuids_to_upsert
+    delete_affected = False
+    if len(uuids_to_upsert) > 0:
+        stmt = delete(ConstraintModel)
+        if service_uuid is not None: stmt = stmt.where(ConstraintModel.service_uuid == service_uuid)
+        if slice_uuid   is not None: stmt = stmt.where(ConstraintModel.slice_uuid   == slice_uuid  )
+        stmt = stmt.where(ConstraintModel.constraint_uuid.not_in(set(uuids_to_upsert.keys())))
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('delete stmt={:s}'.format(str(str_stmt)))
+        constraint_deletes = session.execute(stmt)
+        #LOGGER.warning('constraint_deletes.rowcount={:s}'.format(str(constraint_deletes.rowcount)))
+        delete_affected = int(constraint_deletes.rowcount) > 0
 
-    changed = False
+    upsert_affected = False
     if len(constraints) > 0:
         stmt = insert(ConstraintModel).values(constraints)
-        #stmt = stmt.on_conflict_do_update(
-        #    index_elements=[ConstraintModel.configrule_uuid],
-        #    set_=dict(
-        #        updated_at = stmt.excluded.updated_at,
-        #    )
-        #)
+        stmt = stmt.on_conflict_do_update(
+            index_elements=[ConstraintModel.constraint_uuid],
+            set_=dict(
+                position   = stmt.excluded.position,
+                data       = stmt.excluded.data,
+                updated_at = stmt.excluded.updated_at,
+            )
+        )
         stmt = stmt.returning(ConstraintModel.created_at, ConstraintModel.updated_at)
+        #str_stmt = stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
+        #LOGGER.warning('upsert stmt={:s}'.format(str(str_stmt)))
         constraint_updates = session.execute(stmt).fetchall()
-        changed = any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
+        upsert_affected = any([(updated_at > created_at) for created_at,updated_at in constraint_updates])
 
-    return changed
-
-
-#    def set_constraint(self, db_constraints: ConstraintsModel, grpc_constraint: Constraint, position: int
-#    ) -> Tuple[Union_ConstraintModel, bool]:
-#        with self.session() as session:
-#
-#            grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-#
-#            parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-#            if parser is None:
-#                raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-#                    grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#            # create specific constraint
-#            constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(grpc_constraint)
-#            str_constraint_id = str(uuid.uuid4())
-#            LOGGER.info('str_constraint_id: {}'.format(str_constraint_id))
-#            # str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-#            # str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-#
-#            # result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-#            #     database, constraint_class, str_constraint_key, constraint_data)
-#            constraint_data[constraint_class.main_pk_name()] = str_constraint_id
-#            db_new_constraint = constraint_class(**constraint_data)
-#            result: Tuple[Union_ConstraintModel, bool] = self.database.create_or_update(db_new_constraint)
-#            db_specific_constraint, updated = result
-#
-#            # create generic constraint
-#            # constraint_fk_field_name = 'constraint_uuid'.format(constraint_kind.value)
-#            constraint_data = {
-#                'constraints_uuid': db_constraints.constraints_uuid, 'position': position, 'kind': constraint_kind
-#            }
-#
-#            db_new_constraint = ConstraintModel(**constraint_data)
-#            result: Tuple[Union_ConstraintModel, bool] = self.database.create_or_update(db_new_constraint)
-#            db_constraint, updated = result
-#
-#            return db_constraint, updated
-#
-#    def set_constraints(self, service_uuid: str, constraints_name : str, grpc_constraints
-#    ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-#        with self.session() as session:
-#            # str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
-#            # result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-#            result = session.query(ConstraintsModel).filter_by(constraints_uuid=service_uuid).one_or_none()
-#            created = None
-#            if result:
-#                created = True
-#            session.query(ConstraintsModel).filter_by(constraints_uuid=service_uuid).one_or_none()
-#            db_constraints = ConstraintsModel(constraints_uuid=service_uuid)
-#            session.add(db_constraints)
-#
-#            db_objects = [(db_constraints, created)]
-#
-#            for position,grpc_constraint in enumerate(grpc_constraints):
-#                result : Tuple[ConstraintModel, bool] = self.set_constraint(
-#                    db_constraints, grpc_constraint, position)
-#                db_constraint, updated = result
-#                db_objects.append((db_constraint, updated))
-#
-#            return db_objects
+    return delete_affected or upsert_affected
diff --git a/src/context/service/database/models/ConstraintModel.py b/src/context/service/database/models/ConstraintModel.py
index 51fc0b91d..e7767b4b2 100644
--- a/src/context/service/database/models/ConstraintModel.py
+++ b/src/context/service/database/models/ConstraintModel.py
@@ -20,11 +20,14 @@ from ._Base import _Base
 
 # Enum values should match name of field in Constraint message
 class ConstraintKindEnum(enum.Enum):
-    CUSTOM                        = 'custom'
-    ENDPOINT_LOCATION_REGION      = 'ep_loc_region'
-    ENDPOINT_LOCATION_GPSPOSITION = 'ep_loc_gpspos'
-    ENDPOINT_PRIORITY             = 'ep_priority'
-    SLA_AVAILABILITY              = 'sla_avail'
+    CUSTOM              = 'custom'
+    SCHEDULE            = 'schedule'
+    ENDPOINT_LOCATION   = 'endpoint_location'
+    ENDPOINT_PRIORITY   = 'endpoint_priority'
+    SLA_CAPACITY        = 'sla_capacity'
+    SLA_LATENCY         = 'sla_latency'
+    SLA_AVAILABILITY    = 'sla_availability'
+    SLA_ISOLATION_LEVEL = 'sla_isolation'
 
 class ConstraintModel(_Base):
     __tablename__ = 'constraint'
@@ -46,354 +49,3 @@ class ConstraintModel(_Base):
 
     def dump(self) -> Dict:
         return {self.kind.value: json.loads(self.data)}
-
-
-#import logging, operator
-#from typing import Dict, List, Optional, Tuple, Type, Union
-#from common.orm.HighLevel import get_object, get_or_create_object, update_or_create_object
-#from common.orm.backend.Tools import key_to_str
-#from common.proto.context_pb2 import Constraint
-#from common.tools.grpc.Tools import grpc_message_to_json_string
-#from .EndPointModel import EndPointModel
-#from .Tools import fast_hasher
-#from sqlalchemy import Column, ForeignKey, String, Float, CheckConstraint, Integer, Boolean, Enum
-#from sqlalchemy.dialects.postgresql import UUID
-#from context.service.database.models._Base import Base
-#import enum
-#
-#LOGGER = logging.getLogger(__name__)
-#
-#def remove_dict_key(dictionary : Dict, key : str):
-#    dictionary.pop(key, None)
-#    return dictionary
-#
-#class ConstraintsModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'Constraints'
-#    constraints_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraints_uuid'
-#
-#
-#    def dump(self, constraints) -> List[Dict]:
-#        constraints = sorted(constraints, key=operator.itemgetter('position'))
-#        return [remove_dict_key(constraint, 'position') for constraint in constraints]
-#
-#
-#class ConstraintCustomModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintCustom'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    constraint_type = Column(String, nullable=False)
-#    constraint_value = Column(String, nullable=False)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        return {'custom': {'constraint_type': self.constraint_type, 'constraint_value': self.constraint_value}}
-#
-#
-#Union_ConstraintEndpoint = Union[
-#    'ConstraintEndpointLocationGpsPositionModel', 'ConstraintEndpointLocationRegionModel',
-#    'ConstraintEndpointPriorityModel'
-#]
-#
-#class ConstraintEndpointLocationRegionModel(Model): # pylint: disable=abstract-method
-#    endpoint_fk = ForeignKeyField(EndPointModel)
-#    region = StringField(required=True, allow_empty=False)
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_location': {'endpoint_id': json_endpoint_id, 'location': {'region': self.region}}}
-#
-## def dump_endpoint_id(endpoint_constraint: Union_ConstraintEndpoint):
-##     db_endpoints_pks = list(endpoint_constraint.references(EndPointModel))
-##     num_endpoints = len(db_endpoints_pks)
-##     if num_endpoints != 1:
-##         raise Exception('Wrong number({:d}) of associated Endpoints with constraint'.format(num_endpoints))
-##     db_endpoint = EndPointModel(endpoint_constraint.database, db_endpoints_pks[0])
-##     return db_endpoint.dump_id()
-#
-#
-#class ConstraintEndpointLocationRegionModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointLocationRegion'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    region = Column(String, nullable=False)
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        return {'endpoint_location': {'endpoint_id': endpoint.dump_id(), 'region': self.region}}
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        gps_position = {'latitude': self.latitude, 'longitude': self.longitude}
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_location': {'endpoint_id': json_endpoint_id, 'location': {'gps_position': gps_position}}}
-#
-#class ConstraintEndpointPriorityModel(Model): # pylint: disable=abstract-method
-#    endpoint_fk = ForeignKeyField(EndPointModel)
-#    priority = IntegerField(required=True, min_value=0)
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        json_endpoint_id = EndPointModel(self.database, self.endpoint_fk).dump_id()
-#        return {'endpoint_priority': {'endpoint_id': json_endpoint_id, 'priority': self.priority}}
-#
-#class ConstraintEndpointLocationGpsPositionModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointLocationGpsPosition'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    latitude = Column(Float, CheckConstraint('latitude > -90.0 AND latitude < 90.0'), nullable=False)
-#    longitude = Column(Float, CheckConstraint('longitude > -90.0 AND longitude < 90.0'), nullable=False)
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        gps_position = {'latitude': self.latitude, 'longitude': self.longitude}
-#        return {'endpoint_location': {'endpoint_id': endpoint.dump_id(), 'gps_position': gps_position}}
-#
-#
-#class ConstraintEndpointPriorityModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintEndpointPriority'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    endpoint_uuid = Column(UUID(as_uuid=False), ForeignKey("EndPoint.endpoint_uuid"))
-#    # endpoint_fk = ForeignKeyField(EndPointModel)
-#    # priority = FloatField(required=True)
-#    priority = Column(Float, nullable=False)
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self, endpoint) -> Dict: # pylint: disable=arguments-differ
-#        return {'endpoint_priority': {'endpoint_id': endpoint.dump_id(), 'priority': self.priority}}
-#
-#
-#class ConstraintSlaAvailabilityModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'ConstraintSlaAvailability'
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    # num_disjoint_paths = IntegerField(required=True, min_value=1)
-#    num_disjoint_paths = Column(Integer, CheckConstraint('num_disjoint_paths > 1'), nullable=False)
-#    # all_active = BooleanField(required=True)
-#    all_active = Column(Boolean, nullable=False)
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    def dump(self) -> Dict: # pylint: disable=arguments-differ
-#        return {'sla_availability': {'num_disjoint_paths': self.num_disjoint_paths, 'all_active': self.all_active}}
-#
-#Union_SpecificConstraint = Union[
-#    ConstraintCustomModel, ConstraintEndpointLocationRegionModel, ConstraintEndpointLocationGpsPositionModel,
-#    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel,
-#]
-#
-#class ConstraintModel(Base): # pylint: disable=abstract-method
-#    __tablename__ = 'Constraint'
-#    # pk = PrimaryKeyField()
-#    # constraints_fk = ForeignKeyField(ConstraintsModel)
-#    constraint_uuid = Column(UUID(as_uuid=False), primary_key=True, unique=True)
-#    constraints_uuid = Column(UUID(as_uuid=False), ForeignKey("Constraints.constraints_uuid"), primary_key=True)
-#    # kind = EnumeratedField(ConstraintKindEnum)
-#    kind = Column(Enum(ConstraintKindEnum, create_constraint=False, native_enum=False))
-#    # position = IntegerField(min_value=0, required=True)
-#    position = Column(Integer, CheckConstraint('position >= 0'), nullable=False)
-#    # constraint_custom_fk        = ForeignKeyField(ConstraintCustomModel, required=False)
-#    constraint_custom = Column(UUID(as_uuid=False), ForeignKey("ConstraintCustom.constraint_uuid"))
-#    # constraint_ep_loc_region_fk = ForeignKeyField(ConstraintEndpointLocationRegionModel, required=False)
-#    constraint_ep_loc_region = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointLocationRegion.constraint_uuid"))
-#    # constraint_ep_loc_gpspos_fk = ForeignKeyField(ConstraintEndpointLocationGpsPositionModel, required=False)
-#    constraint_ep_loc_gpspos = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointLocationGpsPosition.constraint_uuid"))
-#    # constraint_ep_priority_fk   = ForeignKeyField(ConstraintEndpointPriorityModel, required=False)
-#    constraint_ep_priority = Column(UUID(as_uuid=False), ForeignKey("ConstraintEndpointPriority.constraint_uuid"),)
-#    # constraint_sla_avail_fk     = ForeignKeyField(ConstraintSlaAvailabilityModel, required=False)
-#    constraint_sla_avail = Column(UUID(as_uuid=False), ForeignKey("ConstraintSlaAvailability.constraint_uuid"))
-#
-#    @staticmethod
-#    def main_pk_name():
-#        return 'constraint_uuid'
-#
-#    # def delete(self) -> None:
-#    #     field_name = 'constraint_{:s}_fk'.format(str(self.kind.value))
-#    #     specific_fk_value : Optional[ForeignKeyField] = getattr(self, field_name, None)
-#    #     if specific_fk_value is None:
-#    #         raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
-#    #     specific_fk_class = getattr(ConstraintModel, field_name, None)
-#    #     foreign_model_class : Model = specific_fk_class.foreign_model
-#    #     super().delete()
-#    #     get_object(self.database, foreign_model_class, str(specific_fk_value)).delete()
-#
-#    def dump(self, include_position=True) -> Dict: # pylint: disable=arguments-differ
-#        field_name = 'constraint_{:s}'.format(str(self.kind.value))
-#        specific_fk_value = getattr(self, field_name, None)
-#        if specific_fk_value is None:
-#            raise Exception('Unable to find constraint key for field_name({:s})'.format(field_name))
-#        specific_fk_class = getattr(ConstraintModel, field_name, None)
-#        foreign_model_class: Base = specific_fk_class.foreign_model
-#        constraint: Union_SpecificConstraint = get_object(self.database, foreign_model_class, str(specific_fk_value))
-#        result = constraint.dump()
-#        if include_position:
-#            result['position'] = self.position
-#        return result
-#
-#Tuple_ConstraintSpecs = Tuple[Type, str, Dict, ConstraintKindEnum]
-#
-#def parse_constraint_custom(grpc_constraint) -> Tuple_ConstraintSpecs:
-#    constraint_class = ConstraintCustomModel
-#    str_constraint_id = grpc_constraint.custom.constraint_type
-#    constraint_data = {
-#        'constraint_type' : grpc_constraint.custom.constraint_type,
-#        'constraint_value': grpc_constraint.custom.constraint_value,
-#    }
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.CUSTOM
-#
-#def parse_constraint_endpoint_location(db_endpoint, grpc_constraint) -> Tuple_ConstraintSpecs:
-#    grpc_endpoint_id = grpc_constraint.endpoint_location.endpoint_id
-#    # str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#
-#    str_constraint_id = db_endpoint.endpoint_uuid
-#    constraint_data = {'endpoint_fk': db_endpoint}
-#
-#    grpc_location = grpc_constraint.endpoint_location.location
-#    location_kind = str(grpc_location.WhichOneof('location'))
-#    if location_kind == 'region':
-#        constraint_class = ConstraintEndpointLocationRegionModel
-#        constraint_data.update({'region': grpc_location.region})
-#        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_REGION
-#    elif location_kind == 'gps_position':
-#        constraint_class = ConstraintEndpointLocationGpsPositionModel
-#        gps_position = grpc_location.gps_position
-#        constraint_data.update({'latitude': gps_position.latitude, 'longitude': gps_position.longitude})
-#        return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_LOCATION_GPSPOSITION
-#    else:
-#        MSG = 'Location kind {:s} in Constraint of kind endpoint_location is not implemented: {:s}'
-#        raise NotImplementedError(MSG.format(location_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#def parse_constraint_endpoint_priority(db_endpoint, grpc_constraint) -> Tuple_ConstraintSpecs:
-#    grpc_endpoint_id = grpc_constraint.endpoint_priority.endpoint_id
-#    # str_endpoint_key, db_endpoint = get_endpoint(database, grpc_endpoint_id)
-#
-#    constraint_class = ConstraintEndpointPriorityModel
-#    str_constraint_id = db_endpoint.endpoint_uuid
-#    priority = grpc_constraint.endpoint_priority.priority
-#    constraint_data = {'endpoint_fk': db_endpoint, 'priority': priority}
-#
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.ENDPOINT_PRIORITY
-#
-#def parse_constraint_sla_availability(grpc_constraint) -> Tuple_ConstraintSpecs:
-#    constraint_class = ConstraintSlaAvailabilityModel
-#    str_constraint_id = ''
-#    constraint_data = {
-#        'num_disjoint_paths' : grpc_constraint.sla_availability.num_disjoint_paths,
-#        'all_active': grpc_constraint.sla_availability.all_active,
-#    }
-#    return constraint_class, str_constraint_id, constraint_data, ConstraintKindEnum.SLA_AVAILABILITY
-#
-#CONSTRAINT_PARSERS = {
-#    'custom'            : parse_constraint_custom,
-#    'endpoint_location' : parse_constraint_endpoint_location,
-#    'endpoint_priority' : parse_constraint_endpoint_priority,
-#    'sla_availability'  : parse_constraint_sla_availability,
-#}
-#
-#Union_ConstraintModel = Union[
-#    ConstraintCustomModel, ConstraintEndpointLocationGpsPositionModel, ConstraintEndpointLocationRegionModel,
-#    ConstraintEndpointPriorityModel, ConstraintSlaAvailabilityModel
-#]
-#
-## def set_constraint(
-##     db_constraints : ConstraintsModel, grpc_constraint : Constraint, position : int
-## ) -> Tuple[Union_ConstraintModel, bool]:
-##     grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-##
-##     parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-##     if parser is None:
-##         raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-##             grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-##
-##     # create specific constraint
-##     constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(database, grpc_constraint)
-##     str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-##     str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-##     result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-##         database, constraint_class, str_constraint_key, constraint_data)
-##     db_specific_constraint, updated = result
-##
-##     # create generic constraint
-##     constraint_fk_field_name = 'constraint_{:s}_fk'.format(constraint_kind.value)
-##     constraint_data = {
-##         'constraints_fk': db_constraints, 'position': position, 'kind': constraint_kind,
-##         constraint_fk_field_name: db_specific_constraint
-##     }
-##     result : Tuple[ConstraintModel, bool] = update_or_create_object(
-##         database, ConstraintModel, str_constraint_key, constraint_data)
-##     db_constraint, updated = result
-##
-##     return db_constraint, updated
-##
-## def set_constraints(
-##     database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
-## ) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-##
-##     str_constraints_key = key_to_str([db_parent_pk, constraints_name], separator=':')
-##     result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-##     db_constraints, created = result
-##
-##     db_objects = [(db_constraints, created)]
-##
-##     for position,grpc_constraint in enumerate(grpc_constraints):
-##         result : Tuple[ConstraintModel, bool] = set_constraint(
-##             database, db_constraints, grpc_constraint, position)
-##         db_constraint, updated = result
-##         db_objects.append((db_constraint, updated))
-##
-##     return db_objects
-#def set_constraint(
-#    database : Database, db_constraints : ConstraintsModel, grpc_constraint : Constraint, position : int
-#) -> Tuple[Union_ConstraintModel, bool]:
-#    grpc_constraint_kind = str(grpc_constraint.WhichOneof('constraint'))
-#
-#    parser = CONSTRAINT_PARSERS.get(grpc_constraint_kind)
-#    if parser is None:
-#        raise NotImplementedError('Constraint of kind {:s} is not implemented: {:s}'.format(
-#            grpc_constraint_kind, grpc_message_to_json_string(grpc_constraint)))
-#
-#    # create specific constraint
-#    constraint_class, str_constraint_id, constraint_data, constraint_kind = parser(database, grpc_constraint)
-#    str_constraint_key_hash = fast_hasher(':'.join([constraint_kind.value, str_constraint_id]))
-#    str_constraint_key = key_to_str([db_constraints.pk, str_constraint_key_hash], separator=':')
-#    result : Tuple[Union_ConstraintModel, bool] = update_or_create_object(
-#        database, constraint_class, str_constraint_key, constraint_data)
-#    db_specific_constraint, updated = result
-#
-#    # create generic constraint
-#    constraint_fk_field_name = 'constraint_{:s}_fk'.format(constraint_kind.value)
-#    constraint_data = {
-#        'constraints_fk': db_constraints, 'position': position, 'kind': constraint_kind,
-#        constraint_fk_field_name: db_specific_constraint
-#    }
-#    result : Tuple[ConstraintModel, bool] = update_or_create_object(
-#        database, ConstraintModel, str_constraint_key, constraint_data)
-#    db_constraint, updated = result
-#
-#    return db_constraint, updated
-#
-#def set_constraints(
-#    database : Database, db_parent_pk : str, constraints_name : str, grpc_constraints
-#) -> List[Tuple[Union[ConstraintsModel, ConstraintModel], bool]]:
-#
-#    str_constraints_key = key_to_str([constraints_name, db_parent_pk], separator=':')
-#    result : Tuple[ConstraintsModel, bool] = get_or_create_object(database, ConstraintsModel, str_constraints_key)
-#    db_constraints, created = result
-#
-#    db_objects = [(db_constraints, created)]
-#
-#    for position,grpc_constraint in enumerate(grpc_constraints):
-#        result : Tuple[ConstraintModel, bool] = set_constraint(
-#            database, db_constraints, grpc_constraint, position)
-#        db_constraint, updated = result
-#        db_objects.append((db_constraint, updated))
-#
-#    return db_objects
-- 
GitLab


From 7a612dd10d068f0fa17f38cb9b671b90d84239c7 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 15:23:26 +0000
Subject: [PATCH 29/31] Device component:

- Added conditions to prevent reaching the driver when there is nothing to configure/deconfigure
---
 src/device/service/Tools.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/device/service/Tools.py b/src/device/service/Tools.py
index 970a35e37..9245e08a8 100644
--- a/src/device/service/Tools.py
+++ b/src/device/service/Tools.py
@@ -168,7 +168,7 @@ def compute_rules_to_add_delete(
     return resources_to_set, resources_to_delete
 
 def configure_rules(device : Device, driver : _Driver, resources_to_set : List[Tuple[str, Any]]) -> List[str]:
-    device_uuid = device.device_id.device_uuid.uuid
+    if len(resources_to_set) == 0: return []
 
     results_setconfig = driver.SetConfig(resources_to_set)
     results_setconfig = [
@@ -176,11 +176,12 @@ def configure_rules(device : Device, driver : _Driver, resources_to_set : List[T
         for (resource_key, resource_value), result in zip(resources_to_set, results_setconfig)
     ]
 
+    device_uuid = device.device_id.device_uuid.uuid
     return _raw_config_rules_to_grpc(
         device_uuid, device.device_config, ERROR_SET, ConfigActionEnum.CONFIGACTION_SET, results_setconfig)
 
 def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : List[Tuple[str, Any]]) -> List[str]:
-    device_uuid = device.device_id.device_uuid.uuid
+    if len(resources_to_delete) == 0: return []
 
     results_deleteconfig = driver.DeleteConfig(resources_to_delete)
     results_deleteconfig = [
@@ -188,6 +189,7 @@ def deconfigure_rules(device : Device, driver : _Driver, resources_to_delete : L
         for (resource_key, resource_value), result in zip(resources_to_delete, results_deleteconfig)
     ]
 
+    device_uuid = device.device_id.device_uuid.uuid
     return _raw_config_rules_to_grpc(
         device_uuid, device.device_config, ERROR_DELETE, ConfigActionEnum.CONFIGACTION_DELETE, results_deleteconfig)
 
-- 
GitLab


From 41befba7088c8db82ec3e4bfd7d2e80ad715cf1f Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 15:24:10 +0000
Subject: [PATCH 30/31] Service component:

- Added update of ServiceType during UpdateService
---
 src/service/service/ServiceServiceServicerImpl.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index 87eda2678..7d9ddeffb 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -16,7 +16,7 @@ import grpc, json, logging
 from typing import Optional
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.method_wrappers.ServiceExceptions import AlreadyExistsException, InvalidArgumentException
-from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum
+from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum
 from common.proto.pathcomp_pb2 import PathCompRequest
 from common.proto.service_pb2_grpc import ServiceServiceServicer
 from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
@@ -93,6 +93,8 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         _service : Optional[Service] = get_service(context_client, request.service_id)
         service = Service()
         service.CopyFrom(request if _service is None else _service)
+        if service.service_type == ServiceTypeEnum.SERVICETYPE_UNKNOWN:                     # pylint: disable=no-member
+            service.service_type = request.service_type                                     # pylint: disable=no-member
         service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED     # pylint: disable=no-member
 
         del service.service_endpoint_ids[:] # pylint: disable=no-member
-- 
GitLab


From f6fe2ee80f94f39ab0bdd45d5afb04f49f88fbeb Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Sun, 29 Jan 2023 15:25:07 +0000
Subject: [PATCH 31/31] Slice component:

- Corrected logic of Slice to take into account Context-generated UUIDs
---
 src/slice/service/SliceServiceServicerImpl.py | 65 ++++++++++---------
 1 file changed, 35 insertions(+), 30 deletions(-)

diff --git a/src/slice/service/SliceServiceServicerImpl.py b/src/slice/service/SliceServiceServicerImpl.py
index d693abd8f..413750662 100644
--- a/src/slice/service/SliceServiceServicerImpl.py
+++ b/src/slice/service/SliceServiceServicerImpl.py
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import Optional
 import grpc, json, logging #, deepdiff
 from common.proto.context_pb2 import (
     Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum, Slice, SliceId, SliceStatusEnum)
 from common.proto.slice_pb2_grpc import SliceServiceServicer
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.tools.context_queries.InterDomain import is_multi_domain
+from common.tools.context_queries.Slice import get_slice
 from common.tools.grpc.ConfigRules import copy_config_rules
 from common.tools.grpc.Constraints import copy_constraints
 from common.tools.grpc.EndPointIds import copy_endpoint_ids
@@ -37,29 +39,32 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         LOGGER.debug('Servicer Created')
 
     def create_update(self, request : Slice) -> SliceId:
+        # Set slice status to "SERVICESTATUS_PLANNED" to ensure rest of components are aware the slice is
+        # being modified.
         context_client = ContextClient()
-        try:
-            _slice = context_client.GetSlice(request.slice_id)
-            #json_current_slice = grpc_message_to_json(_slice)
-        except: # pylint: disable=bare-except
-            #json_current_slice = {}
-            slice_request = Slice()
-            slice_request.slice_id.CopyFrom(request.slice_id) # pylint: disable=no-member
-            slice_request.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_PLANNED # pylint: disable=no-member
-            context_client.SetSlice(slice_request)
-            _slice = context_client.GetSlice(request.slice_id)
+        slice_ro : Optional[Service] = get_slice(
+            context_client, request.slice_id.slice_uuid.uuid, request.slice_id.context_id.context_uuid.uuid,
+            rw_copy=False)
+
+        slice_rw = Slice()
+        slice_rw.CopyFrom(request if slice_ro is None else slice_ro)
+        slice_rw.name = request.name
+        slice_rw.slice_owner.CopyFrom(request.slice_owner)                          # pylint: disable=no-member
+        slice_rw.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_PLANNED    # pylint: disable=no-member
 
-        slice_request = Slice()
-        slice_request.CopyFrom(_slice)
+        copy_endpoint_ids(request.slice_endpoint_ids,        slice_rw.slice_endpoint_ids       ) # pylint: disable=no-member
+        copy_constraints (request.slice_constraints,         slice_rw.slice_constraints        ) # pylint: disable=no-member
+        copy_config_rules(request.slice_config.config_rules, slice_rw.slice_config.config_rules) # pylint: disable=no-member
 
-        if len(request.slice_endpoint_ids) < 2:
+        slice_id_with_uuids = context_client.SetSlice(slice_rw)
+
+        if len(slice_rw.slice_endpoint_ids) < 2: # pylint: disable=no-member
             # unable to identify the kind of slice; just update endpoints, constraints and config rules
             # update the slice in database, and return
             # pylint: disable=no-member
-            copy_endpoint_ids(request.slice_endpoint_ids, slice_request.slice_endpoint_ids)
-            copy_constraints(request.slice_constraints, slice_request.slice_constraints)
-            copy_config_rules(request.slice_config.config_rules, slice_request.slice_config.config_rules)
-            return context_client.SetSlice(slice_request)
+            return context_client.SetSlice(slice_rw)
+
+        slice_with_uuids = context_client.GetSlice(slice_id_with_uuids)
 
         #LOGGER.info('json_current_slice = {:s}'.format(str(json_current_slice)))
         #json_updated_slice = grpc_message_to_json(request)
@@ -67,9 +72,9 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         #changes = deepdiff.DeepDiff(json_current_slice, json_updated_slice)
         #LOGGER.info('changes = {:s}'.format(str(changes)))
 
-        if is_multi_domain(context_client, request.slice_endpoint_ids):
+        if is_multi_domain(context_client, slice_with_uuids.slice_endpoint_ids):
             interdomain_client = InterdomainClient()
-            slice_id = interdomain_client.RequestSlice(request)
+            slice_id = interdomain_client.RequestSlice(slice_with_uuids)
             slice_ = context_client.GetSlice(slice_id)
             slice_active = Slice()
             slice_active.CopyFrom(slice_)
@@ -80,8 +85,8 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         # Local domain slice
         service_id = ServiceId()
         # pylint: disable=no-member
-        context_uuid = service_id.context_id.context_uuid.uuid = request.slice_id.context_id.context_uuid.uuid
-        service_uuid = service_id.service_uuid.uuid = request.slice_id.slice_uuid.uuid
+        context_uuid = service_id.context_id.context_uuid.uuid = slice_with_uuids.slice_id.context_id.context_uuid.uuid
+        service_uuid = service_id.service_uuid.uuid = slice_with_uuids.slice_id.slice_uuid.uuid
 
         service_client = ServiceClient()
         try:
@@ -136,13 +141,13 @@ class SliceServiceServicerImpl(SliceServiceServicer):
 
         service_client.UpdateService(service_request)
 
-        copy_endpoint_ids(request.slice_endpoint_ids, slice_request.slice_endpoint_ids)
-        copy_constraints(request.slice_constraints, slice_request.slice_constraints)
-        copy_config_rules(request.slice_config.config_rules, slice_request.slice_config.config_rules)
+        #copy_endpoint_ids(request.slice_endpoint_ids, slice_with_uuids.slice_endpoint_ids)
+        #copy_constraints(request.slice_constraints, slice_with_uuids.slice_constraints)
+        #copy_config_rules(request.slice_config.config_rules, slice_with_uuids.slice_config.config_rules)
 
-        update_service_ids(slice_request.slice_service_ids, context_uuid, service_uuid)
-        context_client.SetSlice(slice_request)
-        slice_id = slice_request.slice_id
+        update_service_ids(slice_with_uuids.slice_service_ids, context_uuid, service_uuid)
+        context_client.SetSlice(slice_with_uuids)
+        slice_id = slice_with_uuids.slice_id
 
         slice_ = context_client.GetSlice(slice_id)
         slice_active = Slice()
@@ -194,14 +199,14 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         else:
             current_slice = Slice()
             current_slice.CopyFrom(_slice)
-            current_slice.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_DEINIT
+            current_slice.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_DEINIT # pylint: disable=no-member
             context_client.SetSlice(current_slice)
 
             service_client = ServiceClient()
             for service_id in _slice.slice_service_ids:
                 current_slice = Slice()
-                current_slice.slice_id.CopyFrom(_slice.slice_id)
-                slice_service_id = current_slice.slice_service_ids.add()
+                current_slice.slice_id.CopyFrom(_slice.slice_id) # pylint: disable=no-member
+                slice_service_id = current_slice.slice_service_ids.add() # pylint: disable=no-member
                 slice_service_id.CopyFrom(service_id)
                 context_client.UnsetSlice(current_slice)
 
-- 
GitLab