diff --git a/src/dbscanserving/.gitlab-ci.yml b/src/dbscanserving/.gitlab-ci.yml
index a9d99f41e8d32d596effe166a95ac38c7cfe991f..01545f3a24fdd4b5753fbf1a9c36519ba37d015c 100644
--- a/src/dbscanserving/.gitlab-ci.yml
+++ b/src/dbscanserving/.gitlab-ci.yml
@@ -30,6 +30,8 @@ build dbscanserving:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
+      - src/common/**/*.py
+      - proto/*.proto
       - src/$IMAGE_NAME/**/*.{py,in,yml}
       - src/$IMAGE_NAME/Dockerfile
       - src/$IMAGE_NAME/tests/*.py
@@ -48,25 +50,33 @@ unit_test dbscanserving:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
     - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi
+    - docker container prune -f
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run --name $IMAGE_NAME -d -p 10008:10008 -v "$PWD/src/$IMAGE_NAME/tests:/home/${IMAGE_NAME}/results" --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
-    - sleep 5
-    - docker ps -a
-    - docker logs $IMAGE_NAME
+    - >
+      docker run --name $IMAGE_NAME -d -p 10008:10008
+      --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results"
+      --network=teraflowbridge
+      $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - docker ps -a
     - sleep 5
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/home/${IMAGE_NAME}/results/${IMAGE_NAME}_report.xml"
+    - >
+      docker exec -i $IMAGE_NAME bash -c
+      "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_unitary.py"
     - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
   coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
     - docker rm -f $IMAGE_NAME
     - docker network rm teraflowbridge
+    - docker volume prune --force
+    - docker image prune --force
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
-    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' 
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
+      - src/common/**/*.py
+      - proto/*.proto
       - src/$IMAGE_NAME/**/*.{py,in,yml}
       - src/$IMAGE_NAME/Dockerfile
       - src/$IMAGE_NAME/tests/*.py
@@ -78,29 +88,28 @@ unit_test dbscanserving:
     reports:
       junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
 
-
 # Deployment of the dbscanserving service in Kubernetes Cluster
-# deploy dbscanserving:
-#   variables:
-#     IMAGE_NAME: 'dbscanserving' # name of the microservice
-#     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-#   stage: deploy
-#   needs:
-#     - unit test dbscanserving
-#     # - integ_test execute
-#   script:
-#     - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
-#     - kubectl version
-#     - kubectl get all
-#     - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
-#     - kubectl get all
-#   # environment:
-#   #   name: test
-#   #   url: https://example.com
-#   #   kubernetes:
-#   #     namespace: test
-#   rules:
-#     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
-#       when: manual    
-#     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
-#       when: manual
\ No newline at end of file
+#deploy dbscanserving:
+#  variables:
+#    IMAGE_NAME: 'dbscanserving' # name of the microservice
+#    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+#  stage: deploy
+#  needs:
+#    - unit_test dbscanserving
+#    # - integ_test execute
+#  script:
+#    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+#    - kubectl version
+#    - kubectl get all
+#    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+#    - kubectl get all
+#  # environment:
+#  #   name: test
+#  #   url: https://example.com
+#  #   kubernetes:
+#  #     namespace: test
+#  rules:
+#    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+#      when: manual
+#    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+#      when: manual
diff --git a/src/dbscanserving/Dockerfile b/src/dbscanserving/Dockerfile
index 703a36efaec97351581ac5a472295bb1959934cc..1532be8286ce9e72d66f34297cb0d5fe0cb94006 100644
--- a/src/dbscanserving/Dockerfile
+++ b/src/dbscanserving/Dockerfile
@@ -27,19 +27,6 @@ RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
     wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
     chmod +x /bin/grpc_health_probe
 
-# Creating a user for security reasons
-RUN groupadd -r teraflow && useradd -u 1001 --no-log-init -r -m -g teraflow teraflow
-USER teraflow
-
-# set working directory
-RUN mkdir -p /home/teraflow/controller/common
-WORKDIR /home/teraflow/controller
-
-# Get Python packages per module
-ENV VIRTUAL_ENV=/home/teraflow/venv
-RUN python3 -m venv ${VIRTUAL_ENV}
-ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
-
 # Get generic Python packages
 RUN python3 -m pip install --upgrade pip
 RUN python3 -m pip install --upgrade setuptools wheel
@@ -47,36 +34,35 @@ RUN python3 -m pip install --upgrade pip-tools
 
 # Get common Python packages
 # Note: this step enables sharing the previous Docker build steps among all the Python components
-COPY --chown=teraflow:teraflow common_requirements.in common_requirements.in
+WORKDIR /var/teraflow
+COPY common_requirements.in common_requirements.in
 RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
 RUN python3 -m pip install -r common_requirements.txt
 
 # Add common files into working directory
-WORKDIR /home/teraflow/controller/common
-COPY --chown=teraflow:teraflow src/common/. ./
+WORKDIR /var/teraflow/common
+COPY src/common/. ./
 RUN rm -rf proto
 
 # Create proto sub-folder, copy .proto files, and generate Python code
-RUN mkdir -p /home/teraflow/controller/common/proto
-WORKDIR /home/teraflow/controller/common/proto
+RUN mkdir -p /var/teraflow/common/proto
+WORKDIR /var/teraflow/common/proto
 RUN touch __init__.py
-COPY --chown=teraflow:teraflow proto/*.proto ./
+COPY proto/*.proto ./
 RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
 RUN rm *.proto
 RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
 
-# Create module sub-folders
-RUN mkdir -p /home/teraflow/controller/dbscanserving
-WORKDIR /home/teraflow/controller
-
-# Get Python packages per module
-COPY --chown=teraflow:teraflow ./src/dbscanserving/requirements.in dbscanserving/requirements.in
-# consider common and specific requirements to avoid inconsistencies with dependencies
-RUN pip-compile --quiet --output-file=dbscanserving/requirements.txt dbscanserving/requirements.in common_requirements.in
-RUN python3 -m pip install -r dbscanserving/requirements.txt
+# Create component sub-folders, get specific Python packages
+RUN mkdir -p /var/teraflow/dbscanserving
+WORKDIR /var/teraflow/dbscanserving
+COPY src/dbscanserving/requirements.in requirements.in
+RUN pip-compile --quiet --output-file=requirements.txt requirements.in
+RUN python3 -m pip install -r requirements.txt
 
 # Add component files into working directory
-COPY --chown=teraflow:teraflow ./src/dbscanserving/. dbscanserving
+WORKDIR /var/teraflow
+COPY src/dbscanserving/. dbscanserving/
 
 # Start the service
 ENTRYPOINT ["python", "-m", "dbscanserving.service"]
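For context, the `sed` step above rewrites the absolute imports that `grpc_tools.protoc` emits (e.g. `import context_pb2 as context__pb2`) into relative imports, so the generated modules resolve when loaded as part of the `common/proto` package. A minimal Python sketch of the same substitution (illustration only, not part of the image build):

```python
import re

def rewrite_pb2_import(line: str) -> str:
    # Mirrors the sed rule: prepend "from . " to any generated _pb2 import.
    return re.sub(r'(import .*)_pb2', r'from . \1_pb2', line)

assert rewrite_pb2_import('import context_pb2 as context__pb2') == \
    'from . import context_pb2 as context__pb2'
```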
diff --git a/src/service/service/task_scheduler/TaskExecutor.py b/src/service/service/task_scheduler/TaskExecutor.py
index ac06e321d342fc7cddb9f54958a38ed87067c922..362d72959ff918f60ae6549e7d7714c7768ca242 100644
--- a/src/service/service/task_scheduler/TaskExecutor.py
+++ b/src/service/service/task_scheduler/TaskExecutor.py
@@ -14,10 +14,9 @@
 
 import json, logging
 from enum import Enum
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 from common.DeviceTypes import DeviceTypeEnum
 from common.method_wrappers.ServiceExceptions import NotFoundException
-from typing import List
 from common.proto.qkd_app_pb2 import QKDAppStatusEnum
 from common.proto.context_pb2 import (
     Connection, ConnectionId, Device, DeviceDriverEnum, DeviceId, Service, ServiceId,
diff --git a/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockServiceHandler.py b/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockServiceHandler.py
index 9b3f76566c9d8e5b2c8bdfb05f4b2448c29b7eae..11f221f1516915235f6b70f79e6847bd50011129 100644
--- a/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockServiceHandler.py
+++ b/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockServiceHandler.py
@@ -14,13 +14,15 @@
 
 import json, logging
 from typing import Any, Dict, List, Optional, Tuple, Union
-from common.proto.context_pb2 import ConfigRule, DeviceId, Service
+from common.proto.context_pb2 import ConfigRule, ConnectionId, DeviceId, Service
+from common.tools.object_factory.Connection import json_connection_id
 from common.tools.object_factory.Device import json_device_id
 from common.type_checkers.Checkers import chk_type
 from service.service.service_handler_api._ServiceHandler import _ServiceHandler
 from service.service.service_handler_api.SettingsHandler import SettingsHandler
 from service.service.service_handler_api.Tools import get_device_endpoint_uuids, get_endpoint_matching
 from .MockTaskExecutor import MockTaskExecutor
+from service.service.tools.EndpointIdFormatters import endpointids_to_raw
 from service.service.service_handlers.l3nm_gnmi_openconfig.ConfigRuleComposer import ConfigRuleComposer
 from service.service.service_handlers.l3nm_gnmi_openconfig.StaticRouteGenerator import StaticRouteGenerator
 
@@ -48,17 +50,20 @@ class MockServiceHandler(_ServiceHandler):
 
             device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid)))
             device_settings = self.__settings_handler.get_device_settings(device_obj)
+            self.__config_rule_composer.set_device_alias(device_obj.name, device_uuid)
             _device = self.__config_rule_composer.get_device(device_obj.name)
             _device.configure(device_obj, device_settings)
 
             endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid)
             endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj)
+            _device.set_endpoint_alias(endpoint_obj.name, endpoint_uuid)
             _endpoint = _device.get_endpoint(endpoint_obj.name)
             _endpoint.configure(endpoint_obj, endpoint_settings)
 
             self.__endpoint_map[(device_uuid, endpoint_uuid)] = (device_obj.name, endpoint_obj.name)
 
         self.__static_route_generator.compose(endpoints)
+        LOGGER.debug('config_rule_composer = {:s}'.format(json.dumps(self.__config_rule_composer.dump())))
 
     def _do_configurations(
         self, config_rules_per_device : Dict[str, List[Dict]], endpoints : List[Tuple[str, str, Optional[str]]],
@@ -94,7 +99,12 @@ class MockServiceHandler(_ServiceHandler):
     ) -> List[Union[bool, Exception]]:
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
-        self._compose_config_rules(endpoints)
+        #service_uuid = self.__service.service_id.service_uuid.uuid
+        connection = self.__task_executor.get_connection(ConnectionId(**json_connection_id(connection_uuid)))
+        connection_endpoint_ids = endpointids_to_raw(connection.path_hops_endpoint_ids)
+        self._compose_config_rules(connection_endpoint_ids)
+        #network_instance_name = service_uuid.split('-')[0]
+        #config_rules_per_device = self.__config_rule_composer.get_config_rules(network_instance_name, delete=False)
         config_rules_per_device = self.__config_rule_composer.get_config_rules(delete=False)
         LOGGER.debug('config_rules_per_device={:s}'.format(str(config_rules_per_device)))
         results = self._do_configurations(config_rules_per_device, endpoints)
@@ -106,7 +116,12 @@ class MockServiceHandler(_ServiceHandler):
     ) -> List[Union[bool, Exception]]:
         chk_type('endpoints', endpoints, list)
         if len(endpoints) == 0: return []
-        self._compose_config_rules(endpoints)
+        #service_uuid = self.__service.service_id.service_uuid.uuid
+        connection = self.__task_executor.get_connection(ConnectionId(**json_connection_id(connection_uuid)))
+        connection_endpoint_ids = endpointids_to_raw(connection.path_hops_endpoint_ids)
+        self._compose_config_rules(connection_endpoint_ids)
+        #network_instance_name = service_uuid.split('-')[0]
+        #config_rules_per_device = self.__config_rule_composer.get_config_rules(network_instance_name, delete=True)
         config_rules_per_device = self.__config_rule_composer.get_config_rules(delete=True)
         LOGGER.debug('config_rules_per_device={:s}'.format(str(config_rules_per_device)))
         results = self._do_configurations(config_rules_per_device, endpoints, delete=True)
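Note the behavioral change in this hunk: `SetEndpoint`/`DeleteEndpoint` no longer compose rules from the `endpoints` argument alone; they fetch the `Connection` and compose from its `path_hops_endpoint_ids`, so intermediate hops along the path are configured as well. `endpointids_to_raw` comes from `service.service.tools.EndpointIdFormatters`; below is a hedged sketch of the conversion it is assumed to perform (the real helper may differ):

```python
from typing import List, Optional, Tuple
from common.proto.context_pb2 import EndPointId

def endpointids_to_raw_sketch(
    endpoint_ids : List[EndPointId]
) -> List[Tuple[str, str, Optional[str]]]:
    # Flatten EndPointId protos into the (device_uuid, endpoint_uuid,
    # topology_uuid) tuples consumed by _compose_config_rules.
    raw : List[Tuple[str, str, Optional[str]]] = []
    for endpoint_id in endpoint_ids:
        device_uuid   = endpoint_id.device_id.device_uuid.uuid
        endpoint_uuid = endpoint_id.endpoint_uuid.uuid
        topology_uuid = endpoint_id.topology_id.topology_uuid.uuid or None
        raw.append((device_uuid, endpoint_uuid, topology_uuid))
    return raw
```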
diff --git a/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockTaskExecutor.py b/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockTaskExecutor.py
index 765b04477efdf06bfef934e96329887e898aa1b4..37e171026f968cc1680c4eb4d891b8820240f6e4 100644
--- a/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockTaskExecutor.py
+++ b/src/service/tests/test_l3nm_gnmi_static_rule_gen/MockTaskExecutor.py
@@ -14,10 +14,12 @@
 
 import logging
 from enum import Enum
-from typing import Dict, Optional, Union
+from typing import Any, Dict, Optional, Union
 from common.method_wrappers.ServiceExceptions import NotFoundException
-from common.proto.context_pb2 import Connection, Device, DeviceId, Service
-from service.service.tools.ObjectKeys import get_device_key
+from common.proto.context_pb2 import (
+    Connection, ConnectionId, Device, DeviceId, Service
+)
+from service.service.tools.ObjectKeys import get_connection_key, get_device_key
 
 LOGGER = logging.getLogger(__name__)
 
@@ -46,6 +48,32 @@ class MockTaskExecutor:
         object_key = '{:s}:{:s}'.format(object_type.value, object_key)
         self._grpc_objects_cache.pop(object_key, None)
 
+    def _store_editable_grpc_object(
+        self, object_type : CacheableObjectType, object_key : str, grpc_class, grpc_ro_object
+    ) -> Any:
+        grpc_rw_object = grpc_class()
+        grpc_rw_object.CopyFrom(grpc_ro_object)
+        self._store_grpc_object(object_type, object_key, grpc_rw_object)
+        return grpc_rw_object
+
+    # ----- Connection-related methods ---------------------------------------------------------------------------------
+
+    def get_connection(self, connection_id : ConnectionId) -> Connection:
+        connection_key = get_connection_key(connection_id)
+        connection = self._load_grpc_object(CacheableObjectType.CONNECTION, connection_key)
+        if connection is None: raise NotFoundException('Connection', connection_key)
+        return connection
+
+    def set_connection(self, connection : Connection) -> None:
+        connection_key = get_connection_key(connection.connection_id)
+        self._store_grpc_object(CacheableObjectType.CONNECTION, connection_key, connection)
+
+    def delete_connection(self, connection_id : ConnectionId) -> None:
+        connection_key = get_connection_key(connection_id)
+        self._delete_grpc_object(CacheableObjectType.CONNECTION, connection_key)
+
+    # ----- Device-related methods -------------------------------------------------------------------------------------
+
     def get_device(self, device_id : DeviceId) -> Device:
         device_key = get_device_key(device_id)
         device = self._load_grpc_object(CacheableObjectType.DEVICE, device_key)
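The new connection methods mirror the existing device ones: objects live in the same `'<type>:<key>'` cache, and `get_connection` raises `NotFoundException` on a miss. A hedged usage sketch, assuming the connection key is derived from the connection UUID (the `'conn-example'` id is illustrative):

```python
from common.proto.context_pb2 import Connection

executor = MockTaskExecutor()
connection = Connection()
connection.connection_id.connection_uuid.uuid = 'conn-example'  # hypothetical id
executor.set_connection(connection)
fetched = executor.get_connection(connection.connection_id)     # cache hit
assert fetched.connection_id.connection_uuid.uuid == 'conn-example'
executor.delete_connection(connection.connection_id)            # next get raises NotFoundException
```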
diff --git a/src/service/tests/test_l3nm_gnmi_static_rule_gen/test_unitary_sns4sns.py b/src/service/tests/test_l3nm_gnmi_static_rule_gen/test_unitary_sns4sns.py
new file mode 100644
index 0000000000000000000000000000000000000000..359eaab39a00c6cf1df4c989a301e3dee24049c1
--- /dev/null
+++ b/src/service/tests/test_l3nm_gnmi_static_rule_gen/test_unitary_sns4sns.py
@@ -0,0 +1,108 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Run with:
+# $ PYTHONPATH=./src python -m service.tests.test_l3nm_gnmi_static_rule_gen.test_unitary_sns4sns
+
+import logging
+from typing import List, Optional, Tuple
+from common.DeviceTypes import DeviceTypeEnum
+from common.proto.context_pb2 import Connection, Device, DeviceOperationalStatusEnum, Service
+from common.tools.object_factory.ConfigRule import json_config_rule_set
+from common.tools.object_factory.Connection import json_connection
+from common.tools.object_factory.Device import json_device, json_device_id
+from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_id
+from common.tools.object_factory.Service import json_service_l3nm_planned
+from .MockServiceHandler import MockServiceHandler
+from .MockTaskExecutor import CacheableObjectType, MockTaskExecutor
+
+logging.basicConfig(level=logging.DEBUG)
+LOGGER = logging.getLogger(__name__)
+
+SERVICE = Service(**json_service_l3nm_planned(
+    'svc-core-edge-uuid',
+    endpoint_ids=[
+        json_endpoint_id(json_device_id('core-net'), 'eth1'),
+        json_endpoint_id(json_device_id('edge-net'), 'eth1'),
+    ],
+    config_rules=[
+        json_config_rule_set('/device[core-net]/endpoint[eth1]/settings', {
+            'address_ip': '10.10.10.0', 'address_prefix': 24, 'index': 0
+        }),
+        json_config_rule_set('/device[r1]/endpoint[eth10]/settings', {
+            'address_ip': '10.10.10.229', 'address_prefix': 24, 'index': 0
+        }),
+        json_config_rule_set('/device[r2]/endpoint[eth10]/settings', {
+            'address_ip': '10.158.72.229', 'address_prefix': 24, 'index': 0
+        }),
+        json_config_rule_set('/device[edge-net]/endpoint[eth1]/settings', {
+            'address_ip': '10.158.72.0', 'address_prefix': 24, 'index': 0
+        }),
+    ]
+))
+
+CONNECTION_ENDPOINTS : List[Tuple[str, str, Optional[str]]] = [
+    #('core-net', 'int',   None),
+    ('core-net', 'eth1',  None),
+    ('r1',       'eth10', None), ('r1',       'eth2',  None),
+    ('r2',       'eth1',  None), ('r2',       'eth10', None),
+    ('edge-net', 'eth1',  None),
+    #('edge-net', 'int',   None),
+]
+
+def test_l3nm_gnmi_static_rule_gen() -> None:
+    dev_op_st_enabled = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
+    mock_task_executor = MockTaskExecutor()
+    mock_task_executor._store_grpc_object(CacheableObjectType.DEVICE, 'core-net', Device(**json_device(
+        'core-net', DeviceTypeEnum.EMULATED_DATACENTER.value, dev_op_st_enabled, name='core-net',
+        endpoints=[
+            json_endpoint(json_device_id('core-net'), 'int',  'packet', name='int' ),
+            json_endpoint(json_device_id('core-net'), 'eth1', 'packet', name='eth1'),
+        ]
+    )))
+    mock_task_executor._store_grpc_object(CacheableObjectType.DEVICE, 'edge-net', Device(**json_device(
+        'edge-net', DeviceTypeEnum.EMULATED_DATACENTER.value, dev_op_st_enabled, name='edge-net',
+        endpoints=[
+            json_endpoint(json_device_id('edge-net'), 'int',  'packet', name='int' ),
+            json_endpoint(json_device_id('edge-net'), 'eth1', 'packet', name='eth1'),
+        ]
+    )))
+    mock_task_executor._store_grpc_object(CacheableObjectType.DEVICE, 'r1', Device(**json_device(
+        'r1', DeviceTypeEnum.EMULATED_PACKET_ROUTER.value, dev_op_st_enabled, name='r1',
+        endpoints=[
+            json_endpoint(json_device_id('r1'), 'eth2',  'packet', name='eth2' ),
+            json_endpoint(json_device_id('r1'), 'eth10', 'packet', name='eth10'),
+        ]
+    )))
+    mock_task_executor._store_grpc_object(CacheableObjectType.DEVICE, 'r2', Device(**json_device(
+        'r2', DeviceTypeEnum.EMULATED_PACKET_ROUTER.value, dev_op_st_enabled, name='r2',
+        endpoints=[
+            json_endpoint(json_device_id('r2'), 'eth1',  'packet', name='eth1' ),
+            json_endpoint(json_device_id('r2'), 'eth10', 'packet', name='eth10'),
+        ]
+    )))
+    mock_task_executor._store_grpc_object(CacheableObjectType.CONNECTION, 'conn', Connection(**json_connection(
+        'conn', path_hops_endpoint_ids=[
+            json_endpoint_id(json_device_id(device_uuid), endpoint_uuid=endpoint_uuid)
+            for device_uuid, endpoint_uuid, _ in CONNECTION_ENDPOINTS
+        ]
+    )))
+
+    mock_service_handler = MockServiceHandler(SERVICE, mock_task_executor)
+    mock_service_handler.SetEndpoint(CONNECTION_ENDPOINTS, connection_uuid='conn')
+    mock_service_handler.DeleteEndpoint(CONNECTION_ENDPOINTS, connection_uuid='conn')
+
+if __name__ == '__main__':
+    test_l3nm_gnmi_static_rule_gen()