diff --git a/dataplane-in-a-box/.gitignore b/src/tests/eucnc24/.gitignore
similarity index 100%
rename from dataplane-in-a-box/.gitignore
rename to src/tests/eucnc24/.gitignore
diff --git a/src/tests/eucnc24/.gitlab-ci.yml b/src/tests/eucnc24/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..1718531447b267a237db618a0109a5bfd5624e5f
--- /dev/null
+++ b/src/tests/eucnc24/.gitlab-ci.yml
@@ -0,0 +1,132 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Build, tag, and push the Docker image to the GitLab Docker registry
+build eucnc24:
+  variables:
+    TEST_NAME: 'eucnc24'
+  stage: build
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker buildx build -t "${TEST_NAME}:latest" -f ./src/tests/${TEST_NAME}/Dockerfile .
+    - docker tag "${TEST_NAME}:latest" "$CI_REGISTRY_IMAGE/${TEST_NAME}:latest"
+    - docker push "$CI_REGISTRY_IMAGE/${TEST_NAME}:latest"
+  after_script:
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+    - changes:
+      - src/common/**/*.py
+      - proto/*.proto
+      - src/tests/${TEST_NAME}/**/*.{py,in,sh,yml}
+      - src/tests/${TEST_NAME}/Dockerfile
+      - .gitlab-ci.yml
+
+# Deploy TeraFlowSDN and execute the end-to-end test
+end2end_test eucnc24:
+  variables:
+    TEST_NAME: 'eucnc24'
+  stage: end2end_test
+  # "needs" is left disabled to force this job to run after all other jobs
+  #needs:
+  #  - build eucnc24
+  before_script:
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
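+    # Remove any leftover test container and ContainerLab scenarios from previous runs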
+    - if docker ps -a | grep ${TEST_NAME}; then docker rm -f ${TEST_NAME}; fi
+    - containerlab destroy --all
+    - containerlab destroy --cleanup
+
+  script:
+    # Download Docker image to run the test
+    - docker pull "${CI_REGISTRY_IMAGE}/${TEST_NAME}:latest"
+
+    # Check MicroK8s is ready
+    - microk8s status --wait-ready
+    - kubectl get pods --all-namespaces
+
+    # Deploy ContainerLab Scenario
+    - containerlab deploy --topo $PWD/src/tests/${TEST_NAME}/eucnc24.clab.yml
+
+    # Wait for initialization of Device NOSes
+    - sleep 3
+    - docker ps -a
+
+    # Configure TeraFlowSDN deployment
+    # Uncomment if DEBUG log level is needed for the components
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="server").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/contextservice.yaml
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="server").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/deviceservice.yaml
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="frontend").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/pathcompservice.yaml
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="server").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/serviceservice.yaml
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="server").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/nbiservice.yaml
+    #- yq -i '((select(.kind=="Deployment").spec.template.spec.containers.[] | select(.name=="server").env.[]) | select(.name=="LOG_LEVEL").value) |= "DEBUG"' manifests/monitoringservice.yaml
+
+    - source src/tests/${TEST_NAME}/deploy_specs.sh
+    #- export TFS_REGISTRY_IMAGES="${CI_REGISTRY_IMAGE}"
+    #- export TFS_SKIP_BUILD="YES"
+    #- export TFS_IMAGE_TAG="latest"
+    #- echo "TFS_REGISTRY_IMAGES=${CI_REGISTRY_IMAGE}"
+
+    # Deploy TeraFlowSDN
+    - ./deploy/crdb.sh
+    - ./deploy/nats.sh
+    - ./deploy/qdb.sh
+    #- ./deploy/kafka.sh
+    - ./deploy/tfs.sh
+    - ./deploy/show.sh
+
+    # Wait for Context to be subscribed to NATS
+    - while ! kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/contextservice -c server 2>&1 | grep -q 'Subscriber is Ready? True'; do sleep 1; done
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/contextservice -c server
+
+    # Run end-to-end tests
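+    #   Note: /opt/results inside the container maps to the test folder, so the JUnit
+    #   reports produced by run_tests.sh land where the "artifacts" rule below expects them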
+    - >
+      docker run -t --name ${TEST_NAME} --network=host 
+      --volume "$PWD/tfs_runtime_env_vars.sh:/var/teraflow/tfs_runtime_env_vars.sh"
+      --volume "$PWD/src/tests/${TEST_NAME}:/opt/results"
+      $CI_REGISTRY_IMAGE/${TEST_NAME}:latest
+
+  after_script:
+    # Dump TeraFlowSDN component logs
+    - source src/tests/${TEST_NAME}/deploy_specs.sh
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/contextservice -c server
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/deviceservice -c server
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/pathcompservice -c frontend
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/serviceservice -c server
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/nbiservice -c server
+    - kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/monitoringservice -c server
+
+    # Dump test container logs
+    - if docker ps -a | grep ${TEST_NAME}; then docker logs ${TEST_NAME}; fi
+
+    # Destroy Scenario
+    - if docker ps -a | grep ${TEST_NAME}; then docker rm -f ${TEST_NAME}; fi
+    - containerlab destroy --all
+    - containerlab destroy --cleanup
+    - kubectl delete namespaces tfs crdb qdb
+    - helm3 uninstall --namespace nats nats
+    - kubectl delete namespace nats
+
+    # Clean old docker images
+    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
+
+  #coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+  artifacts:
+    when: always
+    reports:
+      junit: ./src/tests/${TEST_NAME}/report_*.xml
diff --git a/src/tests/eucnc24/Dockerfile b/src/tests/eucnc24/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..956d54b74b38bc433ac03f79170125a800b51506
--- /dev/null
+++ b/src/tests/eucnc24/Dockerfile
@@ -0,0 +1,91 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM python:3.9-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ git && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=0
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip
+RUN python3 -m pip install --upgrade setuptools wheel
+RUN python3 -m pip install --upgrade pip-tools
+
+# Get common Python packages
+# Note: this step lets all Python components share the previously built Docker layers
+WORKDIR /var/teraflow
+COPY common_requirements.in common_requirements.in
+RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
+RUN python3 -m pip install -r common_requirements.txt
+
+# Add common files into working directory
+WORKDIR /var/teraflow/common
+COPY src/common/. ./
+RUN rm -rf proto
+
+# Create proto sub-folder, copy .proto files, and generate Python code
+RUN mkdir -p /var/teraflow/common/proto
+WORKDIR /var/teraflow/common/proto
+RUN touch __init__.py
+COPY proto/*.proto ./
+RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
+RUN rm *.proto
+RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
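+# Note: the sed command above rewrites the absolute imports produced by grpc_tools
+# (e.g. "import context_pb2") into relative ones ("from . import context_pb2") so the
+# generated modules can be used as part of the common.proto package.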
+
+# Create the component sub-folder and install the component-specific Python packages
+RUN mkdir -p /var/teraflow/tests/eucnc24
+WORKDIR /var/teraflow/tests/eucnc24
+COPY src/tests/eucnc24/requirements.in requirements.in
+RUN pip-compile --quiet --output-file=requirements.txt requirements.in
+RUN python3 -m pip install -r requirements.txt
+
+# Add component files into working directory
+WORKDIR /var/teraflow
+COPY src/__init__.py ./__init__.py
+COPY src/common/*.py ./common/
+COPY src/common/tests/. ./common/tests/
+COPY src/common/tools/. ./common/tools/
+COPY src/context/__init__.py context/__init__.py
+COPY src/context/client/. context/client/
+COPY src/device/__init__.py device/__init__.py
+COPY src/device/client/. device/client/
+COPY src/monitoring/__init__.py monitoring/__init__.py
+COPY src/monitoring/client/. monitoring/client/
+COPY src/service/__init__.py service/__init__.py
+COPY src/service/client/. service/client/
+COPY src/slice/__init__.py slice/__init__.py
+COPY src/slice/client/. slice/client/
+COPY src/tests/*.py ./tests/
+COPY src/tests/eucnc24/__init__.py ./tests/eucnc24/__init__.py
+COPY src/tests/eucnc24/data/. ./tests/eucnc24/data/
+COPY src/tests/eucnc24/tests/. ./tests/eucnc24/tests/
+
+RUN tee ./run_tests.sh <<EOF
+#!/bin/bash
+source /var/teraflow/tfs_runtime_env_vars.sh
+export PYTHONPATH=/var/teraflow
+pytest --verbose --log-level=INFO /var/teraflow/tests/eucnc24/tests/test_functional_bootstrap.py    --junitxml=/opt/results/report_bootstrap.xml
+pytest --verbose --log-level=INFO /var/teraflow/tests/eucnc24/tests/test_functional_service_tfs.py  --junitxml=/opt/results/report_service_tfs.xml
+#pytest --verbose --log-level=INFO /var/teraflow/tests/eucnc24/tests/test_functional_service_ietf.py --junitxml=/opt/results/report_service_ietf.xml
+pytest --verbose --log-level=INFO /var/teraflow/tests/eucnc24/tests/test_functional_cleanup.py      --junitxml=/opt/results/report_cleanup.xml
+EOF
+RUN chmod ug+x ./run_tests.sh
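+# Note: the heredoc above requires BuildKit (Dockerfile frontend 1.4+); the CI job builds
+# this image with "docker buildx build", which uses BuildKit.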
+
+# Run the tests
+ENTRYPOINT ["./run_tests.sh"]
diff --git a/dataplane-in-a-box/README.md b/src/tests/eucnc24/README.md
similarity index 100%
rename from dataplane-in-a-box/README.md
rename to src/tests/eucnc24/README.md
diff --git a/src/tests/eucnc24/__init__.py b/src/tests/eucnc24/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3
--- /dev/null
+++ b/src/tests/eucnc24/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/eucnc24/data/ietf-l3vpn-service.json b/src/tests/eucnc24/data/ietf-l3vpn-service.json
new file mode 100644
index 0000000000000000000000000000000000000000..bfeb93fb74c9513ef4d175d5962110127303a2a7
--- /dev/null
+++ b/src/tests/eucnc24/data/ietf-l3vpn-service.json
@@ -0,0 +1,231 @@
+{
+  "ietf-l3vpn-svc:l3vpn-svc": {
+    "vpn-services": {
+      "vpn-service": [
+        {
+          "vpn-id": "vpn1"
+        }
+      ]
+    },
+    "sites": {
+      "site": [
+        {
+          "site-id": "site_OLT",
+          "management": {
+            "type": "ietf-l3vpn-svc:provider-managed"
+          },
+          "locations": {
+            "location": [
+              {
+                "location-id": "OLT"
+              }
+            ]
+          },
+          "devices": {
+            "device": [
+              {
+                "device-id": "128.32.33.5",
+                "location": "OLT"
+              }
+            ]
+          },
+          "routing-protocols": {
+            "routing-protocol": [
+              {
+                "type": "ietf-l3vpn-svc:static",
+                "static": {
+                  "cascaded-lan-prefixes": {
+                    "ipv4-lan-prefixes": [
+                      {
+                        "lan": "128.32.10.1/24",
+                        "lan-tag": "vlan21",
+                        "next-hop": "128.32.33.2"
+                      },
+                      {
+                        "lan": "128.32.20.1/24",
+                        "lan-tag": "vlan21",
+                        "next-hop": "128.32.33.2"
+                      }
+                    ]
+                  }
+                }
+              }
+            ]
+          },
+          "site-network-accesses": {
+            "site-network-access": [
+              {
+                "site-network-access-id": "500",
+                "site-network-access-type": "ietf-l3vpn-svc:multipoint",
+                "device-reference": "128.32.33.5",
+                "vpn-attachment": {
+                  "vpn-id": "vpn1",
+                  "site-role": "ietf-l3vpn-svc:spoke-role"
+                },
+                "ip-connection": {
+                  "ipv4": {
+                    "address-allocation-type": "ietf-l3vpn-svc:static-address",
+                    "addresses": {
+                      "provider-address": "128.32.33.254",
+                      "customer-address": "128.32.33.2",
+                      "prefix-length": 24
+                    }
+                  }
+                },
+                "routing-protocols": {
+                  "routing-protocol": [
+                    {
+                      "type": "ietf-l3vpn-svc:static",
+                      "static": {
+                        "cascaded-lan-prefixes": {
+                          "ipv4-lan-prefixes": [
+                            {
+                              "lan": "172.1.101.1/24",
+                              "lan-tag": "vlan21",
+                              "next-hop": "128.32.33.254"
+                            }
+                          ]
+                        }
+                      }
+                    }
+                  ]
+                },
+                "service": {
+                  "svc-mtu": 1500,
+                  "svc-input-bandwidth": 1000000000,
+                  "svc-output-bandwidth": 1000000000,
+                  "qos": {
+                    "qos-profile": {
+                      "classes": {
+                        "class": [
+                          {
+                            "class-id": "qos-realtime",
+                            "direction": "ietf-l3vpn-svc:both",
+                            "latency": {
+                              "latency-boundary": 10
+                            },
+                            "bandwidth": {
+                              "guaranteed-bw-percent": 100
+                            }
+                          }
+                        ]
+                      }
+                    }
+                  }
+                }
+              }
+            ]
+          }
+        },
+        {
+          "site-id": "site_POP",
+          "management": {
+            "type": "ietf-l3vpn-svc:provider-managed"
+          },
+          "locations": {
+            "location": [
+              {
+                "location-id": "POP"
+              }
+            ]
+          },
+          "devices": {
+            "device": [
+              {
+                "device-id": "172.10.33.5",
+                "location": "POP"
+              }
+            ]
+          },
+          "routing-protocols": {
+            "routing-protocol": [
+              {
+                "type": "ietf-l3vpn-svc:static",
+                "static": {
+                  "cascaded-lan-prefixes": {
+                    "ipv4-lan-prefixes": [
+                      {
+                        "lan": "172.1.101.1/24",
+                        "lan-tag": "vlan101",
+                        "next-hop": "172.10.33.2"
+                      }
+                    ]
+                  }
+                }
+              }
+            ]
+          },
+          "site-network-accesses": {
+            "site-network-access": [
+              {
+                "site-network-access-id": "500",
+                "site-network-access-type": "ietf-l3vpn-svc:multipoint",
+                "device-reference": "172.10.33.5",
+                "vpn-attachment": {
+                  "vpn-id": "vpn1",
+                  "site-role": "ietf-l3vpn-svc:hub-role"
+                },
+                "ip-connection": {
+                  "ipv4": {
+                    "address-allocation-type": "ietf-l3vpn-svc:static-address",
+                    "addresses": {
+                      "provider-address": "172.10.33.254",
+                      "customer-address": "172.10.33.2",
+                      "prefix-length": 24
+                    }
+                  }
+                },
+                "routing-protocols": {
+                  "routing-protocol": [
+                    {
+                      "type": "ietf-l3vpn-svc:static",
+                      "static": {
+                        "cascaded-lan-prefixes": {
+                          "ipv4-lan-prefixes": [
+                            {
+                              "lan": "128.32.10.1/24",
+                              "lan-tag": "vlan101",
+                              "next-hop": "172.10.33.254"
+                            },
+                            {
+                              "lan": "128.32.20.1/24",
+                              "lan-tag": "vlan101",
+                              "next-hop": "172.10.33.254"
+                            }
+                          ]
+                        }
+                      }
+                    }
+                  ]
+                },
+                "service": {
+                  "svc-mtu": 1500,
+                  "svc-input-bandwidth": 1000000000,
+                  "svc-output-bandwidth": 1000000000,
+                  "qos": {
+                    "qos-profile": {
+                      "classes": {
+                        "class": [
+                          {
+                            "class-id": "qos-realtime",
+                            "direction": "ietf-l3vpn-svc:both",
+                            "latency": {
+                              "latency-boundary": 10
+                            },
+                            "bandwidth": {
+                              "guaranteed-bw-percent": 100
+                            }
+                          }
+                        ]
+                      }
+                    }
+                  }
+                }
+              }
+            ]
+          }
+        }
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/dataplane-in-a-box/tfs-02-l3-service.json b/src/tests/eucnc24/data/tfs-service.json
similarity index 100%
rename from dataplane-in-a-box/tfs-02-l3-service.json
rename to src/tests/eucnc24/data/tfs-service.json
diff --git a/dataplane-in-a-box/tfs-01-topology.json b/src/tests/eucnc24/data/tfs-topology.json
similarity index 100%
rename from dataplane-in-a-box/tfs-01-topology.json
rename to src/tests/eucnc24/data/tfs-topology.json
diff --git a/dataplane-in-a-box/deploy_specs.sh b/src/tests/eucnc24/deploy_specs.sh
similarity index 68%
rename from dataplane-in-a-box/deploy_specs.sh
rename to src/tests/eucnc24/deploy_specs.sh
index 86f3d87111c0ddb738644313e179372b58bf048f..02021bc63f4555df33474f987073bfd75b38f72c 100755
--- a/dataplane-in-a-box/deploy_specs.sh
+++ b/src/tests/eucnc24/deploy_specs.sh
@@ -21,11 +21,30 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/"
 
 # Set the list of components, separated by spaces, you want to build images for, and deploy.
 #export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator"
-export TFS_COMPONENTS="context device pathcomp service slice nbi webui"
+export TFS_COMPONENTS="context device pathcomp service nbi"
 
-# Uncomment to activate Monitoring
+# Uncomment to activate Monitoring (old)
 export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
 
+# Uncomment to activate Monitoring Framework (new)
+#export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics automation"
+
+# Uncomment to activate QoS Profiles
+#export TFS_COMPONENTS="${TFS_COMPONENTS} qos_profile"
+
+# Uncomment to activate BGP-LS Speaker
+#export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker"
+
+# Uncomment to activate Optical Controller
+#   To manage optical connections, "service" requires "opticalcontroller" to be deployed
+#   before "service", thus we "hack" the TFS_COMPONENTS environment variable prepending the
+#   "opticalcontroller" only if "service" is already in TFS_COMPONENTS, and re-export it.
+#if [[ "$TFS_COMPONENTS" == *"service"* ]]; then
+#    BEFORE="${TFS_COMPONENTS% service*}"
+#    AFTER="${TFS_COMPONENTS#* service}"
+#    export TFS_COMPONENTS="${BEFORE} opticalcontroller service ${AFTER}"
+#fi
+
 # Uncomment to activate ZTP
 #export TFS_COMPONENTS="${TFS_COMPONENTS} ztp"
 
@@ -44,6 +63,28 @@ export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
 # Uncomment to activate Forecaster
 #export TFS_COMPONENTS="${TFS_COMPONENTS} forecaster"
 
+# Uncomment to activate E2E Orchestrator
+#export TFS_COMPONENTS="${TFS_COMPONENTS} e2e_orchestrator"
+
+# Uncomment to activate DLT and Interdomain
+#export TFS_COMPONENTS="${TFS_COMPONENTS} interdomain dlt"
+#if [[ "$TFS_COMPONENTS" == *"dlt"* ]]; then
+#    export KEY_DIRECTORY_PATH="src/dlt/gateway/keys/priv_sk"
+#    export CERT_DIRECTORY_PATH="src/dlt/gateway/keys/cert.pem"
+#    export TLS_CERT_PATH="src/dlt/gateway/keys/ca.crt"
+#fi
+
+# Uncomment to activate QKD App
+#   To manage QKD Apps, "service" requires "qkd_app" to be deployed
+#   before "service", thus we "hack" the TFS_COMPONENTS environment variable prepending the
+#   "qkd_app" only if "service" is already in TFS_COMPONENTS, and re-export it.
+#if [[ "$TFS_COMPONENTS" == *"service"* ]]; then
+#    BEFORE="${TFS_COMPONENTS% service*}"
+#    AFTER="${TFS_COMPONENTS#* service}"
+#    export TFS_COMPONENTS="${BEFORE} qkd_app service ${AFTER}"
+#fi
+
+
 # Set the tag you want to use for your images.
 export TFS_IMAGE_TAG="dev"
 
@@ -108,6 +149,10 @@ export NATS_EXT_PORT_CLIENT="4222"
 # Set the external port NATS HTTP Mgmt GUI interface will be exposed to.
 export NATS_EXT_PORT_HTTP="8222"
 
+# Set NATS installation mode to 'single'. This option is convenient for development and testing.
+# See ./deploy/all.sh or ./deploy/nats.sh for additional details
+export NATS_DEPLOY_MODE="single"
+
 # Disable flag for re-deploying NATS from scratch.
 export NATS_REDEPLOY=""
 
@@ -152,3 +197,15 @@ export PROM_EXT_PORT_HTTP="9090"
 
 # Set the external port Grafana HTTP Dashboards will be exposed to.
 export GRAF_EXT_PORT_HTTP="3000"
+
+
+# ----- Apache Kafka -----------------------------------------------------------
+
+# Set the namespace where Apache Kafka will be deployed.
+export KFK_NAMESPACE="kafka"
+
+# Set the port Apache Kafka server will be exposed to.
+export KFK_SERVER_PORT="9092"
+
+# Set the flag to YES to re-deploy Apache Kafka from scratch.
+export KFK_REDEPLOY=""
diff --git a/dataplane-in-a-box/arista.clab.yml b/src/tests/eucnc24/eucnc24.clab.yml
similarity index 92%
rename from dataplane-in-a-box/arista.clab.yml
rename to src/tests/eucnc24/eucnc24.clab.yml
index 306caf5d9c51cfb76711b5d63210e9b92738daad..98f93e0c6f059a807d61219a21978272293289d3 100644
--- a/dataplane-in-a-box/arista.clab.yml
+++ b/src/tests/eucnc24/eucnc24.clab.yml
@@ -14,7 +14,7 @@
 
 # TFS - Arista devices + Linux clients
 
-name: arista
+name: eucnc24
 
 mgmt:
   network: mgmt-net
@@ -25,7 +25,11 @@ topology:
     arista_ceos:
       kind: arista_ceos
       #image: ceos:4.30.4M
-      image: ceos:4.31.2F
+      #image: ceos:4.31.2F
+      #image: ceos:4.31.5M
+      #image: ceos:4.32.0F
+      #image: ceos:4.32.2F
+      image: ceos:4.32.2.1F
     linux:
       kind: linux
       image: ghcr.io/hellt/network-multitool:latest
diff --git a/src/tests/eucnc24/redeploy-tfs.sh b/src/tests/eucnc24/redeploy-tfs.sh
new file mode 100755
index 0000000000000000000000000000000000000000..b5ced0d12e026be4b9cb7eefcf343445b776f042
--- /dev/null
+++ b/src/tests/eucnc24/redeploy-tfs.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source ~/tfs-ctrl/src/tests/eucnc24/deploy_specs.sh
+./deploy/all.sh
diff --git a/src/tests/eucnc24/requirements.in b/src/tests/eucnc24/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..468af1a17931d6e545647e3e7a057433d74826b3
--- /dev/null
+++ b/src/tests/eucnc24/requirements.in
@@ -0,0 +1,15 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+requests==2.27.*
diff --git a/dataplane-in-a-box/clab-cli-dc1.sh b/src/tests/eucnc24/scripts/clab-cli-dc1.sh
similarity index 94%
rename from dataplane-in-a-box/clab-cli-dc1.sh
rename to src/tests/eucnc24/scripts/clab-cli-dc1.sh
index fc47fecdb216d3c785136e81d88a494c8b21c266..d0ee18dcfd7eac03b108e163200d14b532d8db8f 100755
--- a/dataplane-in-a-box/clab-cli-dc1.sh
+++ b/src/tests/eucnc24/scripts/clab-cli-dc1.sh
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-docker exec -it clab-arista-dc1 bash
+docker exec -it clab-eucnc24-dc1 bash
diff --git a/dataplane-in-a-box/clab-cli-dc2.sh b/src/tests/eucnc24/scripts/clab-cli-dc2.sh
similarity index 94%
rename from dataplane-in-a-box/clab-cli-dc2.sh
rename to src/tests/eucnc24/scripts/clab-cli-dc2.sh
index 0f308b5320a22ce96688c34465e8ece73d8e68a1..2867fedcb9051f6d73b78d571b33ed7ae25efd80 100755
--- a/dataplane-in-a-box/clab-cli-dc2.sh
+++ b/src/tests/eucnc24/scripts/clab-cli-dc2.sh
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-docker exec -it clab-arista-dc2 bash
+docker exec -it clab-eucnc24-dc2 bash
diff --git a/dataplane-in-a-box/clab-cli-r1.sh b/src/tests/eucnc24/scripts/clab-cli-r1.sh
similarity index 94%
rename from dataplane-in-a-box/clab-cli-r1.sh
rename to src/tests/eucnc24/scripts/clab-cli-r1.sh
index 807ec051709f86d933344c3178d99fab2fc389ea..69141a0ae73ee23274b823242b0f864e1527d505 100755
--- a/dataplane-in-a-box/clab-cli-r1.sh
+++ b/src/tests/eucnc24/scripts/clab-cli-r1.sh
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-docker exec -it clab-arista-r1 Cli
+docker exec -it clab-eucnc24-r1 Cli
diff --git a/dataplane-in-a-box/clab-cli-r2.sh b/src/tests/eucnc24/scripts/clab-cli-r2.sh
similarity index 94%
rename from dataplane-in-a-box/clab-cli-r2.sh
rename to src/tests/eucnc24/scripts/clab-cli-r2.sh
index d9eea8932217db8a4b7d0643a624fc0b0c75c353..7860d1d21f07a293f3bcbc65575625568a49a41c 100755
--- a/dataplane-in-a-box/clab-cli-r2.sh
+++ b/src/tests/eucnc24/scripts/clab-cli-r2.sh
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-docker exec -it clab-arista-r2 Cli
+docker exec -it clab-eucnc24-r2 Cli
diff --git a/dataplane-in-a-box/clab-cli-r3.sh b/src/tests/eucnc24/scripts/clab-cli-r3.sh
similarity index 94%
rename from dataplane-in-a-box/clab-cli-r3.sh
rename to src/tests/eucnc24/scripts/clab-cli-r3.sh
index 9d1b9b444fe62b14e1e2069eb009d832790e7de3..801c3223d7c59e767bcc64b0cc331553a34bb4b4 100755
--- a/dataplane-in-a-box/clab-cli-r3.sh
+++ b/src/tests/eucnc24/scripts/clab-cli-r3.sh
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-docker exec -it clab-arista-r3 Cli
+docker exec -it clab-eucnc24-r3 Cli
diff --git a/dataplane-in-a-box/clab-deploy.sh b/src/tests/eucnc24/scripts/clab-deploy.sh
similarity index 87%
rename from dataplane-in-a-box/clab-deploy.sh
rename to src/tests/eucnc24/scripts/clab-deploy.sh
index a1ce30c259614618675215fe856918242a6a7bd4..ae1676ada75ab6a5ce671fae187c0a94ffc62331 100755
--- a/dataplane-in-a-box/clab-deploy.sh
+++ b/src/tests/eucnc24/scripts/clab-deploy.sh
@@ -13,5 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cd /home/$USER/tfs-ctrl/dataplane-in-a-box
-sudo containerlab deploy --topo arista.clab.yml
+cd ~/tfs-ctrl/src/tests/eucnc24
+sudo containerlab deploy --topo eucnc24.clab.yml
diff --git a/dataplane-in-a-box/clab-destroy.sh b/src/tests/eucnc24/scripts/clab-destroy.sh
similarity index 82%
rename from dataplane-in-a-box/clab-destroy.sh
rename to src/tests/eucnc24/scripts/clab-destroy.sh
index 57917900b887ef21ac297ce99f118e4621e8fb4f..6e58a3c490c196073f4cb259e11d45dd4ff2a1f8 100755
--- a/dataplane-in-a-box/clab-destroy.sh
+++ b/src/tests/eucnc24/scripts/clab-destroy.sh
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cd /home/$USER/tfs-ctrl/dataplane-in-a-box
-sudo containerlab destroy --topo arista.clab.yml
-sudo rm -rf clab-arista/ .arista.clab.yml.bak
+cd ~/tfs-ctrl/src/tests/eucnc24
+sudo containerlab destroy --topo eucnc24.clab.yml
+sudo rm -rf clab-eucnc24/ .eucnc24.clab.yml.bak
diff --git a/dataplane-in-a-box/clab-inspect.sh b/src/tests/eucnc24/scripts/clab-inspect.sh
similarity index 87%
rename from dataplane-in-a-box/clab-inspect.sh
rename to src/tests/eucnc24/scripts/clab-inspect.sh
index a4d51eac41558c059b556f66e94a452ba2b5bd3c..0dd6dce12dc60e775edc6ab449d1d448f37bd686 100755
--- a/dataplane-in-a-box/clab-inspect.sh
+++ b/src/tests/eucnc24/scripts/clab-inspect.sh
@@ -13,5 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cd /home/$USER/tfs-ctrl/dataplane-in-a-box
-sudo containerlab inspect --topo arista.clab.yml
+cd ~/tfs-ctrl/src/tests/eucnc24
+sudo containerlab inspect --topo eucnc24.clab.yml
diff --git a/src/tests/eucnc24/tests/Tools.py b/src/tests/eucnc24/tests/Tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..e983ffefff3d5aea6b078efa9bb586202957a5e1
--- /dev/null
+++ b/src/tests/eucnc24/tests/Tools.py
@@ -0,0 +1,109 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum, logging, requests
+from typing import Any, Dict, List, Optional, Set, Union
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_host, get_service_port_http
+
+NBI_ADDRESS  = get_service_host(ServiceNameEnum.NBI)
+NBI_PORT     = get_service_port_http(ServiceNameEnum.NBI)
+NBI_USERNAME = 'admin'
+NBI_PASSWORD = 'admin'
+NBI_BASE_URL = ''
+
+class RestRequestMethod(enum.Enum):
+    GET    = 'get'
+    POST   = 'post'
+    PUT    = 'put'
+    PATCH  = 'patch'
+    DELETE = 'delete'
+
+EXPECTED_STATUS_CODES : Set[int] = {
+    requests.codes['OK'        ],
+    requests.codes['CREATED'   ],
+    requests.codes['ACCEPTED'  ],
+    requests.codes['NO_CONTENT'],
+}
+
+def do_rest_request(
+    method : RestRequestMethod, url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    request_url = 'http://{:s}:{:s}@{:s}:{:d}{:s}{:s}'.format(
+        NBI_USERNAME, NBI_PASSWORD, NBI_ADDRESS, NBI_PORT, str(NBI_BASE_URL), url
+    )
+
+    if logger is not None:
+        msg = 'Request: {:s} {:s}'.format(str(method.value).upper(), str(request_url))
+        if body is not None: msg += ' body={:s}'.format(str(body))
+        logger.warning(msg)
+    reply = requests.request(method.value, request_url, timeout=timeout, json=body, allow_redirects=allow_redirects)
+    if logger is not None:
+        logger.warning('Reply: {:s}'.format(str(reply.text)))
+    assert reply.status_code in expected_status_codes, 'Reply failed with status code {:d}'.format(reply.status_code)
+
+    if reply.content and len(reply.content) > 0: return reply.json()
+    return None
+
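+# Illustrative usage of the helpers below (hypothetical call, mirroring the RESTCONF paths
+# used by the functional tests):
+#   vpn_services = do_rest_get_request('/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services')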
+def do_rest_get_request(
+    url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    return do_rest_request(
+        RestRequestMethod.GET, url, body=body, timeout=timeout, allow_redirects=allow_redirects,
+        expected_status_codes=expected_status_codes, logger=logger
+    )
+
+def do_rest_post_request(
+    url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    return do_rest_request(
+        RestRequestMethod.POST, url, body=body, timeout=timeout, allow_redirects=allow_redirects,
+        expected_status_codes=expected_status_codes, logger=logger
+    )
+
+def do_rest_put_request(
+    url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    return do_rest_request(
+        RestRequestMethod.PUT, url, body=body, timeout=timeout, allow_redirects=allow_redirects,
+        expected_status_codes=expected_status_codes, logger=logger
+    )
+
+def do_rest_patch_request(
+    url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    return do_rest_request(
+        RestRequestMethod.PATCH, url, body=body, timeout=timeout, allow_redirects=allow_redirects,
+        expected_status_codes=expected_status_codes, logger=logger
+    )
+
+def do_rest_delete_request(
+    url : str, body : Optional[Any] = None, timeout : int = 10,
+    allow_redirects : bool = True, expected_status_codes : Set[int] = EXPECTED_STATUS_CODES,
+    logger : Optional[logging.Logger] = None
+) -> Optional[Union[Dict, List]]:
+    return do_rest_request(
+        RestRequestMethod.DELETE, url, body=body, timeout=timeout, allow_redirects=allow_redirects,
+        expected_status_codes=expected_status_codes, logger=logger
+    )
diff --git a/src/tests/eucnc24/tests/__init__.py b/src/tests/eucnc24/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ee6f7071f145e06c3aeaefc09a43ccd88e619e3
--- /dev/null
+++ b/src/tests/eucnc24/tests/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/src/tests/eucnc24/tests/test_functional_bootstrap.py b/src/tests/eucnc24/tests/test_functional_bootstrap.py
new file mode 100644
index 0000000000000000000000000000000000000000..38040a3081302f946a5759fed10337f255c3267b
--- /dev/null
+++ b/src/tests/eucnc24/tests/test_functional_bootstrap.py
@@ -0,0 +1,67 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os, time
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId, DeviceOperationalStatusEnum, Empty
+from common.tools.descriptor.Loader import DescriptorLoader, check_descriptor_load_results, validate_empty_scenario
+from common.tools.object_factory.Context import json_context_id
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from tests.Fixtures import context_client, device_client # pylint: disable=unused-import
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+DESCRIPTOR_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data', 'tfs-topology.json')
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
+
+def test_scenario_bootstrap(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    device_client : DeviceClient,   # pylint: disable=redefined-outer-name
+) -> None:
+    validate_empty_scenario(context_client)
+
+    descriptor_loader = DescriptorLoader(
+        descriptors_file=DESCRIPTOR_FILE, context_client=context_client, device_client=device_client)
+    results = descriptor_loader.process()
+    check_descriptor_load_results(results, descriptor_loader)
+    descriptor_loader.validate()
+
+    # Verify the scenario has no services/slices
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
+
+def test_scenario_devices_enabled(
+    context_client : ContextClient,         # pylint: disable=redefined-outer-name
+) -> None:
+    """
+    This test validates that the devices are enabled.
+    """
+    DEVICE_OP_STATUS_ENABLED = DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED
+
+    num_devices = -1
+    num_devices_enabled, num_retry = 0, 0
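+    # Poll the Context service (up to 10 retries, 1 second apart) until every device reports ENABLED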
+    while (num_devices != num_devices_enabled) and (num_retry < 10):
+        time.sleep(1.0)
+        response = context_client.ListDevices(Empty())
+        num_devices = len(response.devices)
+        num_devices_enabled = 0
+        for device in response.devices:
+            if device.device_operational_status != DEVICE_OP_STATUS_ENABLED: continue
+            num_devices_enabled += 1
+        LOGGER.info('Num Devices enabled: {:d}/{:d}'.format(num_devices_enabled, num_devices))
+        num_retry += 1
+    assert num_devices_enabled == num_devices
diff --git a/src/tests/eucnc24/tests/test_functional_cleanup.py b/src/tests/eucnc24/tests/test_functional_cleanup.py
new file mode 100644
index 0000000000000000000000000000000000000000..1adf8f2b97294e51b94b06e72b4c62d370542612
--- /dev/null
+++ b/src/tests/eucnc24/tests/test_functional_cleanup.py
@@ -0,0 +1,44 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId
+from common.tools.descriptor.Loader import DescriptorLoader, validate_empty_scenario
+from common.tools.object_factory.Context import json_context_id
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from tests.Fixtures import context_client, device_client    # pylint: disable=unused-import
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+DESCRIPTOR_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data', 'tfs-topology.json')
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
+
+def test_scenario_cleanup(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    device_client : DeviceClient,   # pylint: disable=redefined-outer-name
+) -> None:
+    # Verify the scenario has no services/slices
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0
+
+    # Validate the scenario against the descriptors and unload them
+    descriptor_loader = DescriptorLoader(
+        descriptors_file=DESCRIPTOR_FILE, context_client=context_client, device_client=device_client)
+    descriptor_loader.validate()
+    descriptor_loader.unload()
+    validate_empty_scenario(context_client)
diff --git a/src/tests/eucnc24/tests/test_functional_service_ietf.py b/src/tests/eucnc24/tests/test_functional_service_ietf.py
new file mode 100644
index 0000000000000000000000000000000000000000..a54084c579414d7b4df44a0b7ef313d3f091f7c1
--- /dev/null
+++ b/src/tests/eucnc24/tests/test_functional_service_ietf.py
@@ -0,0 +1,182 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json, logging, os, pytest
+from typing import Dict
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId, ServiceStatusEnum, ServiceTypeEnum
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from common.tools.object_factory.Context import json_context_id
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.client.ServiceClient import ServiceClient
+from tests.Fixtures import context_client, device_client, service_client        # pylint: disable=unused-import
+from .Tools import (
+    do_rest_delete_request, do_rest_get_request, do_rest_post_request,
+)
+
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+REQUEST_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data', 'ietf-l3vpn-service.json')
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
+
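+# Session-scoped storage shared by the tests below; keeps the vpn-id of the created service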
+@pytest.fixture(scope='session')
+def storage() -> Dict:
+    yield dict()
+
+# pylint: disable=redefined-outer-name, unused-argument
+def test_service_creation_ietf(
+    context_client : ContextClient,
+    device_client  : DeviceClient,
+    service_client : ServiceClient,
+    storage : Dict
+):
+    # Issue service creation request
+    with open(REQUEST_FILE, 'r', encoding='UTF-8') as f:
+        svc1_data = json.load(f)
+    URL = '/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services'
+    do_rest_post_request(URL, body=svc1_data, logger=LOGGER, expected_status_codes={201})
+    storage['svc-uuid'] = svc1_data['ietf-l3vpn-svc:l3vpn-svc']['vpn-services']['vpn-service'][0]['vpn-id']
+
+    # Verify service was created
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 1
+    assert len(response.slice_ids) == 0
+
+    # Check there is 1 service
+    response = context_client.ListServices(ADMIN_CONTEXT_ID)
+    LOGGER.warning('Services[{:d}] = {:s}'.format(
+        len(response.services), grpc_message_to_json_string(response)
+    ))
+    assert len(response.services) == 1
+
+    for service in response.services:
+        service_id = service.service_id
+        assert service.service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
+        assert service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM
+
+        response = context_client.ListConnections(service_id)
+        LOGGER.warning('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
+            grpc_message_to_json_string(service_id), len(response.connections),
+            grpc_message_to_json_string(response)
+        ))
+        assert len(response.connections) == 1
+
+
+# pylint: disable=redefined-outer-name, unused-argument
+def test_get_state_svc1(storage : Dict):
+    assert 'svc-uuid' in storage
+    service_uuid = storage['svc-uuid']
+    URL = '/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services/vpn-service={:s}/'.format(service_uuid)
+    do_rest_get_request(URL, logger=LOGGER, expected_status_codes={200})
+
+# pylint: disable=redefined-outer-name, unused-argument
+def test_delete_svc1(storage : Dict):
+    assert 'svc-uuid' in storage
+    service_uuid = storage['svc-uuid']
+    URL = '/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services/vpn-service={:s}/'.format(service_uuid)
+    do_rest_delete_request(URL, logger=LOGGER, expected_status_codes={204})
+
diff --git a/src/tests/eucnc24/tests/test_functional_service_tfs.py b/src/tests/eucnc24/tests/test_functional_service_tfs.py
new file mode 100644
index 0000000000000000000000000000000000000000..880211c39ddd7271b060111f9cae9bfaf3492a29
--- /dev/null
+++ b/src/tests/eucnc24/tests/test_functional_service_tfs.py
@@ -0,0 +1,125 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging, os
+from typing import Set, Tuple
+from common.Constants import DEFAULT_CONTEXT_NAME
+from common.proto.context_pb2 import ContextId, ServiceId, ServiceStatusEnum, ServiceTypeEnum
+from common.tools.descriptor.Loader import DescriptorLoader, check_descriptor_load_results
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from common.tools.object_factory.Context import json_context_id
+from common.tools.object_factory.Service import json_service_id
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from service.client.ServiceClient import ServiceClient
+from tests.Fixtures import context_client, device_client, service_client        # pylint: disable=unused-import
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+DESCRIPTOR_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'data', 'tfs-service.json')
+ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
+
+def test_service_creation_tfs(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    device_client  : DeviceClient,  # pylint: disable=redefined-outer-name
+    service_client : ServiceClient, # pylint: disable=redefined-outer-name
+):
+    # Load descriptors and validate the base scenario
+    descriptor_loader = DescriptorLoader(
+        descriptors_file=DESCRIPTOR_FILE, context_client=context_client,
+        device_client=device_client, service_client=service_client
+    )
+    results = descriptor_loader.process()
+    check_descriptor_load_results(results, descriptor_loader)
+
+    # Verify the scenario has 1 service and 0 slices
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 1
+    assert len(response.slice_ids) == 0
+
+    # Check there are no slices
+    response = context_client.ListSlices(ADMIN_CONTEXT_ID)
+    LOGGER.warning('Slices[{:d}] = {:s}'.format(
+        len(response.slices), grpc_message_to_json_string(response)
+    ))
+    assert len(response.slices) == 0
+
+    # Check there is 1 service
+    response = context_client.ListServices(ADMIN_CONTEXT_ID)
+    LOGGER.warning('Services[{:d}] = {:s}'.format(
+        len(response.services), grpc_message_to_json_string(response)
+    ))
+    assert len(response.services) == 1
+
+    for service in response.services:
+        service_id = service.service_id
+        assert service.service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
+        assert service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM
+
+        response = context_client.ListConnections(service_id)
+        LOGGER.warning('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
+            grpc_message_to_json_string(service_id), len(response.connections),
+            grpc_message_to_json_string(response)
+        ))
+        assert len(response.connections) == 1
+
+def test_service_removal_tfs(
+    context_client : ContextClient, # pylint: disable=redefined-outer-name
+    service_client : ServiceClient, # pylint: disable=redefined-outer-name
+):
+    # Verify the scenario has 1 service and 0 slices
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 1
+    assert len(response.slice_ids) == 0
+
+    # Check there are no slices
+    response = context_client.ListSlices(ADMIN_CONTEXT_ID)
+    LOGGER.warning('Slices[{:d}] = {:s}'.format(len(response.slices), grpc_message_to_json_string(response)))
+    assert len(response.slices) == 0
+
+    # Check there is 1 service
+    response = context_client.ListServices(ADMIN_CONTEXT_ID)
+    LOGGER.warning('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response)))
+    assert len(response.services) == 1
+
+    context_service_uuids : Set[Tuple[str, str]] = set()
+    for service in response.services:
+        service_id = service.service_id
+        assert service.service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
+
+        response = context_client.ListConnections(service_id)
+        LOGGER.warning('  ServiceId[{:s}] => Connections[{:d}] = {:s}'.format(
+            grpc_message_to_json_string(service_id), len(response.connections), grpc_message_to_json_string(response)))
+
+        if service.service_type == ServiceTypeEnum.SERVICETYPE_L3NM:
+            assert len(response.connections) == 1
+            context_uuid = service_id.context_id.context_uuid.uuid
+            service_uuid = service_id.service_uuid.uuid
+            context_service_uuids.add((context_uuid, service_uuid))
+        else:
+            str_service = grpc_message_to_json_string(service)
+            raise Exception('Unexpected ServiceType: {:s}'.format(str_service))
+
+    # Identify service to delete
+    assert len(context_service_uuids) == 1
+    context_uuid, service_uuid = set(context_service_uuids).pop()
+
+    # Delete Service
+    service_client.DeleteService(ServiceId(**json_service_id(service_uuid, json_context_id(context_uuid))))
+
+    # Verify the scenario has no services/slices
+    response = context_client.GetContext(ADMIN_CONTEXT_ID)
+    assert len(response.service_ids) == 0
+    assert len(response.slice_ids) == 0