diff --git a/manifests/automationservice.yaml b/manifests/automationservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f6c97f7fb7635e4b04431d41dbf20ca3edc51475
--- /dev/null
+++ b/manifests/automationservice.yaml
@@ -0,0 +1,80 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: automationservice
+spec:
+  selector:
+    matchLabels:
+      app: automationservice
+  replicas: 1
+  template:
+    metadata:
+      annotations:
+        # Required for IETF L2VPN SBI when both parent and child run in same K8s cluster with Linkerd
+        config.linkerd.io/skip-outbound-ports: "2002"
+      labels:
+        app: automationservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+        - name: server
+          image: labs.etsi.org:5050/tfs/controller/automation:latest
+          imagePullPolicy: Always
+          ports:
+            - containerPort: 30200
+            - containerPort: 9192
+          env:
+            - name: LOG_LEVEL
+              value: "INFO"
+          startupProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30200"]
+            failureThreshold: 30
+            periodSeconds: 1
+          readinessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30200"]
+          livenessProbe:
+            exec:
+              command: ["/bin/grpc_health_probe", "-addr=:30200"]
+          resources:
+            requests:
+              cpu: 250m
+              memory: 128Mi
+            limits:
+              cpu: 1000m
+              memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: automationservice
+  labels:
+    app: automationservice
+spec:
+  type: ClusterIP
+  selector:
+    app: automationservice
+  ports:
+    - name: grpc
+      protocol: TCP
+      port: 30200
+      targetPort: 30200
+    - name: metrics
+      protocol: TCP
+      port: 9192
+      targetPort: 9192
diff --git a/my_deploy.sh b/my_deploy.sh
index b89df7481ebd17edf2b966eb818598d1a04a596f..c3337ef39f087e19f3455d8a32e5448316f94250 100755
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -20,7 +20,7 @@
 export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/"
 
 # Set the list of components, separated by spaces, you want to build images for, and deploy.
-export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator"
+export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_generator automation"
 
 # Uncomment to activate Monitoring (old)
 #export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring"
@@ -67,7 +67,7 @@ export TFS_IMAGE_TAG="dev"
 
 # Set the name of the Kubernetes namespace to deploy TFS to.
 export TFS_K8S_NAMESPACE="tfs"
-
+
 # Set additional manifest files to be applied after the deployment
 export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml"
 
diff --git a/src/automation/Dockerfile b/src/automation/Dockerfile
index b43a0169437dbe7e9bb7cda82e5e1376b8fe5a4a..f5e7c100c454c974f63a85bfe678374a8fcc55ed 100644
--- a/src/automation/Dockerfile
+++ b/src/automation/Dockerfile
@@ -62,8 +62,12 @@ RUN python3 -m pip install -r requirements.txt
 
 # Add component files into working directory
 WORKDIR /var/teraflow
+COPY src/telemetry/frontend/__init__.py telemetry/frontend/__init__.py
+COPY src/telemetry/frontend/client/. telemetry/frontend/client/
 COPY src/context/__init__.py context/__init__.py
 COPY src/context/client/. context/client/
+COPY src/kpi_manager/__init__.py kpi_manager/__init__.py
+COPY src/kpi_manager/client/. kpi_manager/client/
 COPY src/monitoring/__init__.py monitoring/__init__.py
 COPY src/monitoring/client/. monitoring/client/
 COPY src/automation/. automation/
diff --git a/src/automation/client/PolicyClient.py b/src/automation/client/PolicyClient.py
new file mode 100644
index 0000000000000000000000000000000000000000..22f2aa18b520b8162b9df9ba3af3ebe1b77d5051
--- /dev/null
+++ b/src/automation/client/PolicyClient.py
@@ -0,0 +1,55 @@
+# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc, logging
+from common.Constants import ServiceNameEnum
+from common.Settings import get_service_host, get_service_port_grpc
+from common.proto.policy_pb2 import PolicyRuleService, PolicyRuleState
+from common.proto.policy_pb2_grpc import PolicyServiceStub
+from common.tools.client.RetryDecorator import retry, delay_exponential
+from common.tools.grpc.Tools import grpc_message_to_json_string
+from common.proto.openconfig_device_pb2_grpc import OpenConfigServiceStub
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+
+class PolicyClient:
+    def __init__(self, host=None, port=None):
+        if not host: host = get_service_host(ServiceNameEnum.POLICY)
+        if not port: port = get_service_port_grpc(ServiceNameEnum.POLICY)
+        self.endpoint = '{:s}:{:s}'.format(str(host), str(port))
+        LOGGER.info('Creating channel to {:s}...'.format(str(self.endpoint)))
+        self.channel = None
+        self.stub = None
+        self.openconfig_stub=None
+        self.connect()
+        LOGGER.info('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = PolicyServiceStub(self.channel)
+        self.openconfig_stub=OpenConfigServiceStub(self.channel)
+
+    def close(self):
+        if self.channel is not None: self.channel.close()
+        self.channel = None
+        self.stub = None
+
+    @RETRY_DECORATOR
+    def PolicyAddService(self, request : PolicyRuleService) -> PolicyRuleState:
+        LOGGER.debug('AddPolicy request: {:s}'.format(grpc_message_to_json_string(request)))
+        response = self.stub.PolicyAddService(request)
+        LOGGER.debug('AddPolicy result: {:s}'.format(grpc_message_to_json_string(response)))
+        return response
diff --git a/src/automation/service/AutomationServiceServicerImpl.py b/src/automation/service/AutomationServiceServicerImpl.py
index e3dd7864ef39af20a201d70ebfb2277cedaa891d..45a846f3b59ab908146fbd5e5b24053c5cbaff6c 100644
--- a/src/automation/service/AutomationServiceServicerImpl.py
+++ b/src/automation/service/AutomationServiceServicerImpl.py
@@ -18,16 +18,171 @@ from common.method_wrappers.Decorator import MetricsPool
 from common.proto.automation_pb2_grpc import AutomationServiceServicer
 from common.proto.automation_pb2 import ( ZSMCreateRequest , ZSMService ,ZSMServiceID ,ZSMServiceState,ZSMCreateUpdate , ZSMServiceStateEnum)
 from common.proto.context_pb2 import ( ServiceId , ContextId , Uuid , Empty)
+from common.proto.telemetry_frontend_pb2 import ( Collector , CollectorId )
 from common.proto.policy_pb2 import ( PolicyRuleList)
+from context.client.ContextClient import ContextClient
+from automation.client.PolicyClient import PolicyClient
+from telemetry.frontend.client.TelemetryFrontendClient import TelemetryFrontendClient
+from kpi_manager.client.KpiManagerClient import KpiManagerClient
+from common.proto.context_pb2 import ( Service )
+
+from common.proto.kpi_manager_pb2 import (KpiId, KpiDescriptor)
+from common.proto.policy_pb2 import PolicyRuleService, PolicyRuleState
+from common.proto.policy_action_pb2 import PolicyRuleAction , PolicyRuleActionConfig
+from common.proto.policy_condition_pb2 import PolicyRuleCondition
+from uuid import uuid4
+
+from common.method_wrappers.ServiceExceptions import InvalidArgumentException
 
 LOGGER = logging.getLogger(__name__)
 METRICS_POOL = MetricsPool('Automation', 'RPC')
 
 class AutomationServiceServicerImpl(AutomationServiceServicer):
+    def __init__(self):
+        LOGGER.info('Init AutomationService')
 
     @safe_and_metered_rpc_method(METRICS_POOL,LOGGER)
     def ZSMCreate(self, request : ZSMCreateRequest, context : grpc.ServicerContext) -> ZSMService:
-        LOGGER.info('NOT IMPLEMENTED ZSMCreate')
+
+        # check that service does not exist
+        context_client = ContextClient()
+        kpi_manager_client = KpiManagerClient()
+        policy_client = PolicyClient()
+        telemetry_frontend_client = TelemetryFrontendClient()
+
+        LOGGER.info('Trying to get the service ')
+        LOGGER.info('request.serviceId.service_uuid.uuid({:s})'.format(str(request.serviceId.service_uuid.uuid)))
+        LOGGER.info('request.serviceId.service_uuid({:s})'.format(str(request.serviceId.service_uuid)))
+        LOGGER.info('request.serviceId({:s})'.format(str(request.serviceId)))
+        LOGGER.info('Request({:s})'.format(str(request)))
+
+        try:
+
+            ####### GET Context #######################
+            service: Service = context_client.GetService(request.serviceId)
+            LOGGER.info('service({:s})'.format(str(service)))
+            ###########################################
+
+            ####### SET Kpi Descriptor LAT ################
+
+            # if(len(service.service_constraints) == 0):
+            #     raise InvalidArgumentException("argument_name" , "argument_value",  []);
+
+                # KPI Descriptor
+            kpi_descriptor_lat = KpiDescriptor()
+            kpi_descriptor_lat.kpi_sample_type = 701 #'KPISAMPLETYPE_SERVICE_LATENCY_MS'  #static service.service_constraints[].sla_latency.e2e_latency_ms
+            kpi_descriptor_lat.service_id.service_uuid.uuid = request.serviceId.service_uuid.uuid
+            kpi_descriptor_lat.kpi_id.kpi_id.uuid = str(uuid4())
+
+            kpi_id_lat: KpiId = kpi_manager_client.SetKpiDescriptor(kpi_descriptor_lat)
+            LOGGER.info('kpi_id_lat({:s})'.format(str(kpi_id_lat)))
+            ###########################################
+
+            ####### SET Kpi Descriptor TX ################
+            kpi_descriptor_tx = KpiDescriptor()
+            kpi_descriptor_tx.kpi_sample_type = 101  # static KPISAMPLETYPE_PACKETS_TRANSMITTED
+            kpi_descriptor_tx.service_id.service_uuid.uuid = request.serviceId.service_uuid.uuid
+            kpi_descriptor_tx.kpi_id.kpi_id.uuid = str(uuid4())
+
+            kpi_id_tx: KpiId = kpi_manager_client.SetKpiDescriptor(kpi_descriptor_tx)
+            LOGGER.info('kpi_id_tx({:s})'.format(str(kpi_id_tx)))
+            ###########################################
+
+            ####### SET Kpi Descriptor RX ################
+            kpi_descriptor_rx = KpiDescriptor()
+            kpi_descriptor_rx.kpi_sample_type = 102  # static KPISAMPLETYPE_PACKETS_RECEIVED
+            kpi_descriptor_rx.service_id.service_uuid.uuid = request.serviceId.service_uuid.uuid
+            kpi_descriptor_rx.kpi_id.kpi_id.uuid = str(uuid4())
+
+            kpi_id_rx: KpiId = kpi_manager_client.SetKpiDescriptor(kpi_descriptor_rx)
+            LOGGER.info('kpi_id_rx({:s})'.format(str(kpi_id_rx)))
+            ###########################################
+
+            ####### START Analyzer LAT ################
+            # analyzer = Analyzer()
+            # analyzer.algorithm_name = ''  # static
+            # analyzer.operation_mode = ''
+            # analyzer.input_kpi_ids[] = [kpi_id_rx,kpi_id_tx]
+            # analyzer.output_kpi_ids[] = [kpi_id_lat]
+            #
+            # analyzer_id_lat: AnalyzerId = analyzer_client.StartAnalyzer(analyzer)
+            # LOGGER.info('analyzer_id_lat({:s})'.format(str(analyzer_id_lat)))
+            ###########################################
+
+            ####### SET Policy LAT ################
+            policy_lat = PolicyRuleService()
+            policy_lat.serviceId.service_uuid.uuid = request.serviceId.service_uuid.uuid
+            policy_lat.serviceId.context_id.context_uuid.uuid = request.serviceId.context_id.context_uuid.uuid
+
+            # PolicyRuleBasic
+            policy_lat.policyRuleBasic.priority = 0
+            policy_lat.policyRuleBasic.policyRuleId.uuid.uuid = str(uuid4())
+            policy_lat.policyRuleBasic.booleanOperator = 2
+
+            # PolicyRuleAction
+            policyRuleActionConfig = PolicyRuleActionConfig()
+            policyRuleActionConfig.action_key = ""
+            policyRuleActionConfig.action_value = ""
+
+            policyRuleAction = PolicyRuleAction()
+            policyRuleAction.action = 5
+            policyRuleAction.action_config.append(policyRuleActionConfig)
+            policy_lat.policyRuleBasic.actionList.append(policyRuleAction)
+
+            # for constraint in service.service_constraints:
+
+                # PolicyRuleCondition
+            policyRuleCondition = PolicyRuleCondition()
+            policyRuleCondition.kpiId.kpi_id.uuid = kpi_id_lat.kpi_id.uuid
+            policyRuleCondition.numericalOperator = 5
+            policyRuleCondition.kpiValue.floatVal = 300 #constraint.sla_latency.e2e_latency_ms
+
+            policy_lat.policyRuleBasic.conditionList.append(policyRuleCondition)
+
+
+            policy_rule_state: PolicyRuleState = policy_client.PolicyAddService(policy_lat)
+            LOGGER.info('policy_rule_state({:s})'.format(str(policy_rule_state)))
+
+
+            ####### START Collector TX #################
+            collect_tx = Collector()
+            collect_tx.collector_id.collector_id.uuid = str(uuid4())
+            collect_tx.kpi_id.kpi_id.uuid = kpi_id_tx.kpi_id.uuid
+            collect_tx.duration_s = 0  # static
+            collect_tx.interval_s = 1  # static
+            LOGGER.info('Start Collector TX({:s})'.format(str(collect_tx)))
+
+            collect_id_tx: CollectorId = telemetry_frontend_client.StartCollector(collect_tx)
+            LOGGER.info('collect_id_tx({:s})'.format(str(collect_id_tx)))
+            #############################################
+
+            ####### START Collector RX ##################
+            collect_rx = Collector()
+            collect_rx.collector_id.collector_id.uuid = str(uuid4())
+            collect_rx.kpi_id.kpi_id.uuid = kpi_id_rx.kpi_id.uuid
+            collect_rx.duration_s = 0  # static
+            collect_rx.interval_s = 1  # static
+            LOGGER.info('Start Collector RX({:s})'.format(str(collect_rx)))
+
+            collect_id_rx: CollectorId = telemetry_frontend_client.StartCollector(collect_rx)
+            LOGGER.info('collect_id_rx({:s})'.format(str(collect_id_rx)))
+            ###############################################
+
+        except grpc.RpcError as e:
+            if e.code() != grpc.StatusCode.NOT_FOUND: raise  # pylint: disable=no-member
+            LOGGER.exception('Unable to get Service({:s})'.format(str(request)))
+            context_client.close()
+            kpi_manager_client.close()
+            policy_client.close()
+            telemetry_frontend_client.close()
+            return ZSMService()
+
+        LOGGER.info('Here is the service')
+
+        context_client.close()
+        kpi_manager_client.close()
+        policy_client.close()
+        telemetry_frontend_client.close()
         return ZSMService()
 
     @safe_and_metered_rpc_method(METRICS_POOL,LOGGER)
diff --git a/src/tests/hackfest3/tests/Objects.py b/src/tests/hackfest3/tests/Objects.py
index 0d49747b89647997d9f2aafbccc8aacc137793a4..e5f394070f403f780abe39a2c1e73288ff8cf3b1 100644
--- a/src/tests/hackfest3/tests/Objects.py
+++ b/src/tests/hackfest3/tests/Objects.py
@@ -60,7 +60,7 @@ DEVICE_SW1                  = json_device_p4_disabled(DEVICE_SW1_UUID)
 
 DEVICE_SW1_DPID             = 1
 DEVICE_SW1_NAME             = DEVICE_SW1_UUID
-DEVICE_SW1_IP_ADDR          = '192.168.6.38'
+DEVICE_SW1_IP_ADDR          = '192.168.5.131'
 DEVICE_SW1_PORT             = '50001'
 DEVICE_SW1_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW1_HW_VER           = 'BMv2 simple_switch'
@@ -100,7 +100,7 @@ DEVICE_SW2                  = json_device_p4_disabled(DEVICE_SW2_UUID)
 
 DEVICE_SW2_DPID             = 1
 DEVICE_SW2_NAME             = DEVICE_SW2_UUID
-DEVICE_SW2_IP_ADDR          = '192.168.6.38'
+DEVICE_SW2_IP_ADDR          = '192.168.5.131'
 DEVICE_SW2_PORT             = '50002'
 DEVICE_SW2_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW2_HW_VER           = 'BMv2 simple_switch'
@@ -138,7 +138,7 @@ DEVICE_SW3                  = json_device_p4_disabled(DEVICE_SW3_UUID)
 
 DEVICE_SW3_DPID             = 1
 DEVICE_SW3_NAME             = DEVICE_SW3_UUID
-DEVICE_SW3_IP_ADDR          = '192.168.6.38'
+DEVICE_SW3_IP_ADDR          = '192.168.5.131'
 DEVICE_SW3_PORT             = '50003'
 DEVICE_SW3_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW3_HW_VER           = 'BMv2 simple_switch'
@@ -176,7 +176,7 @@ DEVICE_SW4                  = json_device_p4_disabled(DEVICE_SW4_UUID)
 
 DEVICE_SW4_DPID             = 1
 DEVICE_SW4_NAME             = DEVICE_SW4_UUID
-DEVICE_SW4_IP_ADDR          = '192.168.6.38'
+DEVICE_SW4_IP_ADDR          = '192.168.5.131'
 DEVICE_SW4_PORT             = '50004'
 DEVICE_SW4_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW4_HW_VER           = 'BMv2 simple_switch'
diff --git a/src/tests/p4/tests/Objects.py b/src/tests/p4/tests/Objects.py
index 6004ea413ec6af3412780bb7b49f03edb0481b7e..d8f08c271d1f1c2ca36f6ac87b38dd0a47bc1cf8 100644
--- a/src/tests/p4/tests/Objects.py
+++ b/src/tests/p4/tests/Objects.py
@@ -60,7 +60,7 @@ DEVICE_SW1                  = json_device_p4_disabled(DEVICE_SW1_UUID)
 
 DEVICE_SW1_DPID             = 1
 DEVICE_SW1_NAME             = DEVICE_SW1_UUID
-DEVICE_SW1_IP_ADDR          = '192.168.6.38'
+DEVICE_SW1_IP_ADDR          = '192.168.5.131'
 DEVICE_SW1_PORT             = '50001'
 DEVICE_SW1_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW1_HW_VER           = 'BMv2 simple_switch'
@@ -102,7 +102,7 @@ DEVICE_SW2                  = json_device_p4_disabled(DEVICE_SW2_UUID)
 
 DEVICE_SW2_DPID             = 1
 DEVICE_SW2_NAME             = DEVICE_SW2_UUID
-DEVICE_SW2_IP_ADDR          = '192.168.6.38'
+DEVICE_SW2_IP_ADDR          = '192.168.5.131'
 DEVICE_SW2_PORT             = '50002'
 DEVICE_SW2_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW2_HW_VER           = 'BMv2 simple_switch'
@@ -140,7 +140,7 @@ DEVICE_SW3                  = json_device_p4_disabled(DEVICE_SW3_UUID)
 
 DEVICE_SW3_DPID             = 1
 DEVICE_SW3_NAME             = DEVICE_SW3_UUID
-DEVICE_SW3_IP_ADDR          = '192.168.6.38'
+DEVICE_SW3_IP_ADDR          = '192.168.5.131'
 DEVICE_SW3_PORT             = '50003'
 DEVICE_SW3_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW3_HW_VER           = 'BMv2 simple_switch'
@@ -178,7 +178,7 @@ DEVICE_SW4                  = json_device_p4_disabled(DEVICE_SW4_UUID)
 
 DEVICE_SW4_DPID             = 1
 DEVICE_SW4_NAME             = DEVICE_SW4_UUID
-DEVICE_SW4_IP_ADDR          = '192.168.6.38'
+DEVICE_SW4_IP_ADDR          = '192.168.5.131'
 DEVICE_SW4_PORT             = '50004'
 DEVICE_SW4_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW4_HW_VER           = 'BMv2 simple_switch'
@@ -216,7 +216,7 @@ DEVICE_SW5                  = json_device_p4_disabled(DEVICE_SW5_UUID)
 
 DEVICE_SW5_DPID             = 1
 DEVICE_SW5_NAME             = DEVICE_SW5_UUID
-DEVICE_SW5_IP_ADDR          = '192.168.6.38'
+DEVICE_SW5_IP_ADDR          = '192.168.5.131'
 DEVICE_SW5_PORT             = '50005'
 DEVICE_SW5_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW5_HW_VER           = 'BMv2 simple_switch'
@@ -254,7 +254,7 @@ DEVICE_SW6                  = json_device_p4_disabled(DEVICE_SW6_UUID)
 
 DEVICE_SW6_DPID             = 1
 DEVICE_SW6_NAME             = DEVICE_SW6_UUID
-DEVICE_SW6_IP_ADDR          = '192.168.6.38'
+DEVICE_SW6_IP_ADDR          = '192.168.5.131'
 DEVICE_SW6_PORT             = '50006'
 DEVICE_SW6_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW6_HW_VER           = 'BMv2 simple_switch'
@@ -292,7 +292,7 @@ DEVICE_SW7                  = json_device_p4_disabled(DEVICE_SW7_UUID)
 
 DEVICE_SW7_DPID             = 1
 DEVICE_SW7_NAME             = DEVICE_SW7_UUID
-DEVICE_SW7_IP_ADDR          = '192.168.6.38'
+DEVICE_SW7_IP_ADDR          = '192.168.5.131'
 DEVICE_SW7_PORT             = '50007'
 DEVICE_SW7_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW7_HW_VER           = 'BMv2 simple_switch'
@@ -330,7 +330,7 @@ DEVICE_SW8                  = json_device_p4_disabled(DEVICE_SW8_UUID)
 
 DEVICE_SW8_DPID             = 1
 DEVICE_SW8_NAME             = DEVICE_SW8_UUID
-DEVICE_SW8_IP_ADDR          = '192.168.6.38'
+DEVICE_SW8_IP_ADDR          = '192.168.5.131'
 DEVICE_SW8_PORT             = '50008'
 DEVICE_SW8_VENDOR           = 'Open Networking Foundation'
 DEVICE_SW8_HW_VER           = 'BMv2 simple_switch'
diff --git a/update_tfs_runtime_env_vars.sh b/update_tfs_runtime_env_vars.sh
index 209551c03a85909b5f02b70f22704675a2f64df5..63a692c9f62c361c2461adc16c11e59d7fee1b19 100755
--- a/update_tfs_runtime_env_vars.sh
+++ b/update_tfs_runtime_env_vars.sh
@@ -20,7 +20,7 @@
 
 # If not already set, set the list of components you want to build images for, and deploy.
 # By default, only basic components are deployed
-export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device monitoring service nbi webui"}
+export TFS_COMPONENTS=${TFS_COMPONENTS:-"context device monitoring service nbi webui automation"}
 
 # If not already set, set the name of the Kubernetes namespace to deploy to.
 export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}