diff --git a/src/service/service/ServiceServiceServicerImpl.py b/src/service/service/ServiceServiceServicerImpl.py
index e3c0e4e69c71bd4c78d7593c6636c820870346bb..84559e4a4eea33ed3748cacaf640d24f1c08ef6f 100644
--- a/src/service/service/ServiceServiceServicerImpl.py
+++ b/src/service/service/ServiceServiceServicerImpl.py
@@ -12,12 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import grpc, json, logging
+import grpc, json, logging, uuid
 from typing import Optional
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.method_wrappers.ServiceExceptions import (
-    AlreadyExistsException, InvalidArgumentException, NotFoundException, NotImplementedException)
-from common.proto.context_pb2 import Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum
+    AlreadyExistsException, InvalidArgumentException, NotFoundException, NotImplementedException,
+    OperationFailedException)
+from common.proto.context_pb2 import Connection, Empty, Service, ServiceId, ServiceStatusEnum, ServiceTypeEnum
 from common.proto.pathcomp_pb2 import PathCompRequest
 from common.proto.service_pb2_grpc import ServiceServiceServicer
 from common.tools.context_queries.Service import get_service_by_id
@@ -286,12 +287,30 @@ class ServiceServiceServicerImpl(ServiceServiceServicer):
         # compute a string representing the old connection
         str_old_connection = connection_to_string(old_connection)
 
+        LOGGER.debug('old_connection={:s}'.format(grpc_message_to_json_string(old_connection)))
+
         new_connection = None
         for candidate_new_connection in pathcomp_reply.connections:
             str_candidate_new_connection = connection_to_string(candidate_new_connection)
             if str_candidate_new_connection != str_old_connection:
                 new_connection = candidate_new_connection
                 break
+
+        if new_connection is None:
+            MSG = 'Unable to find a new suitable path: pathcomp_request={:s} pathcomp_reply={:s} old_connection={:s}'
+            str_pathcomp_request = grpc_message_to_json_string(pathcomp_request)
+            str_pathcomp_reply = grpc_message_to_json_string(pathcomp_reply)
+            str_old_connection = grpc_message_to_json_string(old_connection)
+            extra_details = MSG.format(str_pathcomp_request, str_pathcomp_reply, str_old_connection)
+            raise OperationFailedException('no-new-path-found', extra_details=extra_details)
+
+        LOGGER.debug('new_connection={:s}'.format(grpc_message_to_json_string(new_connection)))
+
+        # Change UUID of new connection to prevent collisions
+        tmp_connection = Connection()
+        tmp_connection.CopyFrom(new_connection)
+        tmp_connection.connection_id.connection_uuid.uuid = str(uuid.uuid4())
+        new_connection = tmp_connection
 
         # Feed TaskScheduler with the service to update, the old connection to
         # deconfigure and the new connection to configure. It will produce a
diff --git a/src/service/service/task_scheduler/TaskScheduler.py b/src/service/service/task_scheduler/TaskScheduler.py
index cf799f8d8193c0c0ab751e95f79fc90c83219bf2..fceed36e92771394dff9e9f45ef928a0175b8d32 100644
--- a/src/service/service/task_scheduler/TaskScheduler.py
+++ b/src/service/service/task_scheduler/TaskScheduler.py
@@ -232,7 +232,7 @@ class TasksScheduler:
         self._dag.add(service_active_key, service_updating_key)
 
         # re-activating the service depends on the new connection having been configured
-        self._dag.add(service_active_key, service_updating_key)
+        self._dag.add(service_active_key, new_connection_configure_key)
 
         t1 = time.time()
         LOGGER.debug('[compose_service_connection_update] elapsed_time: {:f} sec'.format(t1-t0))
@@ -243,9 +243,12 @@ class TasksScheduler:
 
         results = []
         for task_key in ordered_task_keys:
+            str_task_name = ('DRY ' if dry_run else '') + str(task_key)
+            LOGGER.debug('[execute_all] starting task {:s}'.format(str_task_name))
             task = self._tasks.get(task_key)
             succeeded = True if dry_run else task.execute()
             results.append(succeeded)
+            LOGGER.debug('[execute_all] finished task {:s} ; succeeded={:s}'.format(str_task_name, str(succeeded)))
 
         LOGGER.debug('[execute_all] results={:s}'.format(str(results)))
         return zip(ordered_task_keys, results)