diff --git a/src/common/Constants.py b/src/common/Constants.py index c0b4cbf0511884148de34fdd891a256796d7d26a..a7bf198a7204677ed3669fc28a2c3528a5936425 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -83,7 +83,6 @@ DEFAULT_SERVICE_HTTP_PORTS = { # Default HTTP/REST-API service base URLs DEFAULT_SERVICE_HTTP_BASEURLS = { - ServiceNameEnum.CONTEXT .value : '/api', - ServiceNameEnum.COMPUTE .value : '/restconf/data', + ServiceNameEnum.COMPUTE .value : '/restconf', ServiceNameEnum.WEBUI .value : None, } diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py index fc3b008b4004efe5afc270da65246c4635c777c3..5972d425be5298ec7fcb63bd28b50f3643363ae4 100644 --- a/src/common/tools/descriptor/Loader.py +++ b/src/common/tools/descriptor/Loader.py @@ -31,8 +31,8 @@ # for message,level in compose_notifications(results): # loggers.get(level)(message) -import json -from typing import Dict, List, Optional, Tuple, Union +import concurrent.futures, json, logging, operator +from typing import Any, Dict, List, Optional, Tuple, Union from common.proto.context_pb2 import Connection, Context, Device, Link, Service, Slice, Topology from context.client.ContextClient import ContextClient from device.client.DeviceClient import DeviceClient @@ -43,6 +43,8 @@ from .Tools import ( get_descriptors_add_contexts, get_descriptors_add_services, get_descriptors_add_slices, get_descriptors_add_topologies, split_devices_by_rules) +LOGGER = logging.getLogger(__name__) + ENTITY_TO_TEXT = { # name => singular, plural 'context' : ('Context', 'Contexts' ), @@ -79,7 +81,7 @@ def compose_notifications(results : TypeResults) -> TypeNotificationList: class DescriptorLoader: def __init__( - self, descriptors : Union[str, Dict], + self, descriptors : Union[str, Dict], num_workers : int = 1, context_client : Optional[ContextClient] = None, device_client : Optional[DeviceClient] = None, service_client : Optional[ServiceClient] = None, slice_client : Optional[SliceClient] = None ) -> None: @@ -93,6 +95,8 @@ class DescriptorLoader: self.__slices = self.__descriptors.get('slices' , []) self.__connections = self.__descriptors.get('connections', []) + self.__num_workers = num_workers + self.__contexts_add = None self.__topologies_add = None self.__devices_add = None @@ -242,12 +246,26 @@ class DescriptorLoader: #self.__dev_cli.close() #self.__ctx_cli.close() + @staticmethod + def worker(grpc_method, grpc_class, entity) -> Any: + return grpc_method(grpc_class(**entity)) + def _process_descr(self, entity_name, action_name, grpc_method, grpc_class, entities) -> None: num_ok, error_list = 0, [] - for entity in entities: - try: - grpc_method(grpc_class(**entity)) - num_ok += 1 - except Exception as e: # pylint: disable=broad-except - error_list.append(f'{str(entity)}: {str(e)}') + + with concurrent.futures.ThreadPoolExecutor(max_workers=self.__num_workers) as executor: + future_to_entity = { + executor.submit(DescriptorLoader.worker, grpc_method, grpc_class, entity): (i, entity) + for i,entity in enumerate(entities) + } + + for future in concurrent.futures.as_completed(future_to_entity): + i, entity = future_to_entity[future] + try: + _ = future.result() + num_ok += 1 + except Exception as e: # pylint: disable=broad-except + error_list.append((i, f'{str(entity)}: {str(e)}')) + + error_list = [str_error for _,str_error in sorted(error_list, key=operator.itemgetter(0))] self.__results.append((entity_name, action_name, num_ok, error_list)) diff --git a/src/compute/service/__main__.py 
b/src/compute/service/__main__.py index 887887e071d11cc19e9db5acb1485b162a899224..19a04c4c8ef4f77b3b7fb6949a1b567ef5cbec00 100644 --- a/src/compute/service/__main__.py +++ b/src/compute/service/__main__.py @@ -27,7 +27,7 @@ from .rest_server.nbi_plugins.ietf_network_slice import register_ietf_nss terminate = threading.Event() LOGGER = None -def signal_handler(signal, frame): # pylint: disable=redefined-outer-name +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name, unused-argument LOGGER.warning('Terminate signal received') terminate.set() diff --git a/src/compute/service/rest_server/nbi_plugins/debug_api/Resources.py b/src/compute/service/rest_server/nbi_plugins/debug_api/Resources.py index 0c66254d93220392d44c8393373ba94ddd7b3f93..67ef3dfb0ba1519440b0a22f46935165c8388cb8 100644 --- a/src/compute/service/rest_server/nbi_plugins/debug_api/Resources.py +++ b/src/compute/service/rest_server/nbi_plugins/debug_api/Resources.py @@ -12,48 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from flask.json import jsonify from flask_restful import Resource -from common.proto.context_pb2 import ConnectionId, ContextId, DeviceId, Empty, LinkId, ServiceId, SliceId, TopologyId -from common.proto.policy_pb2 import PolicyRuleId -from common.tools.grpc.Tools import grpc_message_to_json -from common.tools.object_factory.Connection import json_connection_id -from common.tools.object_factory.Context import json_context_id -from common.tools.object_factory.Device import json_device_id -from common.tools.object_factory.Link import json_link_id -from common.tools.object_factory.PolicyRule import json_policyrule_id -from common.tools.object_factory.Service import json_service_id -from common.tools.object_factory.Slice import json_slice_id -from common.tools.object_factory.Topology import json_topology_id +from common.proto.context_pb2 import Empty from context.client.ContextClient import ContextClient - - -def format_grpc_to_json(grpc_reply): - return jsonify(grpc_message_to_json(grpc_reply)) - -def grpc_connection_id(connection_uuid): - return ConnectionId(**json_connection_id(connection_uuid)) - -def grpc_context_id(context_uuid): - return ContextId(**json_context_id(context_uuid)) - -def grpc_device_id(device_uuid): - return DeviceId(**json_device_id(device_uuid)) - -def grpc_link_id(link_uuid): - return LinkId(**json_link_id(link_uuid)) - -def grpc_service_id(context_uuid, service_uuid): - return ServiceId(**json_service_id(service_uuid, context_id=json_context_id(context_uuid))) - -def grpc_slice_id(context_uuid, slice_uuid): - return SliceId(**json_slice_id(slice_uuid, context_id=json_context_id(context_uuid))) - -def grpc_topology_id(context_uuid, topology_uuid): - return TopologyId(**json_topology_id(topology_uuid, context_id=json_context_id(context_uuid))) - -def grpc_policy_rule_id(policy_rule_uuid): - return PolicyRuleId(**json_policyrule_id(policy_rule_uuid)) +from .Tools import ( + format_grpc_to_json, grpc_connection_id, grpc_context_id, grpc_device_id, grpc_link_id, grpc_policy_rule_id, + grpc_service_id, grpc_slice_id, grpc_topology_id) class _Resource(Resource): diff --git a/src/compute/service/rest_server/nbi_plugins/debug_api/Tools.py b/src/compute/service/rest_server/nbi_plugins/debug_api/Tools.py new file mode 100644 index 0000000000000000000000000000000000000000..f3dff545ba9812ff3f4e13c3da53774af7626014 --- /dev/null +++ b/src/compute/service/rest_server/nbi_plugins/debug_api/Tools.py @@ -0,0 +1,54 @@ +# 
Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from flask.json import jsonify +from common.proto.context_pb2 import ConnectionId, ContextId, DeviceId, LinkId, ServiceId, SliceId, TopologyId +from common.proto.policy_pb2 import PolicyRuleId +from common.tools.grpc.Tools import grpc_message_to_json +from common.tools.object_factory.Connection import json_connection_id +from common.tools.object_factory.Context import json_context_id +from common.tools.object_factory.Device import json_device_id +from common.tools.object_factory.Link import json_link_id +from common.tools.object_factory.PolicyRule import json_policyrule_id +from common.tools.object_factory.Service import json_service_id +from common.tools.object_factory.Slice import json_slice_id +from common.tools.object_factory.Topology import json_topology_id + + +def format_grpc_to_json(grpc_reply): + return jsonify(grpc_message_to_json(grpc_reply)) + +def grpc_connection_id(connection_uuid): + return ConnectionId(**json_connection_id(connection_uuid)) + +def grpc_context_id(context_uuid): + return ContextId(**json_context_id(context_uuid)) + +def grpc_device_id(device_uuid): + return DeviceId(**json_device_id(device_uuid)) + +def grpc_link_id(link_uuid): + return LinkId(**json_link_id(link_uuid)) + +def grpc_service_id(context_uuid, service_uuid): + return ServiceId(**json_service_id(service_uuid, context_id=json_context_id(context_uuid))) + +def grpc_slice_id(context_uuid, slice_uuid): + return SliceId(**json_slice_id(slice_uuid, context_id=json_context_id(context_uuid))) + +def grpc_topology_id(context_uuid, topology_uuid): + return TopologyId(**json_topology_id(topology_uuid, context_id=json_context_id(context_uuid))) + +def grpc_policy_rule_id(policy_rule_uuid): + return PolicyRuleId(**json_policyrule_id(policy_rule_uuid)) diff --git a/src/compute/service/rest_server/nbi_plugins/debug_api/__init__.py b/src/compute/service/rest_server/nbi_plugins/debug_api/__init__.py index d9243cca711a2b2ad00509102ecab5b06c6cc334..d1309353c412a738e2f2238d0bb4fff07765b825 100644 --- a/src/compute/service/rest_server/nbi_plugins/debug_api/__init__.py +++ b/src/compute/service/rest_server/nbi_plugins/debug_api/__init__.py @@ -12,52 +12,48 @@ # See the License for the specific language governing permissions and # limitations under the License. -# RFC 8466 - L2VPN Service Model (L2SM) -# Ref: https://datatracker.ietf.org/doc/html/rfc8466 - from compute.service.rest_server.RestServer import RestServer from .Resources import ( Connection, ConnectionIds, Connections, Context, ContextIds, Contexts, Device, DeviceIds, Devices, Link, LinkIds, Links, PolicyRule, PolicyRuleIds, PolicyRules, Service, ServiceIds, Services, Slice, SliceIds, Slices, Topologies, Topology, TopologyIds) -URL_PREFIX = '/api' +URL_PREFIX = '/debug-api' -# Use 'path' type in Service and Sink because service_uuid and link_uuid might contain char '/' and Flask is unable to -# recognize them in 'string' type. 
+# Use 'path' type since some identifiers might contain char '/' and Flask is unable to recognize them in 'string' type. RESOURCES = [ # (endpoint_name, resource_class, resource_url) ('api.context_ids', ContextIds, '/context_ids'), ('api.contexts', Contexts, '/contexts'), - ('api.context', Context, '/context/<string:context_uuid>'), + ('api.context', Context, '/context/<path:context_uuid>'), - ('api.topology_ids', TopologyIds, '/context/<string:context_uuid>/topology_ids'), - ('api.topologies', Topologies, '/context/<string:context_uuid>/topologies'), - ('api.topology', Topology, '/context/<string:context_uuid>/topology/<string:topology_uuid>'), + ('api.topology_ids', TopologyIds, '/context/<path:context_uuid>/topology_ids'), + ('api.topologies', Topologies, '/context/<path:context_uuid>/topologies'), + ('api.topology', Topology, '/context/<path:context_uuid>/topology/<path:topology_uuid>'), - ('api.service_ids', ServiceIds, '/context/<string:context_uuid>/service_ids'), - ('api.services', Services, '/context/<string:context_uuid>/services'), - ('api.service', Service, '/context/<string:context_uuid>/service/<path:service_uuid>'), + ('api.service_ids', ServiceIds, '/context/<path:context_uuid>/service_ids'), + ('api.services', Services, '/context/<path:context_uuid>/services'), + ('api.service', Service, '/context/<path:context_uuid>/service/<path:service_uuid>'), - ('api.slice_ids', SliceIds, '/context/<string:context_uuid>/slice_ids'), - ('api.slices', Slices, '/context/<string:context_uuid>/slices'), - ('api.slice', Slice, '/context/<string:context_uuid>/slice/<path:slice_uuid>'), + ('api.slice_ids', SliceIds, '/context/<path:context_uuid>/slice_ids'), + ('api.slices', Slices, '/context/<path:context_uuid>/slices'), + ('api.slice', Slice, '/context/<path:context_uuid>/slice/<path:slice_uuid>'), ('api.device_ids', DeviceIds, '/device_ids'), ('api.devices', Devices, '/devices'), - ('api.device', Device, '/device/<string:device_uuid>'), + ('api.device', Device, '/device/<path:device_uuid>'), ('api.link_ids', LinkIds, '/link_ids'), ('api.links', Links, '/links'), ('api.link', Link, '/link/<path:link_uuid>'), - ('api.connection_ids', ConnectionIds, '/context/<string:context_uuid>/service/<path:service_uuid>/connection_ids'), - ('api.connections', Connections, '/context/<string:context_uuid>/service/<path:service_uuid>/connections'), + ('api.connection_ids', ConnectionIds, '/context/<path:context_uuid>/service/<path:service_uuid>/connection_ids'), + ('api.connections', Connections, '/context/<path:context_uuid>/service/<path:service_uuid>/connections'), ('api.connection', Connection, '/connection/<path:connection_uuid>'), ('api.policyrule_ids', PolicyRuleIds, '/policyrule_ids'), ('api.policyrules', PolicyRules, '/policyrules'), - ('api.policyrule', PolicyRule, '/policyrule/<string:policyrule_uuid>'), + ('api.policyrule', PolicyRule, '/policyrule/<path:policyrule_uuid>'), ] def register_debug_api(rest_server : RestServer): diff --git a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py index 1b9027b1feb7c65c6fb3ee6ecdef485e4719a1b5..110c51af5fe0e4cd8e012fd4105712ed176dd12a 100644 --- a/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py +++ b/src/compute/service/rest_server/nbi_plugins/ietf_l2vpn/__init__.py @@ -21,7 +21,7 @@ from .L2VPN_Services import L2VPN_Services from .L2VPN_Service import L2VPN_Service from .L2VPN_SiteNetworkAccesses import L2VPN_SiteNetworkAccesses -URL_PREFIX = 
'/ietf-l2vpn-svc:l2vpn-svc' +URL_PREFIX = '/data/ietf-l2vpn-svc:l2vpn-svc' def _add_resource(rest_server : RestServer, resource : Resource, *urls, **kwargs): urls = [(URL_PREFIX + url) for url in urls] diff --git a/src/device/service/drivers/xr/README_XR.md b/src/device/service/drivers/xr/README_XR.md index 3bfdf5b019b3c36e7ded09d58ac625a48add36a9..c741c3e808ebddd20c9c4749064964594ea32b73 100644 --- a/src/device/service/drivers/xr/README_XR.md +++ b/src/device/service/drivers/xr/README_XR.md @@ -25,6 +25,19 @@ cd ~/.kube microk8s config > config ``` +Helm 3 is mandatory as of February 2023. Enable it with the microk8s command. Then create a wrapper shell script to expose it under the standard name: + +``` +sudo su - +cat > /usr/bin/helm3 +#!/bin/sh +microk8s.helm3 "$@" +^D +chmod 755 /usr/bin/helm3 +``` + +Using a symbolic link does not work, because snap wraps the real binary and it won't work if the name is different. + Local Docker registry is needed for build results. Use the following command to start local registry (docker will pull necessary images from Internet) ```bash @@ -32,23 +45,33 @@ docker run -d -p 32000:5000 --restart=always --name registry registry:2 ``` Setup mydeploy script outside the git repo. E.g. following will do. SOURCE IT ON ALL SHELLS. - -IMPORTANT: September 2022 version of controller has a bug where any update to device trigger update to device -until GRPC endpoints are so loaded that K8s kills device service. XR does not need automation service, so it can -be left out. +Use https://labs.etsi.org/rep/tfs/controller/-/blob/develop/my_deploy.sh as an example. +The script requires more variables than before as of February 2023. ```bash +# See https://labs.etsi.org/rep/tfs/controller/-/blob/develop/my_deploy.sh +# Use docker run -d -p 32000:5000 --restart=always --name registry registry:2 export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" -# Without automation service (see note above) -export TFS_COMPONENTS="context device pathcomp service slice compute monitoring webui" -# Correct setting -# export TFS_COMPONENTS="context device automation pathcomp service slice compute monitoring webui" -# Pre-rebase -#export TFS_COMPONENTS="context device automation service compute monitoring webui" +export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" export TFS_IMAGE_TAG="dev" export TFS_K8S_NAMESPACE="tfs" export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" export TFS_GRAFANA_PASSWORD="admin123+" +#export TFS_SKIP_BUILD="" +export CRDB_NAMESPACE="crdb" +export CRDB_USERNAME="tfs" +export CRDB_PASSWORD="tfs123" +export CRDB_DATABASE="tfs" +export CRDB_DEPLOY_MODE="single" +export CRDB_DROP_DATABASE_IF_EXISTS="" +export CRDB_REDEPLOY="" +export NATS_NAMESPACE="nats" +export NATS_REDEPLOY="" +export QDB_NAMESPACE="qdb" +export QDB_USERNAME="admin" +export QDB_PASSWORD="quest" +export QDB_TABLE="tfs_monitoring" +export QDB_REDEPLOY="" ``` Build is containerized, pytest used for setup is not. Teraflow has some third party venv suggestion in docs. However standard venv works. Create: @@ -114,11 +137,32 @@ Setup service by following commands in src directory. Kubernetes endpoints change python -m pytest --verbose tests/ofc22/tests/test_functional_create_service_xr.py ``` +For a topology different from the one used by test_functional_create/delete_service_xr.py, one can also +use the service-cli.py tool in the xr module directory.
It allows creation of ELINE services between +arbitrary endpoints in the topology (with consequent underlying XR service instantiation). Run in the +*xr module directory*. Representative examples: +``` + PYTHONPATH=../../../../ ./service-cli.py create 1 R1-EMU 13/1/2 500 2 R3-EMU 13/1/2 500 + PYTHONPATH=../../../../ ./service-cli.py list + PYTHONPATH=../../../../ ./service-cli.py delete 43a8046a-5dec-463d-82f7-7cc3442dbf4f +``` +The PYTHONPATH is mandatory. A suitable topology JSON must have been loaded beforehand. With the +CockroachDB persistence, it is sufficient to load the topology once and it will persist. + Good logs to check are: * kubectl logs service/deviceservice --namespace tfs * kubectl logs service/webuiservice --namespace tfs +The new 2.0 version of TeraFlow has a persistent database. To clean up any failed state +(e.g. from a debugging session), set the following before deploying: + +``` +export CRDB_DROP_DATABASE_IF_EXISTS=YES +``` + +In normal test runs it is not necessary to clear the database. However, DO NOT RE-UPLOAD THE TOPOLOGY JSON FILE if the DB has not been cleared. + ## Unit Tests Run in src directory (src under repo top level) with command: diff --git a/src/device/service/drivers/xr/XrDriver.py b/src/device/service/drivers/xr/XrDriver.py index 565e3692feb88dd07779bb5f777b0061028f9776..605f4ce8d0f9c875a4b1736ff0aaa02fcb468778 100644 --- a/src/device/service/drivers/xr/XrDriver.py +++ b/src/device/service/drivers/xr/XrDriver.py @@ -106,8 +106,10 @@ class XrDriver(_Driver): def SetConfig(self, resources: List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: LOGGER.info(f"SetConfig[{self}]: {resources=}") # Logged config seems like: + # Pre-February 2023 #[('/service[52ff5f0f-fda4-40bd-a0b1-066f4ff04079:optical]', '{"capacity_unit": "GHz", "capacity_value": 1, "direction": "UNIDIRECTIONAL", "input_sip": "XR HUB 1|XR-T4", "layer_protocol_name": "PHOTONIC_MEDIA", "layer_protocol_qualifier": "tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC", "output_sip": "XR LEAF 1|XR-T1", "uuid": "52ff5f0f-fda4-40bd-a0b1-066f4ff04079:optical"}')] - + # Post February 2023 +#[('/services/service[e1b9184c-767d-44b9-bf83-a1f643d82bef]', '{"capacity_unit": "GHz", "capacity_value": 50.0, "direction": "UNIDIRECTIONAL", "input_sip": "XR LEAF 1|XR-T1", "layer_protocol_name": "PHOTONIC_MEDIA", "layer_protocol_qualifier": "tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC", "output_sip": "XR HUB 1|XR-T4", "uuid": "e1b9184c-767d-44b9-bf83-a1f643d82bef"}')] with self.__lock: if self.__constellation is None: self.__constellation = self.__cm_connection.get_constellation_by_hub_name(self.__hub_module_name) @@ -157,7 +159,7 @@ class XrDriver(_Driver): else: LOGGER.info(f"DeleteConfig: Connection {service_uuid} delete failure (was {str(connection)})") - if self.__constellation.is_vti_mode(): + if connection.is_vti_mode(): active_tc = self.__cm_connection.get_transport_capacity_by_teraflow_uuid(service_uuid) if active_tc is not None: if self.__cm_connection.delete_transport_capacity(active_tc.href): diff --git a/src/device/service/drivers/xr/cm-cli.py b/src/device/service/drivers/xr/cm-cli.py old mode 100644 new mode 100755 diff --git a/src/device/service/drivers/xr/cm/cm_connection.py b/src/device/service/drivers/xr/cm/cm_connection.py index 8ee9ee236c6bcfd504d4044dd023ef3a61fe4802..7128494510f40914917d2c3981158b6dd3571c70 100644 --- a/src/device/service/drivers/xr/cm/cm_connection.py +++ b/src/device/service/drivers/xr/cm/cm_connection.py @@ -241,7 +241,7 @@ class CmConnection: return self.__acquire_access_token() def
list_constellations(self) -> List[Constellation]: - r = self.__get("/api/v1/ns/xr-networks?content=expanded") + r = self.__get("/api/v1/xr-networks?content=expanded") if not r.is_valid_json_list_with_status(200): return [] return [Constellation(c) for c in r.json] @@ -252,13 +252,13 @@ class CmConnection: ('content', 'expanded'), ('q', '{"hubModule.state.module.moduleName": "' + hub_module_name + '"}') ] - r = self.__get("/api/v1/ns/xr-networks?content=expanded", params=qparams) + r = self.__get("/api/v1/xr-networks?content=expanded", params=qparams) if not r.is_valid_json_list_with_status(200, 1, 1): return None return Constellation(r.json[0]) def get_transport_capacities(self) -> List[TransportCapacity]: - r= self.__get("/api/v1/ns/transport-capacities?content=expanded") + r= self.__get("/api/v1/transport-capacities?content=expanded") if not r.is_valid_json_list_with_status(200): return [] return [TransportCapacity(from_json=t) for t in r.json] @@ -268,7 +268,7 @@ class CmConnection: ('content', 'expanded'), ('q', '{"state.name": "' + tc_name + '"}') ] - r = self.__get("/api/v1/ns/transport-capacities?content=expanded", params=qparams) + r = self.__get("/api/v1/transport-capacities?content=expanded", params=qparams) if not r.is_valid_json_list_with_status(200, 1, 1): return TransportCapacity(from_json=r.json[0]) else: @@ -280,17 +280,17 @@ class CmConnection: def create_transport_capacity(self, tc: TransportCapacity) -> Optional[str]: # Create wants a list, so wrap connection to list tc_config = [tc.create_config()] - resp = self.__post("/api/v1/ns/transport-capacities", tc_config) + resp = self.__post("/api/v1/transport-capacities", tc_config) if resp.is_valid_json_list_with_status(202, 1, 1) and "href" in resp.json[0]: tc.href = resp.json[0]["href"] LOGGER.info(f"Created transport-capcity {tc}") - #LOGGER.info(self.__get(f"/api/v1/ns/transport-capacities{tc.href}?content=expanded")) + #LOGGER.info(self.__get(f"/api/v1/transport-capacities{tc.href}?content=expanded")) return tc.href else: return None def delete_transport_capacity(self, href: str) -> bool: - resp = self.__delete(f"/api/v1/ns/transport-capacities{href}") + resp = self.__delete(f"/api/v1/transport-capacities{href}") # Returns empty body if resp.is_valid_with_status_ignore_body(202): @@ -399,7 +399,7 @@ class CmConnection: # Create wants a list, so wrap connection to list cfg = [connection.create_config()] - resp = self.__post("/api/v1/ncs/network-connections", cfg) + resp = self.__post("/api/v1/network-connections", cfg) if resp.is_valid_json_list_with_status(202, 1, 1) and "href" in resp.json[0]: connection.href = resp.json[0]["href"] LOGGER.info(f"IPM accepted create request for connection {connection}") @@ -433,7 +433,7 @@ class CmConnection: # Perform deletes for ep_href in ep_deletes: - resp = self.__delete(f"/api/v1/ncs{ep_href}") + resp = self.__delete(f"/api/v1{ep_href}") if resp.is_valid_with_status_ignore_body(202): LOGGER.info(f"update_connection: EP-UPDATE: Deleted connection endpoint {ep_href}") else: @@ -441,21 +441,21 @@ class CmConnection: # Update capacities for otherwise similar endpoints for ep_href, ep_cfg in ep_updates: - resp = self.__put(f"/api/v1/ncs{ep_href}", ep_cfg) + resp = self.__put(f"/api/v1{ep_href}", ep_cfg) if resp.is_valid_with_status_ignore_body(202): LOGGER.info(f"update_connection: EP-UPDATE: Updated connection endpoint {ep_href} with {ep_cfg}") else: LOGGER.info(f"update_connection: EP-UPDATE: Failed to update connection endpoint {ep_href} with {ep_cfg}: {resp}") # Perform adds - resp 
= self.__post(f"/api/v1/ncs{href}/endpoints", ep_creates) + resp = self.__post(f"/api/v1{href}/endpoints", ep_creates) if resp.is_valid_json_list_with_status(202, 1, 1) and "href" in resp.json[0]: LOGGER.info(f"update_connection: EP-UPDATE: Created connection endpoints {resp.json[0]} with {ep_creates}") else: LOGGER.info(f"update_connection: EP-UPDATE: Failed to create connection endpoints {resp.json[0] if resp.json else None} with {ep_creates}: {resp}") # Connection update (excluding endpoints) - resp = self.__put(f"/api/v1/ncs{href}", cfg) + resp = self.__put(f"/api/v1{href}", cfg) # Returns empty body if resp.is_valid_with_status_ignore_body(202): LOGGER.info(f"update_connection: Updated connection {connection}") @@ -466,7 +466,7 @@ class CmConnection: return None def delete_connection(self, href: str) -> bool: - resp = self.__delete(f"/api/v1/ncs{href}") + resp = self.__delete(f"/api/v1{href}") #print(resp) # Returns empty body if resp.is_valid_with_status_ignore_body(202): @@ -489,7 +489,7 @@ class CmConnection: ('content', 'expanded'), ('q', '{"state.name": "' + connection_name + '"}') ] - r = self.__get("/api/v1/ncs/network-connections", params=qparams) + r = self.__get("/api/v1/network-connections", params=qparams) if r.is_valid_json_list_with_status(200, 1, 1): return Connection(from_json=r.json[0]) else: @@ -499,7 +499,7 @@ class CmConnection: qparams = [ ('content', 'expanded'), ] - r = self.__get(f"/api/v1/ncs{href}", params=qparams) + r = self.__get(f"/api/v1{href}", params=qparams) if r.is_valid_json_obj_with_status(200): return Connection(from_json=r.json) else: @@ -509,14 +509,14 @@ class CmConnection: return self.get_connection_by_name(f"TF:{uuid}") def get_connections(self): - r = self.__get("/api/v1/ncs/network-connections?content=expanded") + r = self.__get("/api/v1/network-connections?content=expanded") if r.is_valid_json_list_with_status(200): return [Connection(from_json=c) for c in r.json] else: return [] def service_uuid(self, key: str) -> Optional[str]: - service = re.match(r"^/service\[(.+)\]$", key) + service = re.match(r"^(?:/services)/service\[(.+)\]$", key) if service: return service.group(1) else: diff --git a/src/device/service/drivers/xr/cm/connection.py b/src/device/service/drivers/xr/cm/connection.py index 98736cce534685189069703d9560b9d34b1d8007..321922b1cb81eb1cedee673f40b232c038abd8af 100644 --- a/src/device/service/drivers/xr/cm/connection.py +++ b/src/device/service/drivers/xr/cm/connection.py @@ -165,6 +165,9 @@ class Connection: endpoints = ", ".join((str(ep) for ep in self.endpoints)) return f"name: {name}, id: {self.href}, service-mode: {self.serviceMode}, end-points: [{endpoints}]" + def is_vti_mode(self) -> bool: + return "XR-VTI-P2P" == self.serviceMode + def __guess_service_mode_from_emulated_enpoints(self): for ep in self.endpoints: if ep.vlan is not None: diff --git a/src/device/service/drivers/xr/cm/tests/test_cm_connection.py b/src/device/service/drivers/xr/cm/tests/test_cm_connection.py index 4f45be686c2c3a0f619d58230b2c52ed66a3eb6f..a7944ed220c6d68aad2f122a0bb0d2c1f83fdd06 100644 --- a/src/device/service/drivers/xr/cm/tests/test_cm_connection.py +++ b/src/device/service/drivers/xr/cm/tests/test_cm_connection.py @@ -59,7 +59,7 @@ def test_cmc_connect(): def test_cmc_get_constellations(): with mock_cm_connectivity() as m: - m.get("https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded", text=res_constellations) + m.get("https://127.0.0.1:9999/api/v1/xr-networks?content=expanded", text=res_constellations) cm = 
CmConnection("127.0.0.1", 9999, "xr-user", "xr-password", tls_verify=False) assert cm.Connect() @@ -73,7 +73,7 @@ def test_cmc_get_constellations(): ['XR HUB 1|XR-T1', 'XR HUB 1|XR-T2', 'XR HUB 1|XR-T3', 'XR HUB 1|XR-T4', 'XR LEAF 1|XR-T1', 'XR LEAF 2|XR-T1']] # Get constellation by hub module name - m.get("https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D", text=res_constellation_by_name_hub1) + m.get("https://127.0.0.1:9999/api/v1/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D", text=res_constellation_by_name_hub1) constellation = cm.get_constellation_by_hub_name("XR HUB 1") assert constellation assert constellation.ifnames() == ['XR HUB 1|XR-T1', 'XR HUB 1|XR-T2', 'XR HUB 1|XR-T3', 'XR HUB 1|XR-T4', 'XR LEAF 1|XR-T1', 'XR LEAF 2|XR-T1'] diff --git a/src/device/service/drivers/xr/cm/tests/test_xr_service_set_config.py b/src/device/service/drivers/xr/cm/tests/test_xr_service_set_config.py index 3bfd63def82ae89f53ab6ec3a5fc18bd79ecd38f..e9b16b62034bcd42061907d920b757b59766f562 100644 --- a/src/device/service/drivers/xr/cm/tests/test_xr_service_set_config.py +++ b/src/device/service/drivers/xr/cm/tests/test_xr_service_set_config.py @@ -38,8 +38,8 @@ with open(os.path.join(resources, "connections-expanded.json"), "r", encoding="U def mock_cm(): m = requests_mock.Mocker() m.post('https://127.0.0.1:9999/realms/xr-cm/protocol/openid-connect/token', text=access_token) - m.get("https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D", text=res_constellation_by_name_hub1) - m.post("https://127.0.0.1:9999/api/v1/ncs/network-connections", text='[{"href":"/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432","rt":["cm.network-connection"]}]', status_code=202) + m.get("https://127.0.0.1:9999/api/v1/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D", text=res_constellation_by_name_hub1) + m.post("https://127.0.0.1:9999/api/v1/network-connections", text='[{"href":"/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432","rt":["cm.network-connection"]}]', status_code=202) return m uuid = "12345ABCDEFGHIJKLMN" @@ -69,9 +69,9 @@ def test_xr_set_config(): called_mocks = [(r._request.method, r._request.url) for r in m._adapter.request_history] expected_mocks = [ ('POST', 'https://127.0.0.1:9999/realms/xr-cm/protocol/openid-connect/token'), # Authentication - ('GET', 'https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create - ('POST', 'https://127.0.0.1:9999/api/v1/ncs/network-connections') # Create + ('GET', 'https://127.0.0.1:9999/api/v1/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create + ('POST', 'https://127.0.0.1:9999/api/v1/network-connections') # Create ] assert called_mocks == expected_mocks @@ -97,7 +97,7 @@ def 
test_xr_set_config_consistency_lifecycle(): json_non_terminal = copy.deepcopy(json_terminal) json_non_terminal["state"]["lifecycleState"] = "pendingConfiguration" # We go trough 404 and non-terminal lstate first and then terminal state. - m.get("https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", + m.get("https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", [{'text': '', 'status_code': 404}, { 'json': json_non_terminal, 'status_code': 200 }, {'json': json_terminal, 'status_code': 200 }]) @@ -108,19 +108,19 @@ def test_xr_set_config_consistency_lifecycle(): called_mocks = [(r._request.method, r._request.url) for r in m._adapter.request_history] expected_mocks = [ ('POST', 'https://127.0.0.1:9999/realms/xr-cm/protocol/openid-connect/token'), # Authentication - ('GET', 'https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create - ('POST', 'https://127.0.0.1:9999/api/v1/ncs/network-connections'), # Create - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> no REST API object - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> non-terminal - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded') # Life cycle state check --> terminal + ('GET', 'https://127.0.0.1:9999/api/v1/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create + ('POST', 'https://127.0.0.1:9999/api/v1/network-connections'), # Create + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> no REST API object + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> non-terminal + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded') # Life cycle state check --> terminal ] assert called_mocks == expected_mocks ################################################################################ # Same as before, but without life cycle progress m.reset_mock() - m.get("https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", + m.get("https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", [{'text': '', 'status_code': 401}, { 'json': json_non_terminal, 'status_code': 200 }]) @@ -129,10 +129,10 @@ def test_xr_set_config_consistency_lifecycle(): called_mocks = [(r._request.method, r._request.url) for r in m._adapter.request_history] expected_mocks_no_connect = [ - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create - ('POST', 
'https://127.0.0.1:9999/api/v1/ncs/network-connections'), # Create - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> no REST API object - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> non-terminal + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create + ('POST', 'https://127.0.0.1:9999/api/v1/network-connections'), # Create + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> no REST API object + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432?content=expanded'), # Life cycle state check --> non-terminal ] assert called_mocks == repeat_last_expected(expected_mocks_no_connect, called_mocks) @@ -143,7 +143,7 @@ def test_xr_set_config_consistency_lifecycle(): assert cm.Connect() constellation = cm.get_constellation_by_hub_name("XR HUB 1") assert constellation - m.get("https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", + m.get("https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", [{'text': '', 'status_code': 401}, { 'json': json_non_terminal, 'status_code': 200 }]) result = set_config_for_service(cm, constellation, uuid, config) @@ -158,7 +158,7 @@ def test_xr_set_config_consistency_lifecycle(): assert cm.Connect() constellation = cm.get_constellation_by_hub_name("XR HUB 1") assert constellation - m.get("https://127.0.0.1:9999/api/v1/ncs/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", + m.get("https://127.0.0.1:9999/api/v1/network-connections/c3b31608-0bb7-4a4f-9f9a-88b24a059432", [{'text': '', 'status_code': 401}]) result = set_config_for_service(cm, constellation, uuid, config) _validate_result(result, False) @@ -175,15 +175,15 @@ def test_xr_set_config_update_case(): assert constellation # Fake existing service (--> update path is taken) - m.get("https://127.0.0.1:9999/api/v1/ncs/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D", json=res_connection_by_name_json) + m.get("https://127.0.0.1:9999/api/v1/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D", json=res_connection_by_name_json) # Delete endpoint that is no longer necessary - m.delete("https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/1d58ba8f-4d51-4213-83e1-97a0e0bdd388", text="", status_code = 202) + m.delete("https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/1d58ba8f-4d51-4213-83e1-97a0e0bdd388", text="", status_code = 202) # Update changed endpoint - m.put("https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/230516d0-7e38-44b1-b174-1ba7d4454ee6", text="", status_code = 202) + m.put("https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/230516d0-7e38-44b1-b174-1ba7d4454ee6", text="", status_code = 202) # Create the newly added endpoint - m.post("https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints", 
json=[{"href":"/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoint/somethingplausible","rt":["plausible"]}], status_code=202) + m.post("https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints", json=[{"href":"/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoint/somethingplausible","rt":["plausible"]}], status_code=202) # Update the connection itself - m.put("https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03", text="", status_code=202) + m.put("https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03", text="", status_code=202) result = set_config_for_service(cm, constellation, uuid, config) _validate_result(result, True) @@ -191,11 +191,11 @@ def test_xr_set_config_update_case(): called_mocks = [(r._request.method, r._request.url) for r in m._adapter.request_history] expected_mocks = [ ('POST', 'https://127.0.0.1:9999/realms/xr-cm/protocol/openid-connect/token'), # Authentication - ('GET', 'https://127.0.0.1:9999/api/v1/ns/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name - ('GET', 'https://127.0.0.1:9999/api/v1/ncs/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create - ('DELETE', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/1d58ba8f-4d51-4213-83e1-97a0e0bdd388'), # Delete unnecessary endpoint - ('PUT', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/230516d0-7e38-44b1-b174-1ba7d4454ee6'), # Update changed endpoint - ('POST', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints'), # Add new endpoint - ('PUT', 'https://127.0.0.1:9999/api/v1/ncs/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03') # Update the connection itself + ('GET', 'https://127.0.0.1:9999/api/v1/xr-networks?content=expanded&content=expanded&q=%7B%22hubModule.state.module.moduleName%22%3A+%22XR+HUB+1%22%7D'), # Hub module by name + ('GET', 'https://127.0.0.1:9999/api/v1/network-connections?content=expanded&q=%7B%22state.name%22%3A+%22TF%3A12345ABCDEFGHIJKLMN%22%7D'), # Get by name, determine update or create + ('DELETE', 'https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/1d58ba8f-4d51-4213-83e1-97a0e0bdd388'), # Delete unnecessary endpoint + ('PUT', 'https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints/230516d0-7e38-44b1-b174-1ba7d4454ee6'), # Update changed endpoint + ('POST', 'https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03/endpoints'), # Add new endpoint + ('PUT', 'https://127.0.0.1:9999/api/v1/network-connections/4505d5d3-b2f3-40b8-8ec2-4a5b28523c03') # Update the connection itself ] assert called_mocks == expected_mocks diff --git a/src/device/service/drivers/xr/service-cli.py b/src/device/service/drivers/xr/service-cli.py new file mode 100755 index 0000000000000000000000000000000000000000..01bd2aaa118225cf74a953fff81b54abb857e39b --- /dev/null +++ b/src/device/service/drivers/xr/service-cli.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +#pylint: disable=invalid-name, missing-function-docstring, line-too-long, logging-fstring-interpolation, missing-class-docstring, missing-module-docstring +# Copyright 2022-2023 ETSI 
TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Manage L2 services (with underlying XR connectivity) without the need to use unit test +# files or excessive JSON definitions +# +# Run in this directory with PYTHONPATH=../../../../ +# E.g.: +# PYTHONPATH=../../../../ ./service-cli.py create 1 R1-EMU 13/1/2 500 2 R3-EMU 13/1/2 500 +# PYTHONPATH=../../../../ ./service-cli.py list +# PYTHONPATH=../../../../ ./service-cli.py delete 43a8046a-5dec-463d-82f7-7cc3442dbf4f + + +import argparse +import logging +import traceback +from contextlib import contextmanager + +from common.Settings import get_setting +from context.client.ContextClient import ContextClient +from tests.tools.mock_osm.MockOSM import MockOSM +from common.proto.context_pb2 import ContextId, ServiceTypeEnum, ServiceStatusEnum +from common.tools.grpc.Tools import grpc_message_to_json_string + +LOGGER = logging.getLogger(__name__) + +WIM_USERNAME = 'admin' +WIM_PASSWORD = 'admin' + +@contextmanager +def make_context_client(): + try: + _client = ContextClient(get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')) + yield _client + finally: + _client.close() + +def make_osm_wim(): + wim_url = 'http://{:s}:{:s}'.format( + get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP'))) + return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD) + +logging.basicConfig(level=logging.ERROR) + +parser = argparse.ArgumentParser(description='TF Service Management Utility') +subparsers = parser.add_subparsers(dest="command") +subparsers.required = True + +create_parser = subparsers.add_parser('create') +create_parser.add_argument('site1', type=int, help='One endpoint of the service, e.g. 1') +create_parser.add_argument('device1', type=str, help='One endpoint of the service, e.g. R1-EMU') +create_parser.add_argument('interface1', type=str, help='One endpoint of the service, e.g. 13/1/2') +create_parser.add_argument('vlan1', type=int, help='VLAN in first endpoint, e.g. 500') + +create_parser.add_argument('site2', type=int, help='One endpoint of the service, e.g. 2') +create_parser.add_argument('device2', type=str, help='One endpoint of the service, e.g. R3-EMU') +create_parser.add_argument('interface2', type=str, help='One endpoint of the service, e.g. 13/1/2') +create_parser.add_argument('vlan2', type=int, help='VLAN in second endpoint, e.g. 500') + +delete_parser = subparsers.add_parser('delete') +delete_parser.add_argument('service_uuid', type=str, help='UUID of the service to be deleted') + +list_parser = subparsers.add_parser('list') + +args = parser.parse_args() + +WIM_SERVICE_TYPE = 'ELINE' +CONTEXT_ID = {'context_uuid': {'uuid': 'admin'}} + +if args.command == "create": + endpoint1 = f"{args.device1}:{args.interface1}" + endpoint2 = f"{args.device2}:{args.interface2}" + + WIM_MAPPING = [ + {'device-id': args.device1, 'service_endpoint_id': endpoint1, + 'service_mapping_info': {'bearer': {'bearer-reference': endpoint1}, 'site-id': args.site1}}, + {'device-id': args.device2, 'service_endpoint_id': endpoint2, + 'service_mapping_info': {'bearer': {'bearer-reference': endpoint2}, 'site-id': args.site2}}, + ] + WIM_SERVICE_CONNECTION_POINTS = [ + {'service_endpoint_id': endpoint1, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': args.vlan1}}, + {'service_endpoint_id': endpoint2, + 'service_endpoint_encapsulation_type': 'dot1q', + 'service_endpoint_encapsulation_info': {'vlan': args.vlan2}}, + ] +else: + WIM_MAPPING = [] + WIM_SERVICE_CONNECTION_POINTS = [] + +#print(str(args)) +print(f"=== WIM_SERVICE_TYPE: {WIM_SERVICE_TYPE}") +print(f"=== WIM_SERVICE_CONNECTION_POINTS: {WIM_SERVICE_CONNECTION_POINTS}") +print(f"=== WIM_MAPPING: {WIM_MAPPING}") + +with make_context_client() as client: + osm_wim = make_osm_wim() + + if args.command == "create": + service_uuid = osm_wim.create_connectivity_service(WIM_SERVICE_TYPE, WIM_SERVICE_CONNECTION_POINTS) + print(f"*** Create connectivity service --> {service_uuid}") + status = osm_wim.get_connectivity_service_status(service_uuid) + print(f"*** Get created service status --> {str(status)}") + + elif args.command == "delete": + osm_wim.wim.check_credentials() + try: + osm_wim.wim.delete_connectivity_service(args.service_uuid) + print(f"*** Service {args.service_uuid} is no longer present (delete was successful or service did not exist)") + except Exception as e: + print(f"*** Failed to delete service {args.service_uuid}, {e}") + elif args.command == "list": + response = client.ListServices(ContextId(**CONTEXT_ID)) + + #print('Services[{:d}] = {:s}'.format(len(response.services), grpc_message_to_json_string(response))) + for service in response.services: + scs = "" + + # See if there are endpoint constraints that might be recognizable by the user. + # Keys do not necessarily exist, so catch exceptions and ignore those constraints + # that we cannot easily represent. 
+ for sc in service.service_constraints: + try: + scs += f"{sc.endpoint_location.endpoint_id.device_id.device_uuid.uuid}:{sc.endpoint_location.endpoint_id.endpoint_uuid.uuid} " + except Exception: + pass + + print(f"{service.service_id.service_uuid.uuid:36} {ServiceTypeEnum.Name(service.service_type):40} {ServiceStatusEnum.Name(service.service_status.service_status)} {scs}") + + diff --git a/src/service/service/service_handlers/p4/p4_service_handler.py b/src/service/service/service_handlers/p4/p4_service_handler.py index 500c50378401c016a6cf30c73c78149e2097d2b8..6f2cfb5a9bc4dac991eecd14ba7b6eb1218bdaa2 100644 --- a/src/service/service/service_handlers/p4/p4_service_handler.py +++ b/src/service/service/service_handlers/p4/p4_service_handler.py @@ -47,7 +47,7 @@ def create_rule_set(endpoint_a, endpoint_b): } ] } -) + ) def create_rule_del(endpoint_a, endpoint_b): return json_config_rule_delete( @@ -68,7 +68,17 @@ def create_rule_del(endpoint_a, endpoint_b): } ] } -) + ) + +def find_names(uuid_a, uuid_b, device_endpoints): + endpoint_a, endpoint_b = None, None + for endpoint in device_endpoints: + if endpoint.endpoint_id.endpoint_uuid.uuid == uuid_a: + endpoint_a = endpoint.name + elif endpoint.endpoint_id.endpoint_uuid.uuid == uuid_b: + endpoint_b = endpoint.name + + return (endpoint_a, endpoint_b) class P4ServiceHandler(_ServiceHandler): def __init__(self, @@ -127,12 +137,21 @@ class P4ServiceHandler(_ServiceHandler): device = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) del device.device_config.config_rules[:] + + # Find names from uuids + (endpoint_a, endpoint_b) = find_names(matched_endpoint_uuid, endpoint_uuid, device.device_endpoints) + if endpoint_a is None: + LOGGER.exception('Unable to find name of endpoint({:s})'.format(str(matched_endpoint_uuid))) + raise Exception('Unable to find name of endpoint({:s})'.format(str(matched_endpoint_uuid))) + if endpoint_b is None: + LOGGER.exception('Unable to find name of endpoint({:s})'.format(str(endpoint_uuid))) + raise Exception('Unable to find name of endpoint({:s})'.format(str(endpoint_uuid))) # One way - rule = create_rule_set(matched_endpoint_uuid, endpoint_uuid) + rule = create_rule_set(endpoint_a, endpoint_b) device.device_config.config_rules.append(ConfigRule(**rule)) # The other way - rule = create_rule_set(endpoint_uuid, matched_endpoint_uuid) + rule = create_rule_set(endpoint_b, endpoint_a) device.device_config.config_rules.append(ConfigRule(**rule)) self.__task_executor.configure_device(device) @@ -189,11 +208,20 @@ class P4ServiceHandler(_ServiceHandler): del device.device_config.config_rules[:] + # Find names from uuids + (endpoint_a, endpoint_b) = find_names(matched_endpoint_uuid, endpoint_uuid, device.device_endpoints) + if endpoint_a is None: + LOGGER.exception('Unable to find name of endpoint({:s})'.format(str(matched_endpoint_uuid))) + raise Exception('Unable to find name of endpoint({:s})'.format(str(matched_endpoint_uuid))) + if endpoint_b is None: + LOGGER.exception('Unable to find name of endpoint({:s})'.format(str(endpoint_uuid))) + raise Exception('Unable to find name of endpoint({:s})'.format(str(endpoint_uuid))) + # One way - rule = create_rule_del(matched_endpoint_uuid, endpoint_uuid) + rule = create_rule_del(endpoint_a, endpoint_b) device.device_config.config_rules.append(ConfigRule(**rule)) # The other way - rule = create_rule_del(endpoint_uuid, matched_endpoint_uuid) + rule = create_rule_del(endpoint_b, endpoint_a) device.device_config.config_rules.append(ConfigRule(**rule)) 
self.__task_executor.configure_device(device) diff --git a/src/tests/ofc22/descriptors_emulated_xr.json b/src/tests/ofc22/descriptors_emulated_xr.json index d6a2f023422902bfc3d216771092f6081d8cf6b5..4e247bb30d4df25fa75d30a3baa94f1348c0a6d9 100644 --- a/src/tests/ofc22/descriptors_emulated_xr.json +++ b/src/tests/ofc22/descriptors_emulated_xr.json @@ -31,7 +31,10 @@ "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -43,7 +46,10 @@ "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -55,7 +61,10 @@ "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} ]}, "device_operational_status": 1, "device_drivers": [0], @@ -67,7 +76,10 @@ "device_config": {"config_rules": [ {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, - {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"13/0/0\"}, {\"sample_types\": [101, 102, 201, 202], \"type\": \"copper\", \"uuid\": \"13/1/2\"}]}"}} + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "13/0/0", "type": "optical", "sample_types": []}, + {"uuid": "13/1/2", "type": "copper", "sample_types": [101, 102, 201, 202]} + ]}}} ]}, "device_operational_status": 1, "device_drivers": [0], diff --git 
a/src/tests/p4/tests/test_functional_bootstrap.py b/src/tests/p4/tests/test_functional_bootstrap.py index 11b24adf137f0b06d1176b440a7fd93b5ad24e80..5e39490f23fe0635eaee502c1b8b8ffb9566f307 100644 --- a/src/tests/p4/tests/test_functional_bootstrap.py +++ b/src/tests/p4/tests/test_functional_bootstrap.py @@ -25,6 +25,10 @@ from common.proto.context_pb2 import ConfigActionEnum, Context, ContextId, Devic from device.client.DeviceClient import DeviceClient from .Objects import CONTEXT_ID, CONTEXTS, DEVICES, LINKS, TOPOLOGIES +from common.tools.object_factory.ConfigRule import ( + json_config_rule_set, json_config_rule_delete) + + LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) @@ -48,15 +52,18 @@ def test_prepare_scenario(context_client : ContextClient): # pylint: disable=re context_uuid = context['context_id']['context_uuid']['uuid'] LOGGER.info('Adding Context {:s}'.format(context_uuid)) response = context_client.SetContext(Context(**context)) - assert response.context_uuid.uuid == context_uuid + context_data = context_client.GetContext(response) + assert context_data.name == context_uuid for topology in TOPOLOGIES: context_uuid = topology['topology_id']['context_id']['context_uuid']['uuid'] topology_uuid = topology['topology_id']['topology_uuid']['uuid'] LOGGER.info('Adding Topology {:s}/{:s}'.format(context_uuid, topology_uuid)) response = context_client.SetTopology(Topology(**topology)) - assert response.context_id.context_uuid.uuid == context_uuid - assert response.topology_uuid.uuid == topology_uuid +# assert response.context_id.context_uuid.uuid == context_uuid + + topology_data = context_client.GetTopology(response) + assert topology_data.name == topology_uuid context_id = json_context_id(context_uuid) @@ -81,18 +88,25 @@ def test_devices_bootstraping( device_p4_with_connect_rules = copy.deepcopy(device) device_p4_with_connect_rules['device_config']['config_rules'].extend(connect_rules) + device_p4_with_connect_rules['device_operational_status'] = \ + DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED response = device_client.AddDevice(Device(**device_p4_with_connect_rules)) - assert response.device_uuid.uuid == device_uuid + LOGGER.info('Adding Device {:s}'.format(device_uuid)) device_p4_with_endpoints = copy.deepcopy(device) + device_p4_with_endpoints['device_id']['device_uuid']['uuid'] = response.device_uuid.uuid device_p4_with_endpoints['device_endpoints'].extend(endpoints) + for i in device_p4_with_endpoints['device_endpoints']: + i['endpoint_id']['device_id']['device_uuid']['uuid'] = response.device_uuid.uuid + + LOGGER.info('Adding Endpoints {:s}'.format(device_uuid)) device_client.ConfigureDevice(Device(**device_p4_with_endpoints)) for link in LINKS: link_uuid = link['link_id']['link_uuid']['uuid'] LOGGER.info('Adding Link {:s}'.format(link_uuid)) response = context_client.SetLink(Link(**link)) - assert response.link_uuid.uuid == link_uuid + assert response.name == link_uuid context_client.SetLink(Link(**link)) def test_devices_bootstrapped(context_client : ContextClient): # pylint: disable=redefined-outer-name diff --git a/src/tests/p4/tests/test_functional_create_service.py b/src/tests/p4/tests/test_functional_create_service.py index f160d3c6fbe4d560f821d0d70e90a2b3e44e4e8b..beaa23ba3e056fabb528fc7dc5dbebb43b0f019b 100644 --- a/src/tests/p4/tests/test_functional_create_service.py +++ b/src/tests/p4/tests/test_functional_create_service.py @@ -56,12 +56,12 @@ def test_rules_entry( - for device, _, __ in DEVICES: - # Enable device - 
device_p4_with_operational_status = copy.deepcopy(device) - device_p4_with_operational_status['device_operational_status'] = \ - DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED - device_client.ConfigureDevice(Device(**device_p4_with_operational_status)) +# for device, _, __ in DEVICES: +# # Enable device +# device_p4_with_operational_status = copy.deepcopy(device) +# device_p4_with_operational_status['device_operational_status'] = \ +# DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_ENABLED +# device_client.ConfigureDevice(Device(**device_p4_with_operational_status)) # ----- Create Services --------------------------------------------------------------- for service, endpoints in SERVICES: @@ -71,23 +71,4 @@ def test_rules_entry( service_p4 = copy.deepcopy(service) service_client.CreateService(Service(**service_p4)) service_p4['service_endpoint_ids'].extend(endpoints) - service_client.UpdateService(Service(**service_p4)) - - - -""" -con_cl = ContextClient(get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC')) -dev_cl = DeviceClient(get_setting('DEVICESERVICE_SERVICE_HOST'), get_setting('DEVICESERVICE_SERVICE_PORT_GRPC')) -srv_cl = ServiceClient(get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC')) - -for service, endpoints in SERVICES: - service_uuid = service['service_id']['service_uuid']['uuid'] - print('Creating Service {:s}'.format(service_uuid)) - service_p4 = copy.deepcopy(service) - srv_cl.CreateService(Service(**service_p4)) - #service_data = con_cl.GetService(ServiceId(**json_service_id('svc1'))) - #print('service_data = {:s}'.format(grpc_message_to_json_string(service_data))) - service_p4 = copy.deepcopy(service) - service_p4['service_endpoint_ids'].extend(endpoints) - srv_cl.UpdateService(Service(**service_p4)) -""" \ No newline at end of file + service_client.UpdateService(Service(**service_p4)) \ No newline at end of file diff --git a/src/tests/p4/tests/test_functional_delete_service.py b/src/tests/p4/tests/test_functional_delete_service.py index 4d637cf88d840a20f38855beb7839e2b704016d4..c5821df4ccc1caa2a1d72ed98dbfcb82e9db21b1 100644 --- a/src/tests/p4/tests/test_functional_delete_service.py +++ b/src/tests/p4/tests/test_functional_delete_service.py @@ -60,10 +60,3 @@ def test_rules_delete( print('Deleting Service {:s}'.format(service_uuid)) service_p4 = copy.deepcopy(service) response = service_client.DeleteService(ServiceId(**json_service_id(service_uuid, CONTEXT_ID))) - - # ----- Disable Devices --------------------------------------------------------------- - for device, _, _ in DEVICES: - device_p4_with_operational_status = copy.deepcopy(device) - device_p4_with_operational_status['device_operational_status'] = \ - DeviceOperationalStatusEnum.DEVICEOPERATIONALSTATUS_DISABLED - device_client.ConfigureDevice(Device(**device_p4_with_operational_status)) diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py index 38d13aad562f3e55490952db84ef784f87697739..dcbbf71a6fee6ebd040f14c7d0d2cb07ba9ee085 100644 --- a/src/webui/service/main/routes.py +++ b/src/webui/service/main/routes.py @@ -34,6 +34,8 @@ slice_client = SliceClient() LOGGER = logging.getLogger(__name__) +DESCRIPTOR_LOADER_NUM_WORKERS = 10 + def process_descriptors(descriptors): try: descriptors_file = request.files[descriptors.name] @@ -43,7 +45,7 @@ def process_descriptors(descriptors): flash(f'Unable to load descriptor file: {str(e)}', 'danger') return - descriptor_loader = 
DescriptorLoader(descriptors) + descriptor_loader = DescriptorLoader(descriptors, num_workers=DESCRIPTOR_LOADER_NUM_WORKERS) results = descriptor_loader.process() for message,level in compose_notifications(results): if level == 'error': LOGGER.warning('ERROR message={:s}'.format(str(message)))
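
Note on the DescriptorLoader change above: the WebUI now builds the loader with `num_workers=DESCRIPTOR_LOADER_NUM_WORKERS` (10), and `_process_descr` fans the per-entity gRPC calls out over a `ThreadPoolExecutor`, collecting errors in submission order. A minimal, self-contained sketch of that pattern is shown below; `send_entity` and the entity dicts are illustrative placeholders, not part of this patch.

```python
import concurrent.futures, operator
from typing import Any, Callable, Dict, List, Tuple

def process_entities(
    send_entity : Callable[[Dict], Any], entities : List[Dict], num_workers : int = 10
) -> Tuple[int, List[str]]:
    num_ok, errors = 0, []
    with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
        # Remember the submission index so errors can be reported in submission order,
        # even though futures complete in arbitrary order.
        future_to_entity = {
            executor.submit(send_entity, entity): (i, entity)
            for i, entity in enumerate(entities)
        }
        for future in concurrent.futures.as_completed(future_to_entity):
            i, entity = future_to_entity[future]
            try:
                future.result()
                num_ok += 1
            except Exception as e:  # pylint: disable=broad-except
                errors.append((i, f'{entity}: {e}'))
    return num_ok, [msg for _, msg in sorted(errors, key=operator.itemgetter(0))]
```

With `num_workers=1` (the default kept in `DescriptorLoader.__init__`) this degenerates to the previous sequential loop, so existing callers keep their behaviour.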