# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import logging
import os

import requests

LOGGER = logging.getLogger(__name__)

def create_tapi_request(resource_value):
    """Build and POST a TAPI connectivity-service request for a media channel.

    Loads the ``templates/lsp.json`` skeleton shipped next to this module,
    fills it in from *resource_value*, and POSTs the resulting payload to
    the controller URL carried inside the resource data itself.

    Args:
        resource_value: Tuple of ``(resource_key, resource_data)`` where
            ``resource_data`` is a dict (or a JSON string encoding one)
            holding the connectivity-service parameters: ``uuid``,
            ``input_sip``/``output_sip``, ``direction``, ``bw``,
            ``layer_protocol_name``/``layer_protocol_qualifier``, the
            spectrum bounds (``lower_frequency_mhz``/``upper_frequency_mhz``),
            ``granularity``, ``grid_type`` and the target ``url``.

    Returns:
        requests.Response: The HTTP response returned by the controller.

    Raises:
        OSError: If the template file cannot be read.
        json.JSONDecodeError: If the template or the resource data is not valid JSON.
        KeyError: If a mandatory field is missing from the resource data.
        requests.RequestException: If the POST fails, or the controller
            replies with an HTTP error status (``raise_for_status``).
    """
    LOGGER.info("Creating TAPI request for resource_value: %s", resource_value)
    try:
        base_dir = os.path.dirname(os.path.abspath(__file__))
        json_path = os.path.join(base_dir, 'templates', 'lsp.json')
        with open(json_path, 'r', encoding='utf-8') as f:
            template = json.load(f)

        resource_key = resource_value[0]
        resource_data = resource_value[1]
        if isinstance(resource_data, str):
            resource_data = json.loads(resource_data)

        LOGGER.info("Processing resource_key: %s", resource_key)
        LOGGER.info("Resource data: %s", resource_data)

        svc = template["tapi-connectivity:connectivity-service"][0]
        svc["connectivity-direction"] = resource_data["direction"]
        svc["layer-protocol-name"] = resource_data["layer_protocol_name"]
        svc["layer-protocol-qualifier"] = resource_data["layer_protocol_qualifier"]
        # Requested bandwidth is always expressed in GHz by the callers.
        svc["requested-capacity"]["total-size"]["unit"] = "GHz"
        svc["requested-capacity"]["total-size"]["value"] = resource_data["bw"]
        svc["include-link"] = resource_data.get("link_uuid_path", [])
        svc["uuid"] = resource_data["uuid"]

        # First end-point: ingress SIP, carrying the photonic-media spectrum spec.
        ep0 = svc["end-point"][0]
        ep0["service-interface-point"]["service-interface-point-uuid"] = resource_data["input_sip"]
        ep0["direction"] = resource_data["direction"]
        ep0["layer-protocol-name"] = resource_data["layer_protocol_name"]
        ep0["layer-protocol-qualifier"] = resource_data["layer_protocol_qualifier"]
        ep0["local-id"] = resource_data["input_sip"]

        media_spec = ep0["tapi-photonic-media:media-channel-connectivity-service-end-point-spec"]
        spectrum = media_spec["mc-config"]["spectrum"]
        spectrum["lower-frequency"] = resource_data["lower_frequency_mhz"]
        spectrum["upper-frequency"] = resource_data["upper_frequency_mhz"]
        spectrum["frequency-constraint"]["adjustment-granularity"] = resource_data["granularity"]
        spectrum["frequency-constraint"]["grid-type"] = resource_data["grid_type"]

        # Second end-point: egress SIP (no spectrum spec in the template).
        ep1 = svc["end-point"][1]
        ep1["service-interface-point"]["service-interface-point-uuid"] = resource_data["output_sip"]
        ep1["direction"] = resource_data["direction"]
        ep1["layer-protocol-name"] = resource_data["layer_protocol_name"]
        ep1["layer-protocol-qualifier"] = resource_data["layer_protocol_qualifier"]
        ep1["local-id"] = resource_data["output_sip"]

        url = resource_data.get("url", "")

        LOGGER.info("URL: %s", url)
        LOGGER.info("Template: %s", json.dumps(template, indent=2))
        headers = {
            "Content-Type": "application/yang-data+json",
            "Accept": "application/yang-data+json",
            "Expect": ""
        }
        result = requests.post(url, headers=headers, data=json.dumps(template), timeout=10)
        # Bug fix: surface HTTP error statuses to the caller. Previously the
        # Response was returned untouched and callers recorded success even
        # when the controller rejected the request.
        result.raise_for_status()
        return result

    except (OSError, json.JSONDecodeError, KeyError, requests.RequestException) as e:
        LOGGER.error("Error creating TAPI request: %s", str(e), exc_info=True)
        raise
import logging, requests
from typing import Dict, List, Optional
from common.tools.rest_api.client.RestApiClient import RestApiClient
from device.service.driver_api.ImportTopologyEnum import ImportTopologyEnum

GET_CONTEXT_IDS_URL = '/tfs-api/context_ids'
GET_DEVICES_URL     = '/tfs-api/devices'
GET_LINKS_URL       = '/tfs-api/links'

# Maps NBI operational-status names to their numeric enum values.
MAPPING_STATUS = {
    'DEVICEOPERATIONALSTATUS_UNDEFINED': 0,
    'DEVICEOPERATIONALSTATUS_DISABLED' : 1,
    'DEVICEOPERATIONALSTATUS_ENABLED'  : 2,
}

# Maps NBI device-driver names to their numeric enum values.
MAPPING_DRIVER = {
    'DEVICEDRIVER_UNDEFINED'           : 0,
    'DEVICEDRIVER_OPENCONFIG'          : 1,
    'DEVICEDRIVER_TRANSPORT_API'       : 2,
    'DEVICEDRIVER_P4'                  : 3,
    'DEVICEDRIVER_IETF_NETWORK_TOPOLOGY': 4,
    'DEVICEDRIVER_ONF_TR_532'          : 5,
    'DEVICEDRIVER_XR'                  : 6,
    'DEVICEDRIVER_IETF_L2VPN'          : 7,
    'DEVICEDRIVER_GNMI_OPENCONFIG'     : 8,
    'DEVICEDRIVER_OPTICAL_TFS'         : 9,
    'DEVICEDRIVER_IETF_ACTN'           : 10,
    'DEVICEDRIVER_OC'                  : 11,
    'DEVICEDRIVER_QKD'                 : 12,
    'DEVICEDRIVER_IETF_L3VPN'          : 13,
    'DEVICEDRIVER_IETF_SLICE'          : 14,
    'DEVICEDRIVER_NCE'                 : 15,
    'DEVICEDRIVER_SMARTNIC'            : 16,
    'DEVICEDRIVER_MORPHEUS'            : 17,
    'DEVICEDRIVER_RYU'                 : 18,
}

LOGGER = logging.getLogger(__name__)

class TfsApiClient(RestApiClient):
    """REST client for a remote TeraFlowSDN NBI, used to import its topology
    (devices, endpoints and optionally links) as driver resources.
    """

    def __init__(
        self, address : str, port : int, scheme : str = 'http',
        username : Optional[str] = None, password : Optional[str] = None,
        timeout : Optional[int] = 30
    ) -> None:
        super().__init__(
            address, port, scheme=scheme, username=username, password=password,
            timeout=timeout, verify_certs=False, allow_redirects=True, logger=LOGGER
        )

    def check_credentials(self) -> None:
        """Probe the NBI with a harmless GET; raises if authentication fails."""
        self.get(GET_CONTEXT_IDS_URL, expected_status_codes={requests.codes['OK']})
        LOGGER.info('Credentials checked')

    def get_devices_endpoints(
        self, import_topology : ImportTopologyEnum = ImportTopologyEnum.DEVICES
    ) -> List[Dict]:
        """Fetch devices (and, for TOPOLOGY mode, links) from the remote NBI
        and return them as a flat list of (resource_url, resource_data) pairs.
        """
        LOGGER.debug('[get_devices_endpoints] begin')
        MSG = '[get_devices_endpoints] import_topology={:s}'
        LOGGER.debug(MSG.format(str(import_topology)))

        if import_topology == ImportTopologyEnum.DISABLED:
            MSG = 'Unsupported import_topology mode: {:s}'
            raise Exception(MSG.format(str(import_topology)))

        devices = self.get(GET_DEVICES_URL, expected_status_codes={requests.codes['OK']})

        result : List = []
        for json_device in devices['devices']:
            result.extend(self._device_to_resources(json_device))

        if import_topology == ImportTopologyEnum.DEVICES:
            LOGGER.debug('[get_devices_endpoints] devices only; returning')
            return result

        links = self.get(GET_LINKS_URL, expected_status_codes={requests.codes['OK']})
        for json_link in links['links']:
            result.append(self._link_to_resource(json_link))

        LOGGER.debug('[get_devices_endpoints] topology; returning')
        return result

    @staticmethod
    def _device_to_resources(json_device : Dict) -> List:
        # One (url, data) entry for the device itself, followed by one per endpoint.
        device_uuid : str = json_device['device_id']['device_uuid']['uuid']
        device_url = '/devices/device[{:s}]'.format(device_uuid)
        device_data = {
            'uuid': device_uuid,
            'name': json_device['name'],
            'type': json_device['device_type'],
            'status': MAPPING_STATUS[json_device['device_operational_status']],
            'drivers': [
                MAPPING_DRIVER[driver]
                for driver in json_device['device_drivers']
            ],
        }
        entries = [(device_url, device_data)]

        for json_endpoint in json_device['device_endpoints']:
            endpoint_uuid = json_endpoint['endpoint_id']['endpoint_uuid']['uuid']
            endpoint_url = '/endpoints/endpoint[{:s}]'.format(endpoint_uuid)
            entries.append((endpoint_url, {
                'device_uuid': device_uuid,
                'uuid': endpoint_uuid,
                'name': json_endpoint['name'],
                'type': json_endpoint['endpoint_type'],
            }))

        return entries

    @staticmethod
    def _link_to_resource(json_link : Dict):
        # One (url, data) entry per link, endpoints as (device_uuid, endpoint_uuid).
        link_uuid : str = json_link['link_id']['link_uuid']['uuid']
        link_url = '/links/link[{:s}]'.format(link_uuid)
        link_endpoint_ids = [
            (
                json_endpoint_id['device_id']['device_uuid']['uuid'],
                json_endpoint_id['endpoint_uuid']['uuid'],
            )
            for json_endpoint_id in json_link['link_endpoint_ids']
        ]
        return (link_url, {
            'uuid': link_uuid,
            'name': json_link['name'],
            'endpoints': link_endpoint_ids,
        })
import ALL_RESOURCE_KEYS -from .Tools import create_connectivity_service, find_key, config_getter, delete_connectivity_service, tapi_tequest +from .Tools import create_connectivity_service, find_key, config_getter, delete_connectivity_service +from .TfsApiClient import TfsApiClient LOGGER = logging.getLogger(__name__) @@ -38,6 +40,17 @@ class TransportApiDriver(_Driver): scheme = self.settings.get('scheme', 'http') self.__tapi_root = '{:s}://{:s}:{:d}'.format(scheme, self.address, int(self.port)) self.__timeout = int(self.settings.get('timeout', 120)) + self.__import_topology = get_import_topology(self.settings, default=ImportTopologyEnum.DISABLED) + self.__skip_tapi_queries = self.__import_topology != ImportTopologyEnum.DISABLED + + if self.__skip_tapi_queries: + self.tac = TfsApiClient(address, port, scheme=scheme, username=username, password=password, timeout=self.__timeout) + LOGGER.info('TransportApiDriver initialized for {:s}:{:s} with import_topology={:s}'.format( + address, str(port), str(self.__import_topology))) + else: + self.tac = None + LOGGER.info('TransportApiDriver initialized for {:s}:{:s} as real OLS device'.format( + address, str(port))) def Connect(self) -> bool: url = self.__tapi_root + '/restconf/data/tapi-common:context' @@ -70,6 +83,19 @@ class TransportApiDriver(_Driver): chk_type('resources', resource_keys, list) results = [] with self.__lock: + if self.__skip_tapi_queries: + LOGGER.info('Importing topology from NBI for teraflowsdn device') + if len(resource_keys) == 0: resource_keys = ALL_RESOURCE_KEYS + for resource_key in resource_keys: + if resource_key == RESOURCE_ENDPOINTS or resource_key == RESOURCE_SERVICES: + try: + results.extend(self.tac.get_devices_endpoints(self.__import_topology)) + LOGGER.info('Imported {:d} resources from NBI'.format(len(results))) + except Exception as e: + LOGGER.exception('Failed to import topology from NBI: {:s}'.format(str(e))) + results.append((resource_key, e)) + return results + if 
len(resource_keys) == 0: resource_keys = ALL_RESOURCE_KEYS for i, resource_key in enumerate(resource_keys): str_resource_name = 'resource_key[#{:d}]'.format(i) @@ -84,18 +110,61 @@ class TransportApiDriver(_Driver): if len(resources) == 0: return results with self.__lock: - if "tapi_lsp" in str(resources): - for resource in resources: + for resource in resources: + LOGGER.info('resource = {:s}'.format(str(resource))) + + resource_key = resource[0] + + # Handle optical slice resources + if '/optical_slice/context/' in resource_key: + LOGGER.info('=' * 80) + LOGGER.info('OPTICAL SLICE RECEIVED') + LOGGER.info('=' * 80) try: - tapi_tequest(resource) - results.append((resource, True)) + optical_slice_data = json.loads(resource[1]) + + # Extract URL from tapi-common:context + url = optical_slice_data.get('url', '') + LOGGER.info('URL: %s', url) + + # Log the full optical slice data + LOGGER.info('Optical Slice Data:') + optical_slice = optical_slice_data.get('data', {}) + LOGGER.info(json.dumps(optical_slice, indent=2)) + LOGGER.info('=' * 80) + # TODO DEMO TEST + headers = { + "Content-Type": "application/yang-data+json", + "Accept": "application/yang-data+json", + "Expect": "" + } + result = requests.post(url, headers=headers, data=json.dumps(optical_slice), timeout=10) + results.append(True) except Exception as e: - MSG = "Invalid resource_value type: expected dict, got {:s}" - results.append((resource, e)) - else: - for resource in resources: - LOGGER.info('resource = {:s}'.format(str(resource))) + LOGGER.error(f'Failed to parse optical slice data: {str(e)}') + results.append(e) + continue + + # Handle media channel resources + elif '/media_channel/service/' in resource_key: + LOGGER.info('=' * 80) + LOGGER.info('MEDIA CHANNEL RECEIVED') + LOGGER.info('=' * 80) + try: + # Import here to avoid circular dependency + from .TapiRequestBuilder import create_tapi_request + + # Generate complete TAPI request from resource data + result = create_tapi_request(resource) + # 
LOGGER.info('Result: {:s}'.format(result)) + results.append(True) + except Exception as e: + LOGGER.error('Failed to create TAPI request: {:s}'.format(str(e)), exc_info=True) + results.append(e) + + else: + LOGGER.info('resource = {:s}'.format(str(resource))) uuid = find_key(resource, 'uuid') input_sip = find_key(resource, 'input_sip_uuid') output_sip = find_key(resource, 'output_sip_uuid') @@ -109,6 +178,7 @@ class TransportApiDriver(_Driver): self.__tapi_root, uuid, input_sip, output_sip, direction, capacity_value, capacity_unit, layer_protocol_name, layer_protocol_qualifier, timeout=self.__timeout, auth=self.__auth) results.extend(data) + return results @metered_subclass_method(METRICS_POOL) diff --git a/src/device/service/drivers/transport_api/templates/lsp.json b/src/device/service/drivers/transport_api/templates/lsp.json new file mode 100644 index 0000000000000000000000000000000000000000..6120050398c00ac1643982ccb22352e6932d2f8d --- /dev/null +++ b/src/device/service/drivers/transport_api/templates/lsp.json @@ -0,0 +1,52 @@ +{ + "tapi-connectivity:connectivity-service" : [ + { + "connectivity-direction" : "", + "end-point" : [ + { + "direction" : "", + "layer-protocol-name" : "", + "layer-protocol-qualifier" : "", + "local-id" : "", + "service-interface-point" : { + "service-interface-point-uuid" : "" + }, + + "tapi-photonic-media:media-channel-connectivity-service-end-point-spec":{ + "mc-config":{ + "spectrum":{ + "lower-frequency": "", + "upper-frequency": "", + "frequency-constraint":{ + "adjustment-granularity" : "", + "grid-type": "" + } + } + } + } + }, + { + "direction" : "", + "layer-protocol-name" : "", + "layer-protocol-qualifier" : "", + "local-id" : "", + "service-interface-point" : { + "service-interface-point-uuid" : "" + } + } + ], + "layer-protocol-name" : "", + "layer-protocol-qualifier" : "", + "requested-capacity" : { + "total-size" : { + "unit" : "", + "value" : "" + } + }, + "include-link":[ + ], + "route-objective-function" : "10000", + 
"uuid" : "" + } + ] +} \ No newline at end of file diff --git a/src/nbi/service/app.py b/src/nbi/service/app.py index 7cb7cb3e79b698c81e5b749d14ec809428fe370e..6e9a5287a3e095d139d8566a85f142b0d9aa6e30 100644 --- a/src/nbi/service/app.py +++ b/src/nbi/service/app.py @@ -40,6 +40,7 @@ from .ietf_l2vpn import register_ietf_l2vpn from .ietf_l3vpn import register_ietf_l3vpn from .ietf_network import register_ietf_network from .ietf_network_slice import register_ietf_nss +from .optical_slice import register_optical_slice from .osm_nbi import register_osm_api from .qkd_app import register_qkd_app from .restconf_root import register_restconf_root @@ -97,6 +98,7 @@ register_ietf_l2vpn (nbi_app) register_ietf_l3vpn (nbi_app) register_ietf_network (nbi_app) register_ietf_nss (nbi_app) +register_optical_slice (nbi_app) register_osm_api (nbi_app) register_qkd_app (nbi_app) register_restconf_root (nbi_app) diff --git a/src/nbi/service/optical_slice/Resources.py b/src/nbi/service/optical_slice/Resources.py new file mode 100644 index 0000000000000000000000000000000000000000..635effd8bad12ce63d986653671a0fdf8de06520 --- /dev/null +++ b/src/nbi/service/optical_slice/Resources.py @@ -0,0 +1,124 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import json
import logging

import requests
from flask_restful import Resource, request

from common.proto.context_pb2 import (
    ConfigActionEnum, ConfigRule, Device, Service, ServiceId,
    ServiceStatusEnum, ServiceTypeEnum
)
from device.client.DeviceClient import DeviceClient
from service.client.ServiceClient import ServiceClient

LOGGER = logging.getLogger(__name__)

# Context every optical-slice service is created/deleted under.
CONTEXT_UUID = 'admin'
# Device whose config rules carry the optical-slice payloads.
DEVICE_UUID = 'TFS-TAPI'

class OpticalSliceService(Resource):
    """REST resource implementing create (POST) and delete (DELETE) of a
    TAPI optical slice: it mirrors the slice as a TFS service and pushes
    the raw slice context to the TAPI device as a config rule.
    """

    def __init__(self):
        super().__init__()
        self.device_client = DeviceClient()
        self.service_client = ServiceClient()

    def post(self, sliceId: str):
        """Create the optical slice identified by *sliceId*.

        Returns (json_body, http_status): 201 on success, 400 on a malformed
        payload, 500 when the service creation or device configuration fails.
        """
        LOGGER.info("Received POST request for optical slice: %s", sliceId)

        data = request.get_json()
        LOGGER.info("Optical Slice data: %s", json.dumps(data, indent=2))

        # The TAPI context may arrive wrapped in a 'data' envelope or bare.
        if 'data' in data and 'tapi-common:context' in data['data']:
            context = data['data']['tapi-common:context']
        elif 'tapi-common:context' in data:
            context = data['tapi-common:context']
        else:
            return {'status': 'error', 'message': 'Missing tapi-common:context'}, 400

        slice_uuid = context.get('uuid')
        service_interface_points = context.get('service-interface-point', [])
        LOGGER.info("Service Interface Points: %d", len(service_interface_points))

        # Step 1: mirror the slice as a TFS service in the admin context.
        try:
            service = Service()
            service.service_id.service_uuid.uuid = sliceId
            service.service_id.context_id.context_uuid.uuid = CONTEXT_UUID
            service.service_type = ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE
            service.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
            service.name = f"OpticalSlice-{slice_uuid}"

            service_response = self.service_client.CreateService(service)
            LOGGER.info("Created TFS optical slice service: %s", service_response)
        except Exception as e:
            LOGGER.error("Failed to create TFS optical slice service: %s", str(e), exc_info=True)
            return {'status': 'error', 'message': f'Failed to create TFS service: {str(e)}'}, 500

        # Step 2: push the raw slice payload to the TAPI device as a SET rule.
        try:
            device = Device()
            device.device_id.device_uuid.uuid = DEVICE_UUID
            config_rule = ConfigRule()
            config_rule.action = ConfigActionEnum.CONFIGACTION_SET
            config_rule.custom.resource_key = f'/optical_slice/context/{slice_uuid}'
            config_rule.custom.resource_value = json.dumps(data)
            device.device_config.config_rules.append(config_rule)
            self.device_client.ConfigureDevice(device)
            LOGGER.info("Configured device %s with optical slice %s", DEVICE_UUID, sliceId)
        except Exception as e:
            LOGGER.error("Failed to configure device: %s", str(e))
            return {'status': 'error', 'message': f'Failed to configure device: {str(e)}'}, 500

        return {
            'status': 'success',
            'message': f'Optical slice created for {sliceId}',
            'sliceId': sliceId,
            'slice_uuid': slice_uuid
        }, 201

    def delete(self, sliceId: str):
        """Delete the optical slice identified by *sliceId*.

        Removes the TFS service first, then the slice context on the device.
        Returns (json_body, http_status): 200 on success, 500 on any failure.
        """
        LOGGER.info("Received DELETE request for optical slice: %s", sliceId)

        try:
            service_id = ServiceId()
            service_id.service_uuid.uuid = sliceId
            service_id.context_id.context_uuid.uuid = CONTEXT_UUID

            self.service_client.DeleteService(service_id)
            LOGGER.info("Deleted TFS service: %s", sliceId)
        except Exception as e:
            LOGGER.error("Failed to delete TFS optical slice service: %s", str(e), exc_info=True)
            return {'status': 'error', 'message': f'Failed to delete TFS service: {str(e)}'}, 500

        headers = {
            "Content-Type": "application/json",
            "Expect": ""
        }
        try:
            # TODO Dynamic IP: the device address should come from configuration.
            url = f'http://11.1.1.101:4900/restconf/data/tapi-common:context={sliceId}'
            response = requests.delete(url, headers=headers, timeout=10)
            # Bug fix: previously the response status was ignored and an HTTP
            # error from the device was still reported as success.
            response.raise_for_status()
            LOGGER.info("Deleted optical slice from device %s: %s", sliceId, url)
        except Exception as e:
            LOGGER.warning("Failed to delete from device: %s", str(e))
            return {'status': 'error', 'message': f'Failed to delete from device: {str(e)}'}, 500

        return {
            'status': 'success',
            'message': f'Optical Slice deleted for {sliceId}',
            'sliceId': sliceId,
            'service_uuid': sliceId
        }, 200
--include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' @@ -190,6 +190,6 @@ unit_test pathcomp-frontend: # # namespace: test # rules: # - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' -# when: manual +# when: manual # - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' # when: manual