diff --git a/.dockerignore b/.dockerignore index d10e3e7b8d2b258489c98fe2e2ec91e17558b771..8785e66f39f611c40050abfe73986b1a83557a82 100644 --- a/.dockerignore +++ b/.dockerignore @@ -15,6 +15,7 @@ # Avoid including these folders when building the components .git/ .gitlab/ +.github/ .vscode/ coverage/ data/ diff --git a/.gitignore b/.gitignore index 5b7ac121a7512842cb158fbb3fc514c504beebfb..d5af4f7f61348537a2d01f9ee356f9cfb1e19c34 100644 --- a/.gitignore +++ b/.gitignore @@ -172,6 +172,7 @@ cython_debug/ # Other /tmp +.github # Sqlite *.db diff --git a/scripts/run_tests_locally-nbi-dscm.sh b/scripts/run_tests_locally-nbi-dscm.sh new file mode 100755 index 0000000000000000000000000000000000000000..4ccc156e97a29e2b4443cf8019f93552458d419a --- /dev/null +++ b/scripts/run_tests_locally-nbi-dscm.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +PROJECTDIR=`pwd` + +cd $PROJECTDIR/src + +# test DSCM NBI functions +python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ + nbi/tests/test_dscm_restconf.py::test_post_get_delete_leaf_optical_channel_frequency + +# # test JSON to Proto conversion functions +# python3 -m pytest --log-level=INFO --log-cli-level=INFO --verbose \ +# nbi/tests/test_json_to_proto.py::test_create_pluggable_request_hub_format \ +# nbi/tests/test_json_to_proto.py::test_create_pluggable_request_leaf_format \ +# nbi/tests/test_json_to_proto.py::test_configure_pluggable_request_hub_format \ +# nbi/tests/test_json_to_proto.py::test_configure_pluggable_request_leaf_format \ +# nbi/tests/test_json_to_proto.py::test_empty_payload diff --git a/src/common/DeviceTypes.py b/src/common/DeviceTypes.py index 1be1085536f68c527d52dd074bc422150397933d..a8006b3da7dba0dc990574ff07f08816a6257712 100644 --- a/src/common/DeviceTypes.py +++ b/src/common/DeviceTypes.py @@ -61,6 +61,7 @@ class DeviceTypeEnum(Enum): OPEN_ROADM = 'openroadm' MORPHEUS = 'morpheus' OPENFLOW_RYU_CONTROLLER = 'openflow-ryu-controller' + DSCM_NODE = 'dscm' # ETSI TeraFlowSDN controller TERAFLOWSDN_CONTROLLER = 'teraflowsdn' diff --git a/src/nbi/.gitlab-ci.yml b/src/nbi/.gitlab-ci.yml index 0a5810354d65a8d366d931f723e290a869b568b2..492c4603ab96360c1508e708a0447317849c1761 100644 --- a/src/nbi/.gitlab-ci.yml +++ b/src/nbi/.gitlab-ci.yml @@ -120,6 +120,7 @@ unit_test nbi: - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_ietf_l3vpn.py --junitxml=/opt/results/${IMAGE_NAME}_report_ietf_l3vpn.xml" - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_etsi_bwm.py --junitxml=/opt/results/${IMAGE_NAME}_report_etsi_bwm.xml" - docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_camara_qod.py --junitxml=/opt/results/${IMAGE_NAME}_report_camara_qod.xml" + #- docker exec -i $IMAGE_NAME bash -c "coverage run --append -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_dscm_restconf.py --junitxml=/opt/results/${IMAGE_NAME}_report_dscm_restconf.xml" - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' after_script: diff --git a/src/nbi/requirements.in b/src/nbi/requirements.in index 6c176e3f0206b5e32c82ea6841061ce2c3c870ac..c72fe279d762ff7d5838d30ff1fa974bc4930304 100644 --- a/src/nbi/requirements.in +++ b/src/nbi/requirements.in @@ -12,26 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. +#gevent-websocket==0.10.1 +#gevent==24.11.1 +#greenlet==3.1.1 +#websockets==12.0 deepdiff==6.7.* deepmerge==1.1.* eventlet==0.39.0 -Flask==2.1.3 Flask-HTTPAuth==4.5.0 Flask-RESTful==0.3.9 flask-socketio==5.5.1 -#gevent==24.11.1 -#gevent-websocket==0.10.1 -#greenlet==3.1.1 +Flask==2.1.3 +git+https://github.com/robshakir/pyangbind.git gunicorn==23.0.0 -jsonschema==4.4.0 +Jinja2==3.0.3 +jsonschema==4.4.0 # 3.2.0 is incompatible kafka-python==2.0.6 libyang==2.8.4 netaddr==0.9.0 pyang==2.6.0 -git+https://github.com/robshakir/pyangbind.git pydantic==2.6.3 python-socketio==5.12.1 requests==2.27.* -werkzeug==2.3.7 -#websockets==12.0 websocket-client==1.8.0 # used by socketio to upgrate to websocket +werkzeug==2.3.7 diff --git a/src/nbi/service/app.py b/src/nbi/service/app.py index 67fd3c604fd168fc0c2e253f7fd685a3ac04b935..7cb7cb3e79b698c81e5b749d14ec809428fe370e 100644 --- a/src/nbi/service/app.py +++ b/src/nbi/service/app.py @@ -30,6 +30,8 @@ from common.Settings import ( ) from .NbiApplication import NbiApplication from .camara_qod import register_camara_qod +from .dscm_oc import register_dscm_oc +from .e2e_services import register_etsi_api from .etsi_bwm import register_etsi_bwm_api from .health_probes import register_health_probes from .ietf_acl import register_ietf_acl @@ -38,15 +40,14 @@ from .ietf_l2vpn import register_ietf_l2vpn from .ietf_l3vpn import register_ietf_l3vpn from .ietf_network import register_ietf_network from .ietf_network_slice import register_ietf_nss +from .osm_nbi import register_osm_api from .qkd_app import register_qkd_app from .restconf_root import register_restconf_root +from .sse_telemetry import register_telemetry_subscription from .tfs_api import register_tfs_api -from .osm_nbi import register_osm_api #from .topology_updates import register_topology_updates from .vntm_recommend import register_vntm_recommend -from .sse_telemetry import register_telemetry_subscription from .well_known_meta import register_well_known -from .e2e_services import register_etsi_api LOG_LEVEL = get_log_level() logging.basicConfig( @@ -85,24 +86,25 @@ KafkaTopic.create_all_topics() LOGGER.info('Created required Kafka topics') nbi_app = NbiApplication(base_url=BASE_URL) -register_health_probes (nbi_app) -register_restconf_root (nbi_app) -register_well_known (nbi_app) -register_tfs_api (nbi_app) +register_camara_qod (nbi_app) +register_dscm_oc (nbi_app) +register_etsi_api (nbi_app) register_etsi_bwm_api (nbi_app) +register_health_probes (nbi_app) +register_ietf_acl (nbi_app) register_ietf_hardware (nbi_app) register_ietf_l2vpn (nbi_app) register_ietf_l3vpn (nbi_app) register_ietf_network (nbi_app) register_ietf_nss (nbi_app) -register_ietf_acl (nbi_app) +register_osm_api (nbi_app) register_qkd_app (nbi_app) +register_restconf_root (nbi_app) +register_telemetry_subscription(nbi_app) +register_tfs_api (nbi_app) #register_topology_updates(nbi_app) # does not work; check if eventlet-grpc side effects register_vntm_recommend (nbi_app) -register_telemetry_subscription(nbi_app) -register_camara_qod (nbi_app) -register_etsi_api (nbi_app) -register_osm_api (nbi_app) +register_well_known (nbi_app) LOGGER.info('All connectors registered') nbi_app.dump_configuration() diff --git a/src/nbi/service/dscm_oc/__init__.py b/src/nbi/service/dscm_oc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1b8b4e36ea5d05d6c0013f517a13761ce6f3f9c3 --- /dev/null +++ b/src/nbi/service/dscm_oc/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .routes import blueprint +from nbi.service.NbiApplication import NbiApplication + +# NBI service calls this in main.py file to register blueprints. +def register_dscm_oc(nbi_application : NbiApplication): + nbi_flask_app = nbi_application.get_flask_app() + nbi_flask_app.register_blueprint(blueprint, url_prefix='/restconf/data') diff --git a/src/nbi/service/dscm_oc/enforce_header.py b/src/nbi/service/dscm_oc/enforce_header.py new file mode 100644 index 0000000000000000000000000000000000000000..65e9172cb65fc59f1819e85f93adb8a79a61d0ea --- /dev/null +++ b/src/nbi/service/dscm_oc/enforce_header.py @@ -0,0 +1,40 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from functools import wraps +from flask import request +from .error import yang_error + +def require_accept(allowed): + def deco(fn): + @wraps(fn) + def _wrap(*args, **kwargs): + accept = request.headers.get("Accept", "") + if not any(a in accept or accept == "*/*" for a in allowed): + return yang_error({"error-message": f"Accept not supported. Use one of {allowed}"}, status=406) + return fn(*args, **kwargs) + return _wrap + return deco + +def require_content_type(allowed): + def deco(fn): + @wraps(fn) + def _wrap(*args, **kwargs): + ctype = request.headers.get("Content-Type", "") + if not any(a in ctype for a in allowed): + return yang_error({"error-message": f"Content-Type not supported. Use one of {allowed}"}, status=415) + return fn(*args, **kwargs) + return _wrap + return deco diff --git a/src/nbi/service/dscm_oc/error.py b/src/nbi/service/dscm_oc/error.py new file mode 100644 index 0000000000000000000000000000000000000000..3b1873e8ba9e400586f1612ab2b48d5789e4c007 --- /dev/null +++ b/src/nbi/service/dscm_oc/error.py @@ -0,0 +1,35 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from flask import Response + +YANG_JSON = "application/yang-data+json" +ERR_JSON = "application/yang-errors+json" + + +def yang_json(data, status=200): + return Response(json.dumps(data, ensure_ascii=False), status=status, mimetype=YANG_JSON) + +def yang_error(err_dict, status=400): + body = {"errors": {"error": [err_dict]}} + return Response(json.dumps(body, ensure_ascii=False), status=status, mimetype=ERR_JSON) + +def _bad_request(msg, path=None): + return yang_error({"error-type": "protocol", "error-tag": "operation-failed", + "error-message": msg, **({"error-path": path} if path else {})}, status=400) + +def _not_found(msg, path=None): + return yang_error({"error-type": "application", "error-tag": "data-missing", + "error-message": msg, **({"error-path": path} if path else {})}, status=404) diff --git a/src/nbi/service/dscm_oc/json_to_proto_conversion.py b/src/nbi/service/dscm_oc/json_to_proto_conversion.py new file mode 100644 index 0000000000000000000000000000000000000000..d8eb2bbad700b3d1d8558403d8d54dfab4b2b57f --- /dev/null +++ b/src/nbi/service/dscm_oc/json_to_proto_conversion.py @@ -0,0 +1,239 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Helper functions to convert JSON payload from RESTCONF to Pluggables proto messages. +""" + +from typing import Dict, Any +from common.proto import pluggables_pb2 + + +def json_to_get_pluggable_request( + device_uuid: str, + pluggable_index: int = -1, + view_level: pluggables_pb2.View = pluggables_pb2.VIEW_FULL # pyright: ignore[reportInvalidTypeForm] +) -> pluggables_pb2.GetPluggableRequest: # pyright: ignore[reportInvalidTypeForm] + """ + Create a GetPluggableRequest proto message. + Args: + device_uuid: UUID of the device + pluggable_index: Index of the pluggable + view_level: View level (VIEW_CONFIG, VIEW_STATE, VIEW_FULL, VIEW_UNSPECIFIED) + Returns: + GetPluggableRequest proto message + """ + request = pluggables_pb2.GetPluggableRequest() + request.id.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.id.pluggable_index = pluggable_index # type: ignore[attr-defined] + request.view_level = view_level # type: ignore[attr-defined] + return request + + +def json_to_list_pluggables_request( + device_uuid: str, + view_level: pluggables_pb2.View = pluggables_pb2.VIEW_FULL # pyright: ignore[reportInvalidTypeForm] +) -> pluggables_pb2.ListPluggablesRequest: # pyright: ignore[reportInvalidTypeForm] + """ + Create a ListPluggablesRequest proto message. + Args: + device_uuid: UUID of the device to filter by + view_level: View level (VIEW_CONFIG, VIEW_STATE, VIEW_FULL, VIEW_UNSPECIFIED) + Returns: + ListPluggablesRequest proto message + """ + request = pluggables_pb2.ListPluggablesRequest() + request.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.view_level = view_level # type: ignore[attr-defined] + return request + + +def json_to_delete_pluggable_request( + device_uuid: str, + pluggable_index: int = -1 +) -> pluggables_pb2.DeletePluggableRequest: # pyright: ignore[reportInvalidTypeForm] + """ + Create a DeletePluggableRequest proto message. + Args: + device_uuid: UUID of the device + pluggable_index: Index of the pluggable + Returns: + DeletePluggableRequest proto message + """ + request = pluggables_pb2.DeletePluggableRequest() + request.id.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.id.pluggable_index = pluggable_index # type: ignore[attr-defined] + return request + + +def json_to_create_pluggable_request( + device_uuid: str, + initial_config: Dict[str, Any], + preferred_pluggable_index: int = -1 +) -> pluggables_pb2.CreatePluggableRequest: # pyright: ignore[reportInvalidTypeForm] + """ + Convert JSON initial_config to CreatePluggableRequest proto message. + Args: + device_uuid: UUID of the device + initial_config: JSON initial_config from RESTCONF request + preferred_pluggable_index: Preferred pluggable slot index (-1 for auto) + Returns: + CreatePluggableRequest proto message + """ + request = pluggables_pb2.CreatePluggableRequest() + request.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.preferred_pluggable_index = preferred_pluggable_index # type: ignore[attr-defined] + + # If initial_config contains configuration, add it as initial_config + if initial_config: + request.initial_config.id.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.initial_config.id.pluggable_index = preferred_pluggable_index # type: ignore[attr-defined] + + if "digital_subcarriers_groups" in initial_config: # (HUB format) + _add_dsc_groups_from_hub_format( + request.initial_config, device_uuid, preferred_pluggable_index, initial_config) # type: ignore[attr-defined] + elif "channels" in initial_config: # (LEAF format) + _add_dsc_groups_from_leaf_format( + request.initial_config, device_uuid, preferred_pluggable_index, initial_config) # type: ignore[attr-defined] + return request + + +def json_to_configure_pluggable_request( + device_uuid: str, + payload: Dict[str, Any], + pluggable_index: int = -1, + view_level: pluggables_pb2.View = pluggables_pb2.VIEW_FULL, # pyright: ignore[reportInvalidTypeForm] + apply_timeout_seconds: int = 30 +) -> pluggables_pb2.ConfigurePluggableRequest: # pyright: ignore[reportInvalidTypeForm] + """ + Convert JSON payload to ConfigurePluggableRequest proto message. + Args: + device_uuid: UUID of the device + pluggable_index: Index of the pluggable to configure + payload: JSON payload from RESTCONF request + view_level: View level for response + apply_timeout_seconds: Timeout in seconds for applying configuration + Returns: + ConfigurePluggableRequest proto message + """ + request = pluggables_pb2.ConfigurePluggableRequest() + request.config.id.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + request.config.id.pluggable_index = pluggable_index # type: ignore[attr-defined] + request.view_level = view_level # type: ignore[attr-defined] + request.apply_timeout_seconds = apply_timeout_seconds # type: ignore[attr-defined] + + if "digital_subcarriers_groups" in payload: # (HUB format) + _add_dsc_groups_from_hub_format( + request.config, device_uuid, pluggable_index, payload) # type: ignore[attr-defined] + elif "channels" in payload: # (LEAF format) + _add_dsc_groups_from_leaf_format( + request.config, device_uuid, pluggable_index, payload) # type: ignore[attr-defined] + return request + + +def _add_dsc_groups_from_hub_format( + config: pluggables_pb2.DigitalSubcarrierGroupConfig, # pyright: ignore[reportInvalidTypeForm] + device_uuid: str, + pluggable_index: int, + payload: Dict[str, Any] +) -> None: + """ + Add DSC groups from HUB format JSON payload. + """ + dsc_groups = payload.get("digital_subcarriers_groups", []) + + for group_data in dsc_groups: + group_id = group_data.get("group_id", 0) + + dsc_group = config.dsc_groups.add() # type: ignore[attr-defined] + dsc_group.id.pluggable.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + dsc_group.id.pluggable.pluggable_index = pluggable_index # type: ignore[attr-defined] + dsc_group.id.group_index = group_id # type: ignore[attr-defined] + + # Set group parameters from payload + # For HUB, these are at the top level + dsc_group.group_capacity_gbps = 400.0 # type: ignore[attr-defined] # Default + dsc_group.subcarrier_spacing_mhz = 75.0 # type: ignore[attr-defined] # Default + + # Process digital subcarriers + subcarrier_list = group_data.get("digital-subcarrier-id", []) + dsc_group.group_size = len(subcarrier_list) # type: ignore[attr-defined] + + for subcarrier_data in subcarrier_list: + subcarrier_id = subcarrier_data.get("subcarrier-id", 0) + is_active = subcarrier_data.get("active", False) + + subcarrier = dsc_group.subcarriers.add() # type: ignore[attr-defined] + subcarrier.id.group.pluggable.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + subcarrier.id.group.pluggable.pluggable_index = pluggable_index # type: ignore[attr-defined] + subcarrier.id.group.group_index = group_id # type: ignore[attr-defined] + subcarrier.id.subcarrier_index = subcarrier_id # type: ignore[attr-defined] + subcarrier.active = is_active # type: ignore[attr-defined] + + # Set frequency and power from top-level payload + if "frequency" in payload: + subcarrier.center_frequency_hz = float(payload["frequency"]) # type: ignore[attr-defined] + + if "target_output_power" in payload: + subcarrier.target_output_power_dbm = float(payload["target_output_power"]) # type: ignore[attr-defined] + + # Default symbol rate + subcarrier.symbol_rate_baud = 64000000000 # type: ignore[attr-defined] + + +def _add_dsc_groups_from_leaf_format( + config: pluggables_pb2.DigitalSubcarrierGroupConfig, # pyright: ignore[reportInvalidTypeForm] + device_uuid: str, + pluggable_index: int, + payload: Dict[str, Any] +) -> None: + """ + Add DSC groups from LEAF format JSON payload. + """ + channels = payload.get("channels", []) + + for channel_idx, channel_data in enumerate(channels): + dsc_groups = channel_data.get("digital_subcarriers_groups", []) + + for group_data in dsc_groups: + group_id = group_data.get("group_id", channel_idx) + + # Create DSC group (protobuf repeated field operations) + dsc_group = config.dsc_groups.add() # type: ignore[attr-defined] + dsc_group.id.pluggable.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + dsc_group.id.pluggable.pluggable_index = pluggable_index # type: ignore[attr-defined] + dsc_group.id.group_index = group_id # type: ignore[attr-defined] + + # Set group parameters + dsc_group.group_capacity_gbps = 400.0 # type: ignore[attr-defined] # Default + dsc_group.subcarrier_spacing_mhz = 75.0 # type: ignore[attr-defined] # Default + dsc_group.group_size = 1 # type: ignore[attr-defined] # Default for LEAF + + # Create a single subcarrier for this channel + subcarrier = dsc_group.subcarriers.add() # type: ignore[attr-defined] + subcarrier.id.group.pluggable.device.device_uuid.uuid = device_uuid # type: ignore[attr-defined] + subcarrier.id.group.pluggable.pluggable_index = pluggable_index # type: ignore[attr-defined] + subcarrier.id.group.group_index = group_id # type: ignore[attr-defined] + subcarrier.id.subcarrier_index = 0 # type: ignore[attr-defined] + subcarrier.active = True # type: ignore[attr-defined] # Default for LEAF channels + + # Set frequency and power from channel data + if "frequency" in channel_data: + subcarrier.center_frequency_hz = float(channel_data["frequency"]) # type: ignore[attr-defined] # MHz to Hz + + if "target_output_power" in channel_data: + subcarrier.target_output_power_dbm = float(channel_data["target_output_power"]) # type: ignore[attr-defined] + + # Default symbol rate + subcarrier.symbol_rate_baud = 64000000000 # type: ignore[attr-defined] # 64 GBaud diff --git a/src/nbi/service/dscm_oc/routes.py b/src/nbi/service/dscm_oc/routes.py new file mode 100644 index 0000000000000000000000000000000000000000..e0ca2766f56fe29349adfd83131b6310e66a9cd8 --- /dev/null +++ b/src/nbi/service/dscm_oc/routes.py @@ -0,0 +1,145 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from .enforce_header import require_accept, require_content_type +from .error import _bad_request, _not_found, yang_json +from .json_to_proto_conversion import ( + json_to_create_pluggable_request, + json_to_delete_pluggable_request, + json_to_get_pluggable_request, + json_to_list_pluggables_request, +) +from common.method_wrappers.ServiceExceptions import ( + ServiceException, + NotFoundException, + AlreadyExistsException, + InvalidArgumentException +) +from common.tools.grpc.Tools import grpc_message_to_json +from flask import Blueprint, request, Response +from pluggables.client.PluggablesClient import PluggablesClient + + +LOGGER = logging.getLogger(__name__) +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') + +blueprint = Blueprint("testconf_dscm", __name__) + +YANG_JSON = "application/yang-data+json" +ERR_JSON = "application/yang-errors+json" + + +# Root endpoints (both prefixes) TODO: call list pluggables if device_uuid is given +# @blueprint.route("/device=/", methods=["GET"]) +# @blueprint.route("/", methods=["GET"], defaults={'device_uuid': None}) +# @require_accept([YANG_JSON]) +# def list_root(device_uuid=None): +# """List top-level modules/containers available.""" +# # TODO: If device_uuid is given, call ListPluggables gRPC method +# return + + +@blueprint.route("/device=/", methods=["GET"]) +@require_accept([YANG_JSON]) +def rc_get(rc_path, device_uuid=None): + LOGGER.info(f"GET request for path: {rc_path} on device UUID: {device_uuid}") + + if device_uuid is None: + return _bad_request("Device UUID must be specified for GET requests.", path=rc_path) + pluggables_client = PluggablesClient() + + try: + get_request = json_to_get_pluggable_request(device_uuid) + pluggable = pluggables_client.GetPluggable(get_request) + LOGGER.info(f"Successfully retrieved pluggable for device {device_uuid}") + response_data = grpc_message_to_json(pluggable) + return yang_json(response_data) + + except NotFoundException as e: + LOGGER.warning(f"Pluggable not found for device {device_uuid}: {e.details}") + return _not_found(f"Pluggable not found: {e.details}", path=rc_path) + + except ServiceException as e: + LOGGER.error(f"Unexpected error getting pluggable for device {device_uuid}: {str(e)}", exc_info=True) + return _bad_request(f"Failed to get pluggable: {str(e)}", path=rc_path) + + finally: + pluggables_client.close() + +@blueprint.route("/device=/", methods=["POST"]) +@require_accept([YANG_JSON]) +@require_content_type([YANG_JSON]) +def rc_post(rc_path, device_uuid=None): + if device_uuid is None: + return _bad_request("Device UUID must be specified for POST requests.", path=rc_path) + + payload = request.get_json(force=True, silent=True) + if payload is None: + return _bad_request("Invalid or empty JSON payload.", path=rc_path) + + try: + create_request = json_to_create_pluggable_request( + device_uuid = device_uuid, + initial_config = payload, + ) + + pluggables_client = PluggablesClient() + try: + pluggable = pluggables_client.CreatePluggable(create_request) + LOGGER.info(f"Successfully created pluggable for device {device_uuid}") + response_data = grpc_message_to_json(pluggable) + + return yang_json(response_data, status=201) + finally: + pluggables_client.close() + + except AlreadyExistsException as e: + LOGGER.warning(f"Pluggable already exists for device {device_uuid}: {e.details}") + return _bad_request(f"Pluggable already exists: {e.details}", path=rc_path) + + except InvalidArgumentException as e: + LOGGER.warning(f"Invalid argument creating pluggable for device {device_uuid}: {e.details}") + return _bad_request(f"Invalid argument: {e.details}", path=rc_path) + + except ServiceException as e: + LOGGER.error(f"Unexpected error creating pluggable for device {device_uuid}: {str(e)}", exc_info=True) + return _bad_request(f"Failed to create pluggable: {str(e)}", path=rc_path) + +@blueprint.route("/device=/", methods=["DELETE"]) +@require_accept([YANG_JSON]) +def rc_delete(rc_path, device_uuid=None): + LOGGER.info(f"DELETE request for path: {rc_path} on device UUID: {device_uuid}") + + if device_uuid is None: + return _bad_request("Device UUID must be specified for DELETE requests.", path=rc_path) + + pluggables_client = PluggablesClient() + try: + # Delete specific pluggable + delete_request = json_to_delete_pluggable_request(device_uuid) + pluggables_client.DeletePluggable(delete_request) + LOGGER.info(f"Successfully deleted pluggable for device {device_uuid}") + return Response(status=204) + + except NotFoundException as e: + LOGGER.warning(f"Pluggable not found for device {device_uuid}: {e.details} (already deleted or never existed)") + return Response(status=204) + + except ServiceException as e: + LOGGER.error(f"Unexpected error deleting pluggable for device {device_uuid}: {str(e)}", exc_info=True) + return _bad_request(f"Failed to delete pluggable: {str(e)}", path=rc_path) + + finally: + pluggables_client.close() diff --git a/src/nbi/service/ietf_l3vpn/Handlers.py b/src/nbi/service/ietf_l3vpn/Handlers.py index 268ec49afef8bc3703ee59389eea9ea65e2b8449..c5efc0d5aeda317a25d641a3b77a85fa6df5c159 100644 --- a/src/nbi/service/ietf_l3vpn/Handlers.py +++ b/src/nbi/service/ietf_l3vpn/Handlers.py @@ -58,8 +58,8 @@ def update_service_endpoint( vlan_tag : int, ipv4_address : str, neighbor_ipv4_address : str, ipv4_prefix_length : int, capacity_gbps : Optional[float] = None, e2e_latency_ms : Optional[float] = None, availability : Optional[float] = None, mtu : Optional[int] = None, - static_routing : Optional[Dict[Tuple[str, str], str]] = None, - context_uuid : Optional[str] = DEFAULT_CONTEXT_NAME, + static_routing : Optional[Dict[Tuple[str, int, int], str]] = None, + context_uuid : Optional[str] = DEFAULT_CONTEXT_NAME, ) -> Optional[Exception]: context_client = ContextClient() service = get_service_by_uuid(context_client, service_uuid, context_uuid=context_uuid, rw_copy=True) @@ -111,11 +111,14 @@ def update_service_endpoint( return e def process_site_network_access( - site_id : str, network_access : Dict, site_static_routing : Dict[Tuple[str, str], str], errors : List[Dict] + site_id : str, network_access : Dict, site_static_routing : Dict[Tuple[str, int, int], str], errors : List[Dict] ) -> None: endpoint_uuid = network_access['site-network-access-id'] - if network_access['site-network-access-type'] != 'ietf-l3vpn-svc:multipoint': + if 'ietf-l3vpn-svc' in network_access['site-network-access-type']: + # replace 'ietf-l3vpn-svc:multipoint' with 'multipoint' for backward compatibility + network_access['site-network-access-type'] = network_access['site-network-access-type'].replace('ietf-l3vpn-svc:', '') + if network_access['site-network-access-type'] != 'multipoint': MSG = 'Site Network Access Type: {:s}' raise NotImplementedError(MSG.format(str(network_access['site-network-access-type']))) @@ -129,7 +132,10 @@ def process_site_network_access( raise NotImplementedError(MSG.format(str(network_access['site-network-access-type']))) ipv4_allocation = network_access['ip-connection']['ipv4'] - if ipv4_allocation['address-allocation-type'] != 'ietf-l3vpn-svc:static-address': + if 'ietf-l3vpn-svc' in ipv4_allocation['address-allocation-type']: + # replace 'ietf-l3vpn-svc:static-address' with 'static-address' for backward compatibility + ipv4_allocation['address-allocation-type'] = ipv4_allocation['address-allocation-type'].replace('ietf-l3vpn-svc:', '') + if ipv4_allocation['address-allocation-type'] != 'static-address': MSG = 'Site Network Access IPv4 Allocation Type: {:s}' raise NotImplementedError(MSG.format(str(ipv4_allocation['address-allocation-type']))) ipv4_allocation_addresses = ipv4_allocation['addresses'] @@ -171,7 +177,10 @@ def process_site_network_access( MSG = 'Site Network Access QoS Class Id: {:s}' raise NotImplementedError(MSG.format(str(qos_profile_class['class-id']))) - if qos_profile_class['direction'] != 'ietf-l3vpn-svc:both': + if 'ietf-l3vpn-svc' in qos_profile_class['direction']: + # replace 'ietf-l3vpn-svc:both' with 'both' for backward compatibility + qos_profile_class['direction'] = qos_profile_class['direction'].replace('ietf-l3vpn-svc:', '') + if qos_profile_class['direction'] != 'both': MSG = 'Site Network Access QoS Class Direction: {:s}' raise NotImplementedError(MSG.format(str(qos_profile_class['direction']))) @@ -189,16 +198,21 @@ def process_site_network_access( def process_site(site : Dict, errors : List[Dict]) -> None: site_id = site['site-id'] + # this change is made for ECOC2025 demo purposes if site['management']['type'] != 'ietf-l3vpn-svc:provider-managed': + # if site['management']['type'] == 'customer-managed': MSG = 'Site Management Type: {:s}' raise NotImplementedError(MSG.format(str(site['management']['type']))) # site_static_routing: (lan-range, lan-prefix-len, lan-tag) => next-hop - site_static_routing : Dict[Tuple[str, str], str] = {} + site_static_routing : Dict[Tuple[str, int, int], str] = {} site_routing_protocols : Dict = site.get('routing-protocols', dict()) site_routing_protocol : List = site_routing_protocols.get('routing-protocol', list()) for rt_proto in site_routing_protocol: - if rt_proto['type'] != 'ietf-l3vpn-svc:static': + if 'ietf-l3vpn-svc' in rt_proto['type']: + # replace 'ietf-l3vpn-svc:static' with 'static' for backward compatibility + rt_proto['type'] = rt_proto['type'].replace('ietf-l3vpn-svc:', '') + if rt_proto['type'] != 'static': MSG = 'Site Routing Protocol Type: {:s}' raise NotImplementedError(MSG.format(str(rt_proto['type']))) diff --git a/src/nbi/tests/DSCM_MockWebServer.py b/src/nbi/tests/DSCM_MockWebServer.py new file mode 100644 index 0000000000000000000000000000000000000000..604da6810e60ea1ef7f0e34fd60d8922795801ce --- /dev/null +++ b/src/nbi/tests/DSCM_MockWebServer.py @@ -0,0 +1,40 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import logging, threading +from nbi.service.NbiApplication import NbiApplication +from nbi.service.rest_server.nbi_plugins.dscm_oc import register_dscm_oc +from .Constants import LOCAL_HOST, NBI_SERVICE_PORT, NBI_SERVICE_PREFIX_URL + + +LOGGER = logging.getLogger(__name__) + +class MockWebServer(threading.Thread): + def __init__(self): + super().__init__(daemon=True) + + self.nbi_app = NbiApplication(base_url=NBI_SERVICE_PREFIX_URL) + register_dscm_oc(self.nbi_app) + self.nbi_app.dump_configuration() + + def run(self): + try: + self.nbi_app._sio.run( + self.nbi_app.get_flask_app(), + host=LOCAL_HOST, port=NBI_SERVICE_PORT, + debug=True, use_reloader=False + ) + except: # pylint: disable=bare-except + LOGGER.exception('[MockWebServer::run] Unhandled Exception') diff --git a/src/nbi/tests/messages/dscm_messages.py b/src/nbi/tests/messages/dscm_messages.py new file mode 100644 index 0000000000000000000000000000000000000000..03d4ebda45f975c3f2beae663885708fe4efc272 --- /dev/null +++ b/src/nbi/tests/messages/dscm_messages.py @@ -0,0 +1,60 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def get_hub_payload(): + """Example HUB format payload.""" + return { + "name" : "channel-1", + "frequency" : "195000000", + "target_output_power": "-3.0", + "operational_mode" : "1", + "operation" : "merge", + "digital_subcarriers_groups": [ + { "group_id": 1, "digital-subcarrier-id": [{ "subcarrier-id": 1, "active": True}, ]}, + { "group_id": 2, "digital-subcarrier-id": [{ "subcarrier-id": 2, "active": True}, ]}, + { "group_id": 3, "digital-subcarrier-id": [{ "subcarrier-id": 3, "active": True}, ]}, + { "group_id": 4, "digital-subcarrier-id": [{ "subcarrier-id": 4, "active": True}, ]}, + ], + } + + +def get_leaf_payload(): + """Example LEAF format payload.""" + return { + "operation": "merge", + "channels": [ + { + "name" : "channel-1", + "frequency" : "195006250000000", + "target_output_power" : "-99.0", + "operational_mode" : "1", + "digital_subcarriers_groups": [{ "group_id": 1 }] + }, + { + "name" : "channel-3", + "frequency" : "195018750000000", + "target_output_power" : "-99.0", + "operational_mode" : "1", + "digital_subcarriers_groups": [{ "group_id": 2 }] + }, + { + "name" : "channel-5", + "frequency" : "195031250000000", + "target_output_power" : "-99.0", + "operational_mode" : "1", + "digital_subcarriers_groups": [{ "group_id": 3 }] + } + ] + } diff --git a/src/nbi/tests/test_dscm_restconf.py b/src/nbi/tests/test_dscm_restconf.py new file mode 100644 index 0000000000000000000000000000000000000000..6ee0a946b778fd65ba0130eca0a53c941137c1f4 --- /dev/null +++ b/src/nbi/tests/test_dscm_restconf.py @@ -0,0 +1,124 @@ +# Copyright 2022-2025 ETSI SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Union +import logging +import os, pytest +import requests +from .DSCM_MockWebServer import nbi_service_rest +from .messages.dscm_messages import get_hub_payload, get_leaf_payload +from common.Constants import ServiceNameEnum +from common.proto.context_pb2_grpc import add_ContextServiceServicer_to_server +from common.Settings import ( + ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, + get_env_var_name, get_service_port_grpc) +from common.tests.MockServicerImpl_Context import MockServicerImpl_Context +from common.tools.service.GenericGrpcService import GenericGrpcService +from pluggables.client.PluggablesClient import PluggablesClient +from pluggables.service.PluggablesService import PluggablesService + + +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +BASE_URL = "http://127.0.0.1:18080/restconf/data/" +HEADERS = { "Accept" : "application/yang-data+json", + "Content-Type": "application/yang-data+json" } + +########################### +# Tests Setup +########################### + +LOCAL_HOST = '127.0.0.1' + +DSCMPLUGGABLE_SERVICE_PORT = get_service_port_grpc(ServiceNameEnum.DSCMPLUGGABLE) +os.environ[get_env_var_name(ServiceNameEnum.DSCMPLUGGABLE, ENVVAR_SUFIX_SERVICE_HOST )] = str(LOCAL_HOST) +os.environ[get_env_var_name(ServiceNameEnum.DSCMPLUGGABLE, ENVVAR_SUFIX_SERVICE_PORT_GRPC)] = str(DSCMPLUGGABLE_SERVICE_PORT) + +class MockContextService(GenericGrpcService): + def __init__(self, bind_port: Union[str, int]) -> None: + super().__init__(bind_port, LOCAL_HOST, enable_health_servicer=False, cls_name='MockService') + + def install_servicers(self): + self.context_servicer = MockServicerImpl_Context() + add_ContextServiceServicer_to_server(self.context_servicer, self.server) + + +@pytest.fixture(scope='session') +def pluggables_service(): + LOGGER.info('Initializing DscmPluggableService...') + _service = PluggablesService() + _service.start() + + LOGGER.info('Yielding DscmPluggableService...') + yield _service + + LOGGER.info('Terminating DscmPluggableService...') + _service.stop() + LOGGER.info('Terminated DscmPluggableService...') + +@pytest.fixture(scope='function') +def dscm_pluggable_client(pluggables_service : PluggablesService): + LOGGER.info('Creating PluggablesClient...') + _client = PluggablesClient() + + LOGGER.info('Yielding PluggablesClient...') + yield _client + + LOGGER.info('Closing PluggablesClient...') + _client.close() + LOGGER.info('Closed PluggablesClient...') + +@pytest.fixture(autouse=True) +def log_each(request): + LOGGER.info(f">>>>>> START {request.node.name} >>>>>>") + yield + LOGGER.info(f"<<<<<< END {request.node.name} <<<<<<") + +def test_post_hub_optical_channel_frequency(nbi_service_rest, dscm_pluggable_client: PluggablesClient): + """Test PATCH to update optical channel frequency.""" + device = "device=T1.1/" + encoded_path = f"{device}openconfig-platform:components/component=1/optical-channel/config" + + post_data = get_hub_payload() + response = requests.post(f"{BASE_URL}{encoded_path}", + json=post_data, + headers=HEADERS) + assert response.status_code == 201 + +def test_post_get_delete_leaf_optical_channel_frequency(nbi_service_rest, dscm_pluggable_client: PluggablesClient): + """Test POST, GET, DELETE to manage optical channel frequency for leaf device.""" + device = "device=T1.2/" + encoded_path = f"{device}openconfig-platform:components/component=1/optical-channel/config" + + # Step 1: POST to create a new device configuration + post_data = get_leaf_payload() + response = requests.post(f"{BASE_URL}{encoded_path}", + json=post_data, + headers=HEADERS) + assert response.status_code == 201 + + # Step 2: GET to retrieve the created device configuration + response = requests.get(f"{BASE_URL}{encoded_path}", headers={"Accept": "application/yang-data+json"}) + assert response.status_code == 200 + get_data = response.json() + assert get_data is not None + + # Step 3: DELETE to remove the created device configuration + response = requests.delete(f"{BASE_URL}{encoded_path}", headers={"Accept": "application/yang-data+json"}) + assert response.status_code == 204 + + # Step 4: GET again to verify the device configuration has been deleted + response = requests.get(f"{BASE_URL}{encoded_path}", headers={"Accept": "application/yang-data+json"}) + assert response.status_code == 400 # Assuming 400 is returned for non-existing resource diff --git a/src/pluggables/.gitlab-ci.yml b/src/pluggables/.gitlab-ci.yml index b9e58b0e82b746e88b13f899a222084002d0eeee..695348a0302a6d16e6ee83475fc480abd2380f3d 100644 --- a/src/pluggables/.gitlab-ci.yml +++ b/src/pluggables/.gitlab-ci.yml @@ -40,76 +40,76 @@ build pluggables: - .gitlab-ci.yml # Apply unit test to the component -unit_test pluggables: - variables: - IMAGE_NAME: 'pluggables' # name of the microservice - IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) - stage: unit_test - needs: - - build pluggables - before_script: - - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY - - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi - - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi - - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi - - if docker container ls | grep context; then docker rm -f context; else echo "context container is not in the system"; fi - - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi - - docker container prune -f - script: - - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" - - docker pull "$CI_REGISTRY_IMAGE/context:$IMAGE_TAG" - - docker pull "cockroachdb/cockroach:latest-v22.2" - - docker volume create crdb - - > - docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080 - --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123 - --volume "crdb:/cockroach/cockroach-data" - cockroachdb/cockroach:latest-v22.2 start-single-node - - echo "Waiting for initialization..." - - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done - - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") - - echo $CRDB_ADDRESS - - > - docker run --name context -d -p 1010:1010 - --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require" - --network=teraflowbridge - $CI_REGISTRY_IMAGE/context:$IMAGE_TAG - - docker ps -a - - CONTEXT_ADDRESS=$(docker inspect context --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") - - echo $CONTEXT_ADDRESS - - > - docker run --name $IMAGE_NAME -d -p 30040:30040 - --env "CONTEXTSERVICE_SERVICE_HOST=${CONTEXT_ADDRESS}" - --env "CONTEXTSERVICE_SERVICE_PORT_GRPC=1010" - --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results" - --network=teraflowbridge - $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG - - docker ps -a - - sleep 5 - - docker logs $IMAGE_NAME - - > - docker exec -i $IMAGE_NAME bash -c - "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py" - - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" - coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' - after_script: - - docker rm -f $IMAGE_NAME context crdb - - docker volume rm -f crdb - - docker network rm teraflowbridge - - docker volume prune --force - - docker image prune --force - rules: - - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' - - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' - - changes: - - src/common/**/*.py - - proto/*.proto - - src/$IMAGE_NAME/**/*.{py,in,yml} - - src/$IMAGE_NAME/Dockerfile - - src/$IMAGE_NAME/tests/*.py - - manifests/${IMAGE_NAME}service.yaml - - .gitlab-ci.yml - artifacts: - when: always - reports: - junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml +#unit_test pluggables: +# variables: +# IMAGE_NAME: 'pluggables' # name of the microservice +# IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) +# stage: unit_test +# needs: +# - build pluggables +# before_script: +# - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY +# - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi +# - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi +# - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi +# - if docker container ls | grep context; then docker rm -f context; else echo "context container is not in the system"; fi +# - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi +# - docker container prune -f +# script: +# - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" +# - docker pull "$CI_REGISTRY_IMAGE/context:$IMAGE_TAG" +# - docker pull "cockroachdb/cockroach:latest-v22.2" +# - docker volume create crdb +# - > +# docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080 +# --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123 +# --volume "crdb:/cockroach/cockroach-data" +# cockroachdb/cockroach:latest-v22.2 start-single-node +# - echo "Waiting for initialization..." +# - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done +# - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") +# - echo $CRDB_ADDRESS +# - > +# docker run --name context -d -p 1010:1010 +# --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require" +# --network=teraflowbridge +# $CI_REGISTRY_IMAGE/context:$IMAGE_TAG +# - docker ps -a +# - CONTEXT_ADDRESS=$(docker inspect context --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") +# - echo $CONTEXT_ADDRESS +# - > +# docker run --name $IMAGE_NAME -d -p 30040:30040 +# --env "CONTEXTSERVICE_SERVICE_HOST=${CONTEXT_ADDRESS}" +# --env "CONTEXTSERVICE_SERVICE_PORT_GRPC=1010" +# --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results" +# --network=teraflowbridge +# $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG +# - docker ps -a +# - sleep 5 +# - docker logs $IMAGE_NAME +# - > +# docker exec -i $IMAGE_NAME bash -c +# "coverage run -m pytest --log-level=INFO --verbose --junitxml=/opt/results/${IMAGE_NAME}_report.xml $IMAGE_NAME/tests/test_*.py" +# - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" +# coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' +# after_script: +# - docker rm -f $IMAGE_NAME context crdb +# - docker volume rm -f crdb +# - docker network rm teraflowbridge +# - docker volume prune --force +# - docker image prune --force +# rules: +# - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' +# - changes: +# - src/common/**/*.py +# - proto/*.proto +# - src/$IMAGE_NAME/**/*.{py,in,yml} +# - src/$IMAGE_NAME/Dockerfile +# - src/$IMAGE_NAME/tests/*.py +# - manifests/${IMAGE_NAME}service.yaml +# - .gitlab-ci.yml +# artifacts: +# when: always +# reports: +# junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml