Commit 41a97970 authored by Lluis Gifre Renom

Merge branch 'develop' of ssh://gifrerenom_labs.etsi.org/tfs/controller into feat/249-cttc-telemetry-enhancement
parents 87685c9c 6573fa12
1 merge request: !320 Resolve "(CTTC) Telemetry Enhancement"
Showing with 1512 additions and 5 deletions
......@@ -223,6 +223,9 @@ enum DeviceDriverEnum {
DEVICEDRIVER_IETF_ACTN = 10;
DEVICEDRIVER_OC = 11;
DEVICEDRIVER_QKD = 12;
DEVICEDRIVER_IETF_L3VPN = 13;
DEVICEDRIVER_IETF_SLICE = 14;
DEVICEDRIVER_NCE = 15;
}
enum DeviceOperationalStatusEnum {
......
#!/bin/bash
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
PROJECTDIR=`pwd`
cd $PROJECTDIR/src
RCFILE=$PROJECTDIR/coverage/.coveragerc
# Run unitary tests and analyze coverage of code at same time
# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
nbi/tests/test_slice_2.py
......@@ -38,6 +38,7 @@ class DeviceTypeEnum(Enum):
CLIENT = 'client'
DATACENTER = 'datacenter'
IP_SDN_CONTROLLER = 'ip-sdn-controller'
NCE = 'nce'
MICROWAVE_RADIO_SYSTEM = 'microwave-radio-system'
OPEN_LINE_SYSTEM = 'open-line-system'
OPTICAL_ROADM = 'optical-roadm'
......@@ -52,3 +53,4 @@ class DeviceTypeEnum(Enum):
# ETSI TeraFlowSDN controller
TERAFLOWSDN_CONTROLLER = 'teraflowsdn'
IETF_SLICE = 'ietf-slice'
......@@ -12,12 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc, logging
from typing import Optional
import logging
import grpc
from typing import Optional, Tuple, Union
from uuid import UUID, uuid5
from common.Constants import DEFAULT_CONTEXT_NAME
from common.proto.context_pb2 import Slice, SliceFilter, SliceId
from common.method_wrappers.ServiceExceptions import InvalidArgumentsException
from common.proto.context_pb2 import ContextId, Slice, SliceFilter, SliceId
from context.client.ContextClient import ContextClient
NAMESPACE_TFS = UUID("200e3a1f-2223-534f-a100-758e29c37f40")
LOGGER = logging.getLogger(__name__)
def get_slice_by_id(
......@@ -59,3 +67,96 @@ def get_slice_by_uuid(
context_client, slice_id, rw_copy=rw_copy, include_endpoint_ids=include_endpoint_ids,
include_constraints=include_constraints, include_service_ids=include_service_ids,
include_subslice_ids=include_subslice_ids, include_config_rules=include_config_rules)
def get_uuid_from_string(
str_uuid_or_name: Union[str, UUID], prefix_for_name: Optional[str] = None
) -> str:
# if UUID given, assume it is already a valid UUID
if isinstance(str_uuid_or_name, UUID):
return str(str_uuid_or_name)
if not isinstance(str_uuid_or_name, str):
MSG = "Parameter({:s}) cannot be used to produce a UUID"
raise Exception(MSG.format(str(repr(str_uuid_or_name))))
try:
# try to parse as UUID
return str(UUID(str_uuid_or_name))
except: # pylint: disable=bare-except
# produce a UUID within TFS namespace from parameter
if prefix_for_name is not None:
str_uuid_or_name = "{:s}/{:s}".format(prefix_for_name, str_uuid_or_name)
return str(uuid5(NAMESPACE_TFS, str_uuid_or_name))
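For reference, a minimal sketch (not part of this commit) of the deterministic derivation performed by get_uuid_from_string above; it uses only the standard-library uuid module and the NAMESPACE_TFS constant defined earlier in this file:

from uuid import UUID, uuid5

NAMESPACE_TFS = UUID("200e3a1f-2223-534f-a100-758e29c37f40")

# The same name always maps to the same UUID within the TFS namespace...
print(uuid5(NAMESPACE_TFS, "admin/my-slice"))
print(uuid5(NAMESPACE_TFS, "admin/my-slice"))  # identical to the line above
# ...while prefixing with a different context UUID yields a different value.
print(uuid5(NAMESPACE_TFS, "other-context/my-slice"))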
def context_get_uuid(
context_id: ContextId,
context_name: str = "",
allow_random: bool = False,
allow_default: bool = False,
) -> str:
context_uuid = context_id.context_uuid.uuid
if len(context_uuid) > 0:
return get_uuid_from_string(context_uuid)
if len(context_name) > 0:
return get_uuid_from_string(context_name)
if allow_default:
return get_uuid_from_string(DEFAULT_CONTEXT_NAME)
raise InvalidArgumentsException(
[
("context_id.context_uuid.uuid", context_uuid),
("name", context_name),
],
extra_details=["At least one is required to produce a Context UUID"],
)
def slice_get_uuid(slice_id: SliceId) -> Tuple[str, str]:
context_uuid = context_get_uuid(slice_id.context_id, allow_random=False)
raw_slice_uuid = slice_id.slice_uuid.uuid
if len(raw_slice_uuid) > 0:
return context_uuid, get_uuid_from_string(
raw_slice_uuid, prefix_for_name=context_uuid
)
raise InvalidArgumentsException(
[
("slice_id.slice_uuid.uuid", raw_slice_uuid),
],
extra_details=["At least one is required to produce a Slice UUID"],
)
def get_slice_by_default_id(
context_client : ContextClient, default_slice_id : SliceId, context_uuid : str = DEFAULT_CONTEXT_NAME,
rw_copy : bool = False, include_endpoint_ids : bool = True, include_constraints : bool = True,
include_service_ids : bool = True, include_subslice_ids : bool = True, include_config_rules : bool = True
) -> Optional[Slice]:
context_uuid, slice_uuid = slice_get_uuid(default_slice_id)
LOGGER.debug(f'get_slice_by_default_id: context_uuid={context_uuid}, slice_uuid={slice_uuid}')
slice_id = SliceId()
slice_id.context_id.context_uuid.uuid = context_uuid # pylint: disable=no-member
slice_id.slice_uuid.uuid = slice_uuid # pylint: disable=no-member
return get_slice_by_id(
context_client, slice_id, rw_copy=rw_copy, include_endpoint_ids=include_endpoint_ids,
include_constraints=include_constraints, include_service_ids=include_service_ids,
include_subslice_ids=include_subslice_ids, include_config_rules=include_config_rules)
def get_slice_by_default_name(
context_client : ContextClient, slice_name : str, context_uuid : str = DEFAULT_CONTEXT_NAME,
rw_copy : bool = False, include_endpoint_ids : bool = True, include_constraints : bool = True,
include_service_ids : bool = True, include_subslice_ids : bool = True, include_config_rules : bool = True
) -> Optional[Slice]:
default_slice_id = SliceId()
default_slice_id.context_id.context_uuid.uuid = context_uuid # pylint: disable=no-member
default_slice_id.slice_uuid.uuid = slice_name # pylint: disable=no-member
context_uuid, slice_uuid = slice_get_uuid(default_slice_id)
slice_id = SliceId()
slice_id.context_id.context_uuid.uuid = context_uuid # pylint: disable=no-member
slice_id.slice_uuid.uuid = slice_uuid # pylint: disable=no-member
return get_slice_by_id(
context_client, slice_id, rw_copy=rw_copy, include_endpoint_ids=include_endpoint_ids,
include_constraints=include_constraints, include_service_ids=include_service_ids,
include_subslice_ids=include_subslice_ids, include_config_rules=include_config_rules)
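A hedged usage sketch (illustrative only, not part of this commit) of the helper above; the module path of this file is not shown in the diff, so the import below is hypothetical, and a running Context service is assumed:

from common.Constants import DEFAULT_CONTEXT_NAME
from context.client.ContextClient import ContextClient
from slice.service.SliceTools import get_slice_by_default_name  # hypothetical module path

context_client = ContextClient()
slice_obj = get_slice_by_default_name(context_client, "my-slice", context_uuid=DEFAULT_CONTEXT_NAME)
if slice_obj is None:
    print("slice not found")
else:
    print("resolved slice uuid:", slice_obj.slice_id.slice_uuid.uuid)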
......@@ -115,6 +115,8 @@ CONTROLLER_DEVICE_TYPES = {
DeviceTypeEnum.MICROWAVE_RADIO_SYSTEM.value,
DeviceTypeEnum.OPEN_LINE_SYSTEM.value,
DeviceTypeEnum.TERAFLOWSDN_CONTROLLER.value,
DeviceTypeEnum.IETF_SLICE.value,
DeviceTypeEnum.NCE.value,
}
def split_controllers_and_network_devices(devices : List[Dict]) -> Tuple[List[Dict], List[Dict]]:
......
......@@ -54,14 +54,13 @@ def update_config_rule_custom(
config_rule.custom.resource_value = json.dumps(json_resource_value, sort_keys=True)
def copy_config_rules(source_config_rules, target_config_rules):
def copy_config_rules(source_config_rules, target_config_rules, raise_if_differs = True):
for source_config_rule in source_config_rules:
config_rule_kind = source_config_rule.WhichOneof('config_rule')
if config_rule_kind == 'custom':
custom = source_config_rule.custom
resource_key = custom.resource_key
resource_value = json.loads(custom.resource_value)
raise_if_differs = True
fields = {name:(value, raise_if_differs) for name,value in resource_value.items()}
update_config_rule_custom(target_config_rules, resource_key, fields)
......
......@@ -33,6 +33,9 @@ class ORM_DeviceDriverEnum(enum.Enum):
GNMI_OPENCONFIG = DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG
OPTICAL_TFS = DeviceDriverEnum.DEVICEDRIVER_OPTICAL_TFS
IETF_ACTN = DeviceDriverEnum.DEVICEDRIVER_IETF_ACTN
IETF_L3VPN = DeviceDriverEnum.DEVICEDRIVER_IETF_L3VPN
NCE = DeviceDriverEnum.DEVICEDRIVER_NCE
IETF_SLICE = DeviceDriverEnum.DEVICEDRIVER_IETF_SLICE
OC = DeviceDriverEnum.DEVICEDRIVER_OC
QKD = DeviceDriverEnum.DEVICEDRIVER_QKD
......
......@@ -81,6 +81,16 @@ DRIVERS.append(
}
]))
from .ietf_l3vpn.driver import IetfL3VpnDriver # pylint: disable=wrong-import-position
DRIVERS.append(
(IetfL3VpnDriver, [
{
FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.TERAFLOWSDN_CONTROLLER,
FilterFieldEnum.DRIVER: DeviceDriverEnum.DEVICEDRIVER_IETF_L3VPN,
}
]))
from .ietf_actn.IetfActnDriver import IetfActnDriver # pylint: disable=wrong-import-position
DRIVERS.append(
(IetfActnDriver, [
......@@ -90,6 +100,24 @@ DRIVERS.append(
}
]))
from .ietf_slice.driver import IetfSliceDriver # pylint: disable=wrong-import-position
DRIVERS.append(
(IetfSliceDriver, [
{
FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.IETF_SLICE,
FilterFieldEnum.DRIVER: DeviceDriverEnum.DEVICEDRIVER_IETF_SLICE,
}
]))
from .nce.driver import NCEDriver # pylint: disable=wrong-import-position
DRIVERS.append(
(NCEDriver, [
{
FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.NCE,
FilterFieldEnum.DRIVER: DeviceDriverEnum.DEVICEDRIVER_NCE,
}
]))
if LOAD_ALL_DEVICE_DRIVERS:
from .openconfig.OpenConfigDriver import OpenConfigDriver # pylint: disable=wrong-import-position
DRIVERS.append(
......
......@@ -82,6 +82,26 @@ def compose_resource_endpoint(endpoint_data : Dict[str, Any]) -> Optional[Tuple[
if 'location' in endpoint_data:
endpoint_resource_value['location'] = endpoint_data['location']
if "site_location" in endpoint_data:
endpoint_resource_value["site_location"] = endpoint_data["site_location"]
if "ce-ip" in endpoint_data:
endpoint_resource_value["ce-ip"] = endpoint_data["ce-ip"]
if "address_ip" in endpoint_data:
endpoint_resource_value["address_ip"] = endpoint_data["address_ip"]
if "address_prefix" in endpoint_data:
endpoint_resource_value["address_prefix"] = endpoint_data["address_prefix"]
if "mtu" in endpoint_data:
endpoint_resource_value["mtu"] = endpoint_data["mtu"]
if "ipv4_lan_prefixes" in endpoint_data:
endpoint_resource_value["ipv4_lan_prefixes"] = endpoint_data[
"ipv4_lan_prefixes"
]
return endpoint_resource_key, endpoint_resource_value
except: # pylint: disable=bare-except
......
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_INTERFACES,
RESOURCE_NETWORK_INSTANCES,
)
SPECIAL_RESOURCE_MAPPINGS = {
RESOURCE_ENDPOINTS: "/endpoints",
RESOURCE_INTERFACES: "/interfaces",
RESOURCE_NETWORK_INSTANCES: "/net-instances",
}
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Dict, List, Optional
import requests
from requests.auth import HTTPBasicAuth
from device.service.driver_api.ImportTopologyEnum import ImportTopologyEnum
GET_DEVICES_URL = "{:s}://{:s}:{:d}/tfs-api/devices"
GET_LINKS_URL = "{:s}://{:s}:{:d}/tfs-api/links"
L3VPN_URL = "{:s}://{:s}:{:d}/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services"
TIMEOUT = 30
HTTP_OK_CODES = {
200, # OK
201, # Created
202, # Accepted
204, # No Content
}
MAPPING_STATUS = {
"DEVICEOPERATIONALSTATUS_UNDEFINED": 0,
"DEVICEOPERATIONALSTATUS_DISABLED": 1,
"DEVICEOPERATIONALSTATUS_ENABLED": 2,
}
MAPPING_DRIVER = {
"DEVICEDRIVER_UNDEFINED": 0,
"DEVICEDRIVER_OPENCONFIG": 1,
"DEVICEDRIVER_TRANSPORT_API": 2,
"DEVICEDRIVER_P4": 3,
"DEVICEDRIVER_IETF_NETWORK_TOPOLOGY": 4,
"DEVICEDRIVER_ONF_TR_532": 5,
"DEVICEDRIVER_XR": 6,
"DEVICEDRIVER_IETF_L2VPN": 7,
"DEVICEDRIVER_GNMI_OPENCONFIG": 8,
"DEVICEDRIVER_OPTICAL_TFS": 9,
"DEVICEDRIVER_IETF_ACTN": 10,
"DEVICEDRIVER_OC": 11,
}
MSG_ERROR = "Could not retrieve data from remote TeraFlowSDN instance({:s}). status_code={:s} reply={:s}"
LOGGER = logging.getLogger(__name__)
class TfsApiClient:
def __init__(
self,
address: str,
port: int,
scheme: str = "http",
username: Optional[str] = None,
password: Optional[str] = None,
) -> None:
self._devices_url = GET_DEVICES_URL.format(scheme, address, port)
self._links_url = GET_LINKS_URL.format(scheme, address, port)
self._l3vpn_url = L3VPN_URL.format(scheme, address, port)
self._auth = None
# (
# HTTPBasicAuth(username, password)
# if username is not None and password is not None
# else None
# )
def get_devices_endpoints(
self, import_topology: ImportTopologyEnum = ImportTopologyEnum.DEVICES
) -> List[Dict]:
LOGGER.debug("[get_devices_endpoints] begin")
LOGGER.debug(
"[get_devices_endpoints] import_topology={:s}".format(str(import_topology))
)
reply = requests.get(self._devices_url, timeout=TIMEOUT, auth=self._auth)
if reply.status_code not in HTTP_OK_CODES:
msg = MSG_ERROR.format(
str(self._devices_url), str(reply.status_code), str(reply)
)
LOGGER.error(msg)
raise Exception(msg)
if import_topology == ImportTopologyEnum.DISABLED:
raise Exception(
"Unsupported import_topology mode: {:s}".format(str(import_topology))
)
result = list()
for json_device in reply.json()["devices"]:
device_uuid: str = json_device["device_id"]["device_uuid"]["uuid"]
device_type: str = json_device["device_type"]
device_status = json_device["device_operational_status"]
device_url = "/devices/device[{:s}]".format(device_uuid)
device_data = {
"uuid": json_device["device_id"]["device_uuid"]["uuid"],
"name": json_device["name"],
"type": device_type,
"status": MAPPING_STATUS[device_status],
"drivers": [
MAPPING_DRIVER[driver] for driver in json_device["device_drivers"]
],
}
result.append((device_url, device_data))
for json_endpoint in json_device["device_endpoints"]:
endpoint_uuid = json_endpoint["endpoint_id"]["endpoint_uuid"]["uuid"]
endpoint_url = "/endpoints/endpoint[{:s}]".format(endpoint_uuid)
endpoint_data = {
"device_uuid": device_uuid,
"uuid": endpoint_uuid,
"name": json_endpoint["name"],
"type": json_endpoint["endpoint_type"],
}
result.append((endpoint_url, endpoint_data))
if import_topology == ImportTopologyEnum.DEVICES:
LOGGER.debug("[get_devices_endpoints] devices only; returning")
return result
reply = requests.get(self._links_url, timeout=TIMEOUT, auth=self._auth)
if reply.status_code not in HTTP_OK_CODES:
msg = MSG_ERROR.format(
str(self._links_url), str(reply.status_code), str(reply)
)
LOGGER.error(msg)
raise Exception(msg)
for json_link in reply.json()["links"]:
link_uuid: str = json_link["link_id"]["link_uuid"]["uuid"]
link_url = "/links/link[{:s}]".format(link_uuid)
link_endpoint_ids = [
(
json_endpoint_id["device_id"]["device_uuid"]["uuid"],
json_endpoint_id["endpoint_uuid"]["uuid"],
)
for json_endpoint_id in json_link["link_endpoint_ids"]
]
link_data = {
"uuid": json_link["link_id"]["link_uuid"]["uuid"],
"name": json_link["name"],
"endpoints": link_endpoint_ids,
}
result.append((link_url, link_data))
LOGGER.debug("[get_devices_endpoints] topology; returning")
return result
def create_connectivity_service(self, l3vpn_data: dict) -> None:
try:
requests.post(self._l3vpn_url, json=l3vpn_data)
LOGGER.debug(
"[create_connectivity_service] l3vpn_data={:s}".format(str(l3vpn_data))
)
except requests.exceptions.ConnectionError:
raise Exception("faild to send post request to TFS L3VPN NBI")
def update_connectivity_service(self, l3vpn_data: dict) -> None:
vpn_id = l3vpn_data['ietf-l3vpn-svc:l3vpn-svc']["vpn-services"]["vpn-service"][0]["vpn-id"]
url = self._l3vpn_url + f"/vpn-service={vpn_id}"
try:
requests.put(url, json=l3vpn_data)
LOGGER.debug(
"[update_connectivity_service] l3vpn_data={:s}".format(str(l3vpn_data))
)
except requests.exceptions.ConnectionError:
raise Exception("faild to send post request to TFS L3VPN NBI")
def delete_connectivity_service(self, service_uuid: str) -> None:
url = self._l3vpn_url + f"/vpn-service={service_uuid}"
try:
requests.delete(url, auth=self._auth)
LOGGER.debug("[delete_connectivity_service] url={:s}".format(str(url)))
except requests.exceptions.ConnectionError:
raise Exception("faild to send delete request to TFS L3VPN NBI")
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, Optional, Tuple, TypedDict
import requests
from common.proto.kpi_sample_types_pb2 import KpiSampleType
from common.type_checkers.Checkers import chk_attribute, chk_string, chk_type
from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
from .Constants import SPECIAL_RESOURCE_MAPPINGS
class LANPrefixesDict(TypedDict):
lan: str
lan_tag: str
LOGGER = logging.getLogger(__name__)
SITE_NETWORK_ACCESS_TYPE = "ietf-l3vpn-svc:multipoint"
def service_exists(wim_url: str, auth, service_uuid: str) -> bool:
try:
get_connectivity_service(wim_url, auth, service_uuid)
return True
except: # pylint: disable=bare-except
return False
def get_all_active_connectivity_services(wim_url: str, auth):
try:
LOGGER.info("Sending get all connectivity services")
servicepoint = f"{wim_url}/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services"
response = requests.get(servicepoint, auth=auth)
if response.status_code != requests.codes.ok:
raise Exception(
"Unable to get all connectivity services (status_code={:d})".format(response.status_code)
)
return response
except requests.exceptions.ConnectionError:
raise Exception("Request Timeout", http_code=408)
def get_connectivity_service(wim_url, auth, service_uuid):
try:
LOGGER.info("Sending get connectivity service")
servicepoint = f"{wim_url}/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services/vpn-service={service_uuid}"
response = requests.get(servicepoint, auth=auth)
if response.status_code != requests.codes.ok:
raise Exception(
"Unable to get connectivity service {:s} (status_code={:d})".format(str(service_uuid), response.status_code)
)
return response
except requests.exceptions.ConnectionError:
raise Exception("Request Timeout", http_code=408)
def process_optional_string_field(
endpoint_data: Dict[str, Any],
field_name: str,
endpoint_resource_value: Dict[str, Any],
) -> None:
field_value = chk_attribute(
field_name, endpoint_data, "endpoint_data", default=None
)
if field_value is None:
return
chk_string("endpoint_data.{:s}".format(field_name), field_value)
if len(field_value) > 0:
endpoint_resource_value[field_name] = field_value
def compose_resource_endpoint(
endpoint_data: Dict[str, Any],
) -> Optional[Tuple[str, Dict]]:
try:
# Check type of endpoint_data
chk_type("endpoint_data", endpoint_data, dict)
# Check endpoint UUID (mandatory)
endpoint_uuid = chk_attribute("uuid", endpoint_data, "endpoint_data")
chk_string("endpoint_data.uuid", endpoint_uuid, min_length=1)
endpoint_resource_path = SPECIAL_RESOURCE_MAPPINGS.get(RESOURCE_ENDPOINTS)
endpoint_resource_key = "{:s}/endpoint[{:s}]".format(
endpoint_resource_path, endpoint_uuid
)
endpoint_resource_value = {"uuid": endpoint_uuid}
# Check endpoint optional string fields
process_optional_string_field(endpoint_data, "name", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "site_location", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "ce-ip", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "address_ip", endpoint_resource_value
)
process_optional_string_field(
endpoint_data, "address_prefix", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "mtu", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "ipv4_lan_prefixes", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "type", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "context_uuid", endpoint_resource_value
)
process_optional_string_field(
endpoint_data, "topology_uuid", endpoint_resource_value
)
# Check endpoint sample types (optional)
endpoint_sample_types = chk_attribute(
"sample_types", endpoint_data, "endpoint_data", default=[]
)
chk_type("endpoint_data.sample_types", endpoint_sample_types, list)
sample_types = {}
sample_type_errors = []
for i, endpoint_sample_type in enumerate(endpoint_sample_types):
field_name = "endpoint_data.sample_types[{:d}]".format(i)
try:
chk_type(field_name, endpoint_sample_type, (int, str))
if isinstance(endpoint_sample_type, int):
metric_name = KpiSampleType.Name(endpoint_sample_type)
metric_id = endpoint_sample_type
elif isinstance(endpoint_sample_type, str):
metric_id = KpiSampleType.Value(endpoint_sample_type)
metric_name = endpoint_sample_type
else:
str_type = str(type(endpoint_sample_type))
raise Exception("Bad format: {:s}".format(str_type)) # pylint: disable=broad-exception-raised
except Exception as e: # pylint: disable=broad-exception-caught
MSG = "Unsupported {:s}({:s}) : {:s}"
sample_type_errors.append(
MSG.format(field_name, str(endpoint_sample_type), str(e))
)
continue  # skip malformed sample types; the collected errors are raised together below
metric_name = metric_name.lower().replace("kpisampletype_", "")
monitoring_resource_key = "{:s}/state/{:s}".format(
endpoint_resource_key, metric_name
)
sample_types[metric_id] = monitoring_resource_key
if len(sample_type_errors) > 0:
# pylint: disable=broad-exception-raised
raise Exception(
"Malformed Sample Types:\n{:s}".format("\n".join(sample_type_errors))
)
if len(sample_types) > 0:
endpoint_resource_value["sample_types"] = sample_types
if "site_location" in endpoint_data:
endpoint_resource_value["site_location"] = endpoint_data["site_location"]
if "ce-ip" in endpoint_data:
endpoint_resource_value["ce-ip"] = endpoint_data["ce-ip"]
if "address_ip" in endpoint_data:
endpoint_resource_value["address_ip"] = endpoint_data["address_ip"]
if "address_prefix" in endpoint_data:
endpoint_resource_value["address_prefix"] = endpoint_data["address_prefix"]
if "mtu" in endpoint_data:
endpoint_resource_value["mtu"] = endpoint_data["mtu"]
if "ipv4_lan_prefixes" in endpoint_data:
endpoint_resource_value["ipv4_lan_prefixes"] = endpoint_data[
"ipv4_lan_prefixes"
]
return endpoint_resource_key, endpoint_resource_value
except: # pylint: disable=bare-except
LOGGER.exception("Problem composing endpoint({:s})".format(str(endpoint_data)))
return None
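For context, a small sketch (not part of this commit) of the endpoint_data dictionary this helper expects and the (key, value) pair it produces; field names are taken from the checks above and the values are placeholders:

endpoint_data = {
    "uuid": "eth0",
    "name": "eth0",
    "site_location": "site-a",
    "ce-ip": "192.168.1.1",
    "address_ip": "10.0.0.1",
    "address_prefix": "24",
    "mtu": "1500",
}
# compose_resource_endpoint(endpoint_data) would return roughly:
#   ("/endpoints/endpoint[eth0]",
#    {"uuid": "eth0", "name": "eth0", "site_location": "site-a", "ce-ip": "192.168.1.1",
#     "address_ip": "10.0.0.1", "address_prefix": "24", "mtu": "1500"})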
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
import threading
from typing import Any, Iterator, List, Optional, Tuple, Union
import anytree
import requests
from requests.auth import HTTPBasicAuth
from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
from common.type_checkers.Checkers import chk_length, chk_string, chk_type
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_SERVICES,
_Driver,
)
from device.service.driver_api.AnyTreeTools import (
TreeNode,
dump_subtree,
get_subnode,
set_subnode_value,
)
from device.service.driver_api.ImportTopologyEnum import (
ImportTopologyEnum,
get_import_topology,
)
from .Constants import SPECIAL_RESOURCE_MAPPINGS
from .TfsApiClient import TfsApiClient
from .Tools import compose_resource_endpoint
LOGGER = logging.getLogger(__name__)
ALL_RESOURCE_KEYS = [
RESOURCE_ENDPOINTS,
RESOURCE_SERVICES,
]
RE_GET_ENDPOINT_FROM_INTERFACE = re.compile(r"^\/interface\[([^\]]+)\].*")
RE_IETF_L3VPN_DATA = re.compile(r"^\/service\[[^\]]+\]\/IETFL3VPN$")
RE_IETF_L3VPN_OPERATION = re.compile(r"^\/service\[[^\]]+\]\/IETFL3VPN\/operation$")
DRIVER_NAME = "ietf_l3vpn"
METRICS_POOL = MetricsPool("Device", "Driver", labels={"driver": DRIVER_NAME})
class IetfL3VpnDriver(_Driver):
def __init__(self, address: str, port: str, **settings) -> None:
super().__init__(DRIVER_NAME, address, int(port), **settings)
self.__lock = threading.Lock()
self.__started = threading.Event()
self.__terminate = threading.Event()
self.__running = TreeNode(".")
scheme = self.settings.get("scheme", "http")
username = self.settings.get("username")
password = self.settings.get("password")
self.tac = TfsApiClient(
self.address,
self.port,
scheme=scheme,
username=username,
password=password,
)
self.__auth = None
# (
# HTTPBasicAuth(username, password)
# if username is not None and password is not None
# else None
# )
self.__tfs_nbi_root = "{:s}://{:s}:{:d}".format(
scheme, self.address, int(self.port)
)
self.__timeout = int(self.settings.get("timeout", 120))
self.__import_topology = get_import_topology(
self.settings, default=ImportTopologyEnum.DEVICES
)
endpoints = self.settings.get("endpoints", [])
endpoint_resources = []
for endpoint in endpoints:
endpoint_resource = compose_resource_endpoint(endpoint)
if endpoint_resource is None:
continue
endpoint_resources.append(endpoint_resource)
self._set_initial_config(endpoint_resources)
def _set_initial_config(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
chk_type("resources", resources, list)
if len(resources) == 0:
return []
results = []
resolver = anytree.Resolver(pathattr="name")
with self.__lock:
for i, resource in enumerate(resources):
str_resource_name = "resources[#{:d}]".format(i)
try:
chk_type(str_resource_name, resource, (list, tuple))
chk_length(str_resource_name, resource, min_length=2, max_length=2)
resource_key, resource_value = resource
chk_string(str_resource_name, resource_key, allow_empty=False)
resource_path = resource_key.split("/")
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Exception validating {:s}: {:s}".format(
str_resource_name, str(resource_key)
)
)
results.append(e) # if validation fails, store the exception
continue
try:
resource_value = json.loads(resource_value)
except: # pylint: disable=bare-except
pass
set_subnode_value(
resolver, self.__running, resource_path, resource_value
)
results.append(True)
return results
def Connect(self) -> bool:
url = (
self.__tfs_nbi_root + "/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services"
)
with self.__lock:
if self.__started.is_set():
return True
try:
# requests.get(url, timeout=self.__timeout, auth=self.__auth)
...
except requests.exceptions.Timeout:
LOGGER.exception("Timeout connecting {:s}".format(url))
return False
except Exception: # pylint: disable=broad-except
LOGGER.exception("Exception connecting {:s}".format(url))
return False
else:
self.__started.set()
return True
def Disconnect(self) -> bool:
with self.__lock:
self.__terminate.set()
return True
@metered_subclass_method(METRICS_POOL)
def GetInitialConfig(self) -> List[Tuple[str, Any]]:
with self.__lock:
return []
@metered_subclass_method(METRICS_POOL)
def GetConfig(
self, resource_keys: List[str] = []
) -> List[Tuple[str, Union[Any, None, Exception]]]:
chk_type("resources", resource_keys, list)
with self.__lock:
if len(resource_keys) == 0:
return dump_subtree(self.__running)
results = []
resolver = anytree.Resolver(pathattr="name")
for i, resource_key in enumerate(resource_keys):
str_resource_name = "resource_key[#{:d}]".format(i)
try:
chk_string(str_resource_name, resource_key, allow_empty=False)
resource_key = SPECIAL_RESOURCE_MAPPINGS.get(
resource_key, resource_key
)
resource_path = resource_key.split("/")
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Exception validating {:s}: {:s}".format(
str_resource_name, str(resource_key)
)
)
results.append(
(resource_key, e)
) # if validation fails, store the exception
continue
resource_node = get_subnode(
resolver, self.__running, resource_path, default=None
)
# if not found, resource_node is None
if resource_node is None:
continue
results.extend(dump_subtree(resource_node))
return results
@metered_subclass_method(METRICS_POOL)
def SetConfig(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
results = []
if len(resources) == 0:
return results
with self.__lock:
for resource in resources:
resource_key, resource_value = resource
if RE_IETF_L3VPN_OPERATION.match(resource_key):
operation_type = json.loads(resource_value)["type"]
results.append((resource_key, True))
break
else:
raise Exception("operation type not found in resources")
for resource in resources:
LOGGER.info("resource = {:s}".format(str(resource)))
resource_key, resource_value = resource
if not RE_IETF_L3VPN_DATA.match(resource_key):
continue
try:
resource_value = json.loads(resource_value)
# if service_exists(self.__tfs_nbi_root, self.__auth, service_uuid):
# exc = NotImplementedError(
# "IETF L3VPN Service Update is still not supported"
# )
# results.append((resource[0], exc))
# continue
if operation_type == "create":
service_id = resource_value["ietf-l3vpn-svc:l3vpn-svc"][
"vpn-services"
]["vpn-service"][0]["vpn-id"]
self.tac.create_connectivity_service(resource_value)
elif operation_type == "update":
service_id = resource_value["ietf-l3vpn-svc:l3vpn-svc"][
"vpn-services"
]["vpn-service"][0]["vpn-id"]
self.tac.update_connectivity_service(resource_value)
else:
raise Exception("operation type not supported")
results.append((resource_key, True))
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Unhandled error processing resource_key({:s})".format(
str(resource_key)
)
)
results.append((resource_key, e))
return results
@metered_subclass_method(METRICS_POOL)
def DeleteConfig(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
results = []
if len(resources) == 0:
return results
with self.__lock:
for resource in resources:
LOGGER.info("resource = {:s}".format(str(resource)))
resource_key, resource_value = resource
if not RE_IETF_L3VPN_DATA.match(resource_key):
continue
try:
resource_value = json.loads(resource_value)
service_id = resource_value["id"]
# if service_exists(self.__tfs_nbi_root, self.__auth, service_uuid):
self.tac.delete_connectivity_service(service_id)
results.append((resource_key, True))
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Unhandled error processing resource_key({:s})".format(
str(resource_key)
)
)
results.append((resource_key, e))
return results
@metered_subclass_method(METRICS_POOL)
def SubscribeState(
self, subscriptions: List[Tuple[str, float, float]]
) -> List[Union[bool, Exception]]:
# TODO: IETF L3VPN does not support monitoring yet
return [False for _ in subscriptions]
@metered_subclass_method(METRICS_POOL)
def UnsubscribeState(
self, subscriptions: List[Tuple[str, float, float]]
) -> List[Union[bool, Exception]]:
# TODO: IETF L3VPN does not support monitoring yet
return [False for _ in subscriptions]
def GetState(
self, blocking=False, terminate: Optional[threading.Event] = None
) -> Iterator[Tuple[float, str, Any]]:
# TODO: IETF L3VPN does not support monitoring yet
return []
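For orientation, a hedged usage sketch (not part of this commit) of how the Device component could exercise this driver; the import path is assumed from the registration in device/service/drivers/__init__.py, the resource keys follow RE_IETF_L3VPN_OPERATION / RE_IETF_L3VPN_DATA above, and a reachable TFS L3VPN NBI is assumed:

import json
from device.service.drivers.ietf_l3vpn.driver import IetfL3VpnDriver  # assumed path

driver = IetfL3VpnDriver("127.0.0.1", "80", scheme="http", timeout=30)
assert driver.Connect()
vpn_payload = {"ietf-l3vpn-svc:l3vpn-svc": {"vpn-services": {"vpn-service": [{"vpn-id": "vpn1"}]}}}
results = driver.SetConfig([
    # the 'operation' resource selects create/update handling...
    ("/service[vpn1]/IETFL3VPN/operation", json.dumps({"type": "create"})),
    # ...and the data resource carries the IETF L3VPN service payload
    ("/service[vpn1]/IETFL3VPN", json.dumps(vpn_payload)),
])
driver.Disconnect()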
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_INTERFACES,
RESOURCE_NETWORK_INSTANCES,
)
SPECIAL_RESOURCE_MAPPINGS = {
RESOURCE_ENDPOINTS: "/endpoints",
RESOURCE_INTERFACES: "/interfaces",
RESOURCE_NETWORK_INSTANCES: "/net-instances",
}
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, Optional, Tuple
import requests
from common.proto.kpi_sample_types_pb2 import KpiSampleType
from common.type_checkers.Checkers import chk_attribute, chk_string, chk_type
from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
from .Constants import SPECIAL_RESOURCE_MAPPINGS
LOGGER = logging.getLogger(__name__)
def process_optional_string_field(
endpoint_data: Dict[str, Any],
field_name: str,
endpoint_resource_value: Dict[str, Any],
) -> None:
field_value = chk_attribute(
field_name, endpoint_data, "endpoint_data", default=None
)
if field_value is None:
return
chk_string("endpoint_data.{:s}".format(field_name), field_value)
if len(field_value) > 0:
endpoint_resource_value[field_name] = field_value
def compose_resource_endpoint(
endpoint_data: Dict[str, Any],
) -> Optional[Tuple[str, Dict]]:
try:
# Check type of endpoint_data
chk_type("endpoint_data", endpoint_data, dict)
# Check endpoint UUID (mandatory)
endpoint_uuid = chk_attribute("uuid", endpoint_data, "endpoint_data")
chk_string("endpoint_data.uuid", endpoint_uuid, min_length=1)
endpoint_resource_path = SPECIAL_RESOURCE_MAPPINGS.get(RESOURCE_ENDPOINTS)
endpoint_resource_key = "{:s}/endpoint[{:s}]".format(
endpoint_resource_path, endpoint_uuid
)
endpoint_resource_value = {"uuid": endpoint_uuid}
# Check endpoint optional string fields
process_optional_string_field(endpoint_data, "name", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "site_location", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "ce-ip", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "address_ip", endpoint_resource_value
)
process_optional_string_field(
endpoint_data, "address_prefix", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "mtu", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "ipv4_lan_prefixes", endpoint_resource_value
)
process_optional_string_field(endpoint_data, "type", endpoint_resource_value)
process_optional_string_field(
endpoint_data, "context_uuid", endpoint_resource_value
)
process_optional_string_field(
endpoint_data, "topology_uuid", endpoint_resource_value
)
# Check endpoint sample types (optional)
endpoint_sample_types = chk_attribute(
"sample_types", endpoint_data, "endpoint_data", default=[]
)
chk_type("endpoint_data.sample_types", endpoint_sample_types, list)
sample_types = {}
sample_type_errors = []
for i, endpoint_sample_type in enumerate(endpoint_sample_types):
field_name = "endpoint_data.sample_types[{:d}]".format(i)
try:
chk_type(field_name, endpoint_sample_type, (int, str))
if isinstance(endpoint_sample_type, int):
metric_name = KpiSampleType.Name(endpoint_sample_type)
metric_id = endpoint_sample_type
elif isinstance(endpoint_sample_type, str):
metric_id = KpiSampleType.Value(endpoint_sample_type)
metric_name = endpoint_sample_type
else:
str_type = str(type(endpoint_sample_type))
raise Exception("Bad format: {:s}".format(str_type)) # pylint: disable=broad-exception-raised
except Exception as e: # pylint: disable=broad-exception-caught
MSG = "Unsupported {:s}({:s}) : {:s}"
sample_type_errors.append(
MSG.format(field_name, str(endpoint_sample_type), str(e))
)
continue  # skip malformed sample types; the collected errors are raised together below
metric_name = metric_name.lower().replace("kpisampletype_", "")
monitoring_resource_key = "{:s}/state/{:s}".format(
endpoint_resource_key, metric_name
)
sample_types[metric_id] = monitoring_resource_key
if len(sample_type_errors) > 0:
# pylint: disable=broad-exception-raised
raise Exception(
"Malformed Sample Types:\n{:s}".format("\n".join(sample_type_errors))
)
if len(sample_types) > 0:
endpoint_resource_value["sample_types"] = sample_types
if "site_location" in endpoint_data:
endpoint_resource_value["site_location"] = endpoint_data["site_location"]
if "ce-ip" in endpoint_data:
endpoint_resource_value["ce-ip"] = endpoint_data["ce-ip"]
if "address_ip" in endpoint_data:
endpoint_resource_value["address_ip"] = endpoint_data["address_ip"]
if "address_prefix" in endpoint_data:
endpoint_resource_value["address_prefix"] = endpoint_data["address_prefix"]
if "mtu" in endpoint_data:
endpoint_resource_value["mtu"] = endpoint_data["mtu"]
if "ipv4_lan_prefixes" in endpoint_data:
endpoint_resource_value["ipv4_lan_prefixes"] = endpoint_data[
"ipv4_lan_prefixes"
]
return endpoint_resource_key, endpoint_resource_value
except: # pylint: disable=bare-except
LOGGER.exception("Problem composing endpoint({:s})".format(str(endpoint_data)))
return None
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
import threading
from typing import Any, Iterator, List, Optional, Tuple, Union
import anytree
import requests
from requests.auth import HTTPBasicAuth
from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
from common.type_checkers.Checkers import chk_length, chk_string, chk_type
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_SERVICES,
_Driver,
)
from device.service.driver_api.AnyTreeTools import (
TreeNode,
dump_subtree,
get_subnode,
set_subnode_value,
)
from device.service.driver_api.ImportTopologyEnum import (
ImportTopologyEnum,
get_import_topology,
)
from .Constants import SPECIAL_RESOURCE_MAPPINGS
from .tfs_slice_nbi_client import TfsApiClient
from .Tools import compose_resource_endpoint
LOGGER = logging.getLogger(__name__)
ALL_RESOURCE_KEYS = [
RESOURCE_ENDPOINTS,
RESOURCE_SERVICES,
]
RE_IETF_SLICE_DATA = re.compile(r"^\/service\[[^\]]+\]\/IETFSlice$")
RE_IETF_SLICE_OPERATION = re.compile(r"^\/service\[[^\]]+\]\/IETFSlice\/operation$")
DRIVER_NAME = "ietf_slice"
METRICS_POOL = MetricsPool("Device", "Driver", labels={"driver": DRIVER_NAME})
class IetfSliceDriver(_Driver):
def __init__(self, address: str, port: str, **settings) -> None:
super().__init__(DRIVER_NAME, address, int(port), **settings)
self.__lock = threading.Lock()
self.__started = threading.Event()
self.__terminate = threading.Event()
self.__running = TreeNode(".")
scheme = self.settings.get("scheme", "http")
username = self.settings.get("username")
password = self.settings.get("password")
self.tac = TfsApiClient(
self.address,
self.port,
scheme=scheme,
username=username,
password=password,
)
self.__auth = None
# (
# HTTPBasicAuth(username, password)
# if username is not None and password is not None
# else None
# )
self.__tfs_nbi_root = "{:s}://{:s}:{:d}".format(
scheme, self.address, int(self.port)
)
self.__timeout = int(self.settings.get("timeout", 120))
self.__import_topology = get_import_topology(
self.settings, default=ImportTopologyEnum.DEVICES
)
endpoints = self.settings.get("endpoints", [])
endpoint_resources = []
for endpoint in endpoints:
endpoint_resource = compose_resource_endpoint(endpoint)
if endpoint_resource is None:
continue
endpoint_resources.append(endpoint_resource)
self._set_initial_config(endpoint_resources)
def _set_initial_config(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
chk_type("resources", resources, list)
if len(resources) == 0:
return []
results = []
resolver = anytree.Resolver(pathattr="name")
with self.__lock:
for i, resource in enumerate(resources):
str_resource_name = "resources[#{:d}]".format(i)
try:
chk_type(str_resource_name, resource, (list, tuple))
chk_length(str_resource_name, resource, min_length=2, max_length=2)
resource_key, resource_value = resource
chk_string(str_resource_name, resource_key, allow_empty=False)
resource_path = resource_key.split("/")
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Exception validating {:s}: {:s}".format(
str_resource_name, str(resource_key)
)
)
results.append(e) # if validation fails, store the exception
continue
try:
resource_value = json.loads(resource_value)
except: # pylint: disable=bare-except
pass
set_subnode_value(
resolver, self.__running, resource_path, resource_value
)
results.append(True)
return results
def Connect(self) -> bool:
url = self.__tfs_nbi_root + "/restconf/data/ietf-network-slice-service:ietf-nss"
with self.__lock:
if self.__started.is_set():
return True
try:
# requests.get(url, timeout=self.__timeout)
...
except requests.exceptions.Timeout:
LOGGER.exception("Timeout connecting {:s}".format(url))
return False
except Exception: # pylint: disable=broad-except
LOGGER.exception("Exception connecting {:s}".format(url))
return False
else:
self.__started.set()
return True
def Disconnect(self) -> bool:
with self.__lock:
self.__terminate.set()
return True
@metered_subclass_method(METRICS_POOL)
def GetInitialConfig(self) -> List[Tuple[str, Any]]:
with self.__lock:
return []
@metered_subclass_method(METRICS_POOL)
def GetConfig(
self, resource_keys: List[str] = []
) -> List[Tuple[str, Union[Any, None, Exception]]]:
chk_type("resources", resource_keys, list)
with self.__lock:
if len(resource_keys) == 0:
return dump_subtree(self.__running)
results = []
resolver = anytree.Resolver(pathattr="name")
for i, resource_key in enumerate(resource_keys):
str_resource_name = "resource_key[#{:d}]".format(i)
try:
chk_string(str_resource_name, resource_key, allow_empty=False)
resource_key = SPECIAL_RESOURCE_MAPPINGS.get(
resource_key, resource_key
)
resource_path = resource_key.split("/")
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Exception validating {:s}: {:s}".format(
str_resource_name, str(resource_key)
)
)
results.append(
(resource_key, e)
) # if validation fails, store the exception
continue
resource_node = get_subnode(
resolver, self.__running, resource_path, default=None
)
# if not found, resource_node is None
if resource_node is None:
continue
results.extend(dump_subtree(resource_node))
return results
@metered_subclass_method(METRICS_POOL)
def SetConfig(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
results = []
if len(resources) == 0:
return results
with self.__lock:
for resource in resources:
resource_key, resource_value = resource
if RE_IETF_SLICE_OPERATION.match(resource_key):
operation_type = json.loads(resource_value)["type"]
results.append((resource_key, True))
break
else:
raise Exception("operation type not found in resources")
for resource in resources:
LOGGER.info("resource = {:s}".format(str(resource)))
resource_key, resource_value = resource
if not RE_IETF_SLICE_DATA.match(resource_key):
continue
try:
resource_value = json.loads(resource_value)
slice_name = resource_value["network-slice-services"][
"slice-service"
][0]["id"]
if operation_type == "create":
self.tac.create_slice(resource_value)
elif operation_type == "update":
connection_groups = resource_value["network-slice-services"][
"slice-service"
][0]["connection-groups"]["connection-group"]
if len(connection_groups) != 1:
raise Exception("only one connection group is supported")
connection_group = connection_groups[0]
self.tac.update_slice(
slice_name, connection_group["id"], connection_group
)
elif operation_type == "delete":
self.tac.delete_slice(slice_name)
results.append((resource_key, True))
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Unhandled error processing resource_key({:s})".format(
str(resource_key)
)
)
results.append((resource_key, e))
return results
@metered_subclass_method(METRICS_POOL)
def DeleteConfig(
self, resources: List[Tuple[str, Any]]
) -> List[Union[bool, Exception]]:
results = []
if len(resources) == 0:
return results
with self.__lock:
for resource in resources:
LOGGER.info("resource = {:s}".format(str(resource)))
resource_key, resource_value = resource
try:
results.append((resource_key, True))
except Exception as e: # pylint: disable=broad-except
LOGGER.exception(
"Unhandled error processing resource_key({:s})".format(
str(resource_key)
)
)
results.append((resource_key, e))
return results
@metered_subclass_method(METRICS_POOL)
def SubscribeState(
self, subscriptions: List[Tuple[str, float, float]]
) -> List[Union[bool, Exception]]:
# TODO: IETF Slice does not support monitoring yet
return [False for _ in subscriptions]
@metered_subclass_method(METRICS_POOL)
def UnsubscribeState(
self, subscriptions: List[Tuple[str, float, float]]
) -> List[Union[bool, Exception]]:
# TODO: IETF Slice does not support monitoring yet
return [False for _ in subscriptions]
def GetState(
self, blocking=False, terminate: Optional[threading.Event] = None
) -> Iterator[Tuple[float, str, Any]]:
# TODO: IETF Slice does not support monitoring yet
return []
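Similarly, a hedged usage sketch (not part of this commit) for the IETF Slice driver; the import path is assumed from the registration in device/service/drivers/__init__.py and a reachable IETF Slice NBI is assumed:

import json
from device.service.drivers.ietf_slice.driver import IetfSliceDriver  # assumed path

driver = IetfSliceDriver("127.0.0.1", "80", scheme="http")
assert driver.Connect()
slice_payload = {"network-slice-services": {"slice-service": [{
    "id": "slice1",
    "connection-groups": {"connection-group": [{"id": "cg1"}]},
}]}}
results = driver.SetConfig([
    ("/service[slice1]/IETFSlice/operation", json.dumps({"type": "create"})),
    ("/service[slice1]/IETFSlice", json.dumps(slice_payload)),
])
driver.Disconnect()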
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Optional
import requests
from requests.auth import HTTPBasicAuth
IETF_SLICE_URL = "{:s}://{:s}:{:d}/restconf/data/ietf-network-slice-service"
TIMEOUT = 30
LOGGER = logging.getLogger(__name__)
HEADERS = {"Content-Type": "application/json"}
class TfsApiClient:
def __init__(
self,
address: str,
port: int,
scheme: str = "http",
username: Optional[str] = None,
password: Optional[str] = None,
) -> None:
self._slice_url = IETF_SLICE_URL.format(scheme, address, port)
self._auth = None
# (
# HTTPBasicAuth(username, password)
# if username is not None and password is not None
# else None
# )
def create_slice(self, slice_data: dict) -> None:
url = self._slice_url + ":network-slice-services"
try:
requests.post(url, json=slice_data, headers=HEADERS)
LOGGER.info(f"IETF Slice Post to {url}: {slice_data}")
except requests.exceptions.ConnectionError:
raise Exception("faild to send post request to TFS IETF Slice NBI")
def update_slice(
self,
slice_name: str,
connection_group_id: str,
updated_connection_group_data: dict,
) -> None:
url = (
self._slice_url
+ f":network-slice-services/slice-service={slice_name}/connection-groups/connection-group={connection_group_id}"
)
try:
requests.put(url, json=updated_connection_group_data, headers=HEADERS)
LOGGER.info(f"IETF Slice Put to {url}: {updated_connection_group_data}")
except requests.exceptions.ConnectionError:
raise Exception("faild to send update request to TFS IETF Slice NBI")
def delete_slice(self, slice_name: str) -> None:
url = self._slice_url + f":network-slice-services/slice-service={slice_name}"
try:
requests.delete(url)
LOGGER.info(f"IETF Slice Delete to {url}")
except requests.exceptions.ConnectionError:
raise Exception("faild to send delete request to TFS IETF Slice NBI")
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_INTERFACES,
RESOURCE_NETWORK_INSTANCES,
)
SPECIAL_RESOURCE_MAPPINGS = {
RESOURCE_ENDPOINTS: "/endpoints",
RESOURCE_INTERFACES: "/interfaces",
RESOURCE_NETWORK_INSTANCES: "/net-instances",
}