Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • tfs/controller
1 result
Show changes
Showing
with 2445 additions and 186 deletions
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, Optional, Tuple, TypedDict
import requests
from common.proto.kpi_sample_types_pb2 import KpiSampleType
from common.type_checkers.Checkers import chk_attribute, chk_string, chk_type
from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
from .Constants import SPECIAL_RESOURCE_MAPPINGS
class LANPrefixesDict(TypedDict):
    """Typed shape of one LAN prefix entry (prefix plus its VLAN tag).

    NOTE(review): not referenced in the visible code of this module;
    presumably consumed by callers building ipv4_lan_prefixes — verify.
    """

    # CIDR prefix of the LAN, e.g. "128.32.10.1/24" — assumed; confirm with callers.
    lan: str
    # VLAN tag associated to the LAN prefix.
    lan_tag: str
LOGGER = logging.getLogger(__name__)

# Site network accesses composed by this driver are always multipoint.
SITE_NETWORK_ACCESS_TYPE = "ietf-l3vpn-svc:multipoint"
def service_exists(wim_url: str, auth, service_uuid: str) -> bool:
    """Return True when the WIM NBI knows the given vpn-service.

    Any failure while querying (HTTP error, connection problem, bad
    response) is interpreted as "the service does not exist".
    """
    try:
        get_connectivity_service(wim_url, auth, service_uuid)
    except:  # pylint: disable=bare-except
        return False
    return True
def get_all_active_connectivity_services(wim_url: str, auth):
    """GET the full vpn-services container from the WIM IETF L3VPN NBI.

    :param wim_url: base URL of the WIM controller (scheme://host:port).
    :param auth: requests-compatible auth object (or None).
    :return: the raw requests Response on HTTP 200.
    :raises Exception: on a non-OK status code or a connection failure.
    """
    try:
        LOGGER.info("Sending get all connectivity services")
        servicepoint = f"{wim_url}/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services"
        response = requests.get(servicepoint, auth=auth)
        if response.status_code != requests.codes.ok:
            # BUGFIX: Exception() accepts no keyword arguments; the original
            # `http_code=...` kwarg raised TypeError instead of the intended
            # error. Fold the status code into the message instead.
            raise Exception(
                "Unable to get all connectivity services (http_code={:d})".format(
                    response.status_code
                )
            )
        return response
    except requests.exceptions.ConnectionError:
        # BUGFIX: same Exception-kwarg problem as above.
        raise Exception("Request Timeout (http_code=408)")
def get_connectivity_service(wim_url, auth, service_uuid):
    """GET a single vpn-service from the WIM IETF L3VPN NBI.

    :param wim_url: base URL of the WIM controller (scheme://host:port).
    :param auth: requests-compatible auth object (or None).
    :param service_uuid: vpn-service identifier to retrieve.
    :return: the raw requests Response on HTTP 200.
    :raises Exception: on a non-OK status code or a connection failure.
    """
    try:
        LOGGER.info("Sending get connectivity service")
        servicepoint = f"{wim_url}/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services/vpn-service={service_uuid}"
        # BUGFIX: pass the credentials; `auth` was accepted but never used
        # (compare get_all_active_connectivity_services above).
        response = requests.get(servicepoint, auth=auth)
        if response.status_code != requests.codes.ok:
            # BUGFIX: Exception() accepts no keyword arguments; the original
            # `http_code=...` kwarg raised TypeError instead of the intended
            # error. Also add the missing space before the uuid.
            raise Exception(
                "Unable to get connectivity service {:s} (http_code={:d})".format(
                    str(service_uuid), response.status_code
                )
            )
        return response
    except requests.exceptions.ConnectionError:
        # BUGFIX: same Exception-kwarg problem as above.
        raise Exception("Request Timeout (http_code=408)")
def process_optional_string_field(
    endpoint_data: Dict[str, Any],
    field_name: str,
    endpoint_resource_value: Dict[str, Any],
) -> None:
    """Copy an optional string field from endpoint_data into the resource.

    Missing fields are silently ignored; present fields are validated as
    strings and copied only when non-empty.
    """
    field_value = chk_attribute(field_name, endpoint_data, "endpoint_data", default=None)
    if field_value is None:
        return
    chk_string("endpoint_data.{:s}".format(field_name), field_value)
    if field_value:
        endpoint_resource_value[field_name] = field_value
def compose_resource_endpoint(
    endpoint_data: Dict[str, Any],
) -> Optional[Tuple[str, Dict]]:
    """Build the (resource_key, resource_value) pair describing one endpoint.

    Validates the mandatory ``uuid``, copies a fixed set of optional string
    fields, and maps the optional ``sample_types`` list (KPI sample types
    given as enum ints or enum names) to per-endpoint monitoring resource
    keys.

    :return: (endpoint_resource_key, endpoint_resource_value), or None when
        endpoint_data is malformed (the problem is logged).
    """
    try:
        chk_type("endpoint_data", endpoint_data, dict)

        # Endpoint UUID is mandatory and keys the resource path.
        endpoint_uuid = chk_attribute("uuid", endpoint_data, "endpoint_data")
        chk_string("endpoint_data.uuid", endpoint_uuid, min_length=1)
        endpoint_resource_path = SPECIAL_RESOURCE_MAPPINGS.get(RESOURCE_ENDPOINTS)
        endpoint_resource_key = "{:s}/endpoint[{:s}]".format(
            endpoint_resource_path, endpoint_uuid
        )
        endpoint_resource_value = {"uuid": endpoint_uuid}

        # Optional string fields: validated and copied only when non-empty.
        for field_name in (
            "name", "site_location", "ce-ip", "address_ip", "address_prefix",
            "mtu", "ipv4_lan_prefixes", "type", "context_uuid", "topology_uuid",
        ):
            process_optional_string_field(
                endpoint_data, field_name, endpoint_resource_value
            )

        # Optional KPI sample types; each entry may be the numeric enum id
        # or the enum name.
        endpoint_sample_types = chk_attribute(
            "sample_types", endpoint_data, "endpoint_data", default=[]
        )
        chk_type("endpoint_data.sample_types", endpoint_sample_types, list)
        sample_types = {}
        sample_type_errors = []
        for i, endpoint_sample_type in enumerate(endpoint_sample_types):
            field_name = "endpoint_data.sample_types[{:d}]".format(i)
            try:
                # chk_type guarantees int or str from here on.
                chk_type(field_name, endpoint_sample_type, (int, str))
                if isinstance(endpoint_sample_type, int):
                    metric_name = KpiSampleType.Name(endpoint_sample_type)
                    metric_id = endpoint_sample_type
                else:
                    metric_id = KpiSampleType.Value(endpoint_sample_type)
                    metric_name = endpoint_sample_type
            except Exception as e:  # pylint: disable=broad-exception-caught
                MSG = "Unsupported {:s}({:s}) : {:s}"
                sample_type_errors.append(
                    MSG.format(field_name, str(endpoint_sample_type), str(e))
                )
                # BUGFIX: skip this entry. The original fell through and used
                # metric_name/metric_id, which are undefined on the first
                # failing iteration (NameError) or stale from a previous one.
                continue
            metric_name = metric_name.lower().replace("kpisampletype_", "")
            monitoring_resource_key = "{:s}/state/{:s}".format(
                endpoint_resource_key, metric_name
            )
            sample_types[metric_id] = monitoring_resource_key
        if len(sample_type_errors) > 0:
            # pylint: disable=broad-exception-raised
            raise Exception(
                "Malformed Sample Types:\n{:s}".format("\n".join(sample_type_errors))
            )
        if len(sample_types) > 0:
            endpoint_resource_value["sample_types"] = sample_types

        # Raw copies: unlike the validated copies above, these accept any
        # value type (e.g. an integer mtu or a list of LAN prefixes).
        for field_name in (
            "site_location", "ce-ip", "address_ip",
            "address_prefix", "mtu", "ipv4_lan_prefixes",
        ):
            if field_name in endpoint_data:
                endpoint_resource_value[field_name] = endpoint_data[field_name]
        return endpoint_resource_key, endpoint_resource_value
    except:  # pylint: disable=bare-except
        LOGGER.exception("Problem composing endpoint({:s})".format(str(endpoint_data)))
        return None
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
import threading
from typing import Any, Iterator, List, Optional, Tuple, Union
import anytree
import requests
from requests.auth import HTTPBasicAuth
from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
from common.type_checkers.Checkers import chk_length, chk_string, chk_type
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_SERVICES,
_Driver,
)
from device.service.driver_api.AnyTreeTools import (
TreeNode,
dump_subtree,
get_subnode,
set_subnode_value,
)
from device.service.driver_api.ImportTopologyEnum import (
ImportTopologyEnum,
get_import_topology,
)
from .Constants import SPECIAL_RESOURCE_MAPPINGS
from .TfsApiClient import TfsApiClient
from .Tools import compose_resource_endpoint
LOGGER = logging.getLogger(__name__)

# Resource keys retrieved when a caller asks for "everything".
ALL_RESOURCE_KEYS = [
    RESOURCE_ENDPOINTS,
    RESOURCE_SERVICES,
]

# Extracts the endpoint name from a "/interface[NAME]..." resource key.
RE_GET_ENDPOINT_FROM_INTERFACE = re.compile(r"^\/interface\[([^\]]+)\].*")
# Matches the IETF L3VPN service-data resource key: /service[UUID]/IETFL3VPN
RE_IETF_L3VPN_DATA = re.compile(r"^\/service\[[^\]]+\]\/IETFL3VPN$")
# Matches the operation resource key: /service[UUID]/IETFL3VPN/operation
RE_IETF_L3VPN_OPERATION = re.compile(r"^\/service\[[^\]]+\]\/IETFL3VPN\/operation$")

DRIVER_NAME = "ietf_l3vpn"
METRICS_POOL = MetricsPool("Device", "Driver", labels={"driver": DRIVER_NAME})
class IetfL3VpnDriver(_Driver):
    """Device driver managing IETF L3VPN services through a TFS NBI.

    Keeps an in-memory configuration tree (``self.__running``) seeded from
    the ``endpoints`` setting and delegates service create/update/delete to
    a :class:`TfsApiClient` instance. All public operations are serialized
    with an internal lock.
    """

    def __init__(self, address: str, port: str, **settings) -> None:
        super().__init__(DRIVER_NAME, address, int(port), **settings)
        # Serializes access to the in-memory tree and driver state.
        self.__lock = threading.Lock()
        self.__started = threading.Event()
        self.__terminate = threading.Event()
        # Root of the in-memory running-configuration tree.
        self.__running = TreeNode(".")
        scheme = self.settings.get("scheme", "http")
        username = self.settings.get("username")
        password = self.settings.get("password")
        self.tac = TfsApiClient(
            self.address,
            self.port,
            scheme=scheme,
            username=username,
            password=password,
        )
        # NOTE(review): HTTP basic auth is deliberately disabled (commented
        # out below); credentials are still collected for future use.
        self.__auth = None
        # (
        #     HTTPBasicAuth(username, password)
        #     if username is not None and password is not None
        #     else None
        # )
        self.__tfs_nbi_root = "{:s}://{:s}:{:d}".format(
            scheme, self.address, int(self.port)
        )
        self.__timeout = int(self.settings.get("timeout", 120))
        self.__import_topology = get_import_topology(
            self.settings, default=ImportTopologyEnum.DEVICES
        )
        # Seed the running tree from the "endpoints" setting; malformed
        # entries are skipped (compose_resource_endpoint logs and
        # returns None for them).
        endpoints = self.settings.get("endpoints", [])
        endpoint_resources = []
        for endpoint in endpoints:
            endpoint_resource = compose_resource_endpoint(endpoint)
            if endpoint_resource is None:
                continue
            endpoint_resources.append(endpoint_resource)
        self._set_initial_config(endpoint_resources)

    def _set_initial_config(
        self, resources: List[Tuple[str, Any]]
    ) -> List[Union[bool, Exception]]:
        """Insert (resource_key, resource_value) pairs into the running tree.

        :return: one entry per resource: True on success, or the validation
            exception for that resource.
        """
        chk_type("resources", resources, list)
        if len(resources) == 0:
            return []
        results = []
        resolver = anytree.Resolver(pathattr="name")
        with self.__lock:
            for i, resource in enumerate(resources):
                str_resource_name = "resources[#{:d}]".format(i)
                try:
                    chk_type(str_resource_name, resource, (list, tuple))
                    chk_length(str_resource_name, resource, min_length=2, max_length=2)
                    resource_key, resource_value = resource
                    chk_string(str_resource_name, resource_key, allow_empty=False)
                    resource_path = resource_key.split("/")
                except Exception as e:  # pylint: disable=broad-except
                    LOGGER.exception(
                        "Exception validating {:s}: {:s}".format(
                            str_resource_name, str(resource_key)
                        )
                    )
                    results.append(e)  # if validation fails, store the exception
                    continue
                # Values may arrive JSON-encoded; keep them as-is otherwise.
                try:
                    resource_value = json.loads(resource_value)
                except:  # pylint: disable=bare-except
                    pass
                set_subnode_value(
                    resolver, self.__running, resource_path, resource_value
                )
                results.append(True)
        return results

    def Connect(self) -> bool:
        """Mark the driver as connected.

        NOTE(review): the actual connectivity probe to the NBI is commented
        out; this currently always succeeds unless already started.
        """
        url = (
            self.__tfs_nbi_root + "/restconf/data/ietf-l3vpn-svc:l3vpn-svc/vpn-services"
        )
        with self.__lock:
            if self.__started.is_set():
                return True
            try:
                # requests.get(url, timeout=self.__timeout, auth=self.__auth)
                ...
            except requests.exceptions.Timeout:
                LOGGER.exception("Timeout connecting {:s}".format(url))
                return False
            except Exception:  # pylint: disable=broad-except
                LOGGER.exception("Exception connecting {:s}".format(url))
                return False
            else:
                self.__started.set()
                return True

    def Disconnect(self) -> bool:
        """Signal termination; always succeeds."""
        with self.__lock:
            self.__terminate.set()
            return True

    @metered_subclass_method(METRICS_POOL)
    def GetInitialConfig(self) -> List[Tuple[str, Any]]:
        """Return the initial configuration (none for this driver)."""
        with self.__lock:
            return []

    @metered_subclass_method(METRICS_POOL)
    def GetConfig(
        self, resource_keys: List[str] = []
    ) -> List[Tuple[str, Union[Any, None, Exception]]]:
        """Dump the requested subtrees of the running configuration.

        With no keys, the whole running tree is dumped. Unknown keys are
        silently skipped; invalid keys yield (key, exception) entries.
        """
        chk_type("resources", resource_keys, list)
        with self.__lock:
            if len(resource_keys) == 0:
                return dump_subtree(self.__running)
            results = []
            resolver = anytree.Resolver(pathattr="name")
            for i, resource_key in enumerate(resource_keys):
                str_resource_name = "resource_key[#{:d}]".format(i)
                try:
                    chk_string(str_resource_name, resource_key, allow_empty=False)
                    # Well-known aliases (e.g. __endpoints__) map to tree paths.
                    resource_key = SPECIAL_RESOURCE_MAPPINGS.get(
                        resource_key, resource_key
                    )
                    resource_path = resource_key.split("/")
                except Exception as e:  # pylint: disable=broad-except
                    LOGGER.exception(
                        "Exception validating {:s}: {:s}".format(
                            str_resource_name, str(resource_key)
                        )
                    )
                    results.append(
                        (resource_key, e)
                    )  # if validation fails, store the exception
                    continue
                resource_node = get_subnode(
                    resolver, self.__running, resource_path, default=None
                )
                # if not found, resource_node is None
                if resource_node is None:
                    continue
                results.extend(dump_subtree(resource_node))
            return results
        # NOTE(review): unreachable duplicate return, kept verbatim.
        return results

    @metered_subclass_method(METRICS_POOL)
    def SetConfig(
        self, resources: List[Tuple[str, Any]]
    ) -> List[Union[bool, Exception]]:
        """Apply IETF L3VPN service data through the NBI client.

        The resource list must contain exactly one operation resource
        (".../IETFL3VPN/operation") declaring "create" or "update"; the
        service payload resources (".../IETFL3VPN") are then dispatched to
        the TfsApiClient accordingly.
        """
        results = []
        if len(resources) == 0:
            return results
        with self.__lock:
            # First pass: locate the operation type; for/else raises when no
            # operation resource is present.
            for resource in resources:
                resource_key, resource_value = resource
                if RE_IETF_L3VPN_OPERATION.match(resource_key):
                    operation_type = json.loads(resource_value)["type"]
                    results.append((resource_key, True))
                    break
            else:
                raise Exception("operation type not found in resources")
            # Second pass: dispatch each service-data resource.
            for resource in resources:
                LOGGER.info("resource = {:s}".format(str(resource)))
                resource_key, resource_value = resource
                if not RE_IETF_L3VPN_DATA.match(resource_key):
                    continue
                try:
                    resource_value = json.loads(resource_value)
                    # if service_exists(self.__tfs_nbi_root, self.__auth, service_uuid):
                    #     exc = NotImplementedError(
                    #         "IETF L3VPN Service Update is still not supported"
                    #     )
                    #     results.append((resource[0], exc))
                    #     continue
                    if operation_type == "create":
                        # NOTE(review): service_id is extracted but currently
                        # unused; presumably intended for logging/dedup — verify.
                        service_id = resource_value["ietf-l3vpn-svc:l3vpn-svc"][
                            "vpn-services"
                        ]["vpn-service"][0]["vpn-id"]
                        self.tac.create_connectivity_service(resource_value)
                    elif operation_type == "update":
                        service_id = resource_value["ietf-l3vpn-svc:l3vpn-svc"][
                            "vpn-services"
                        ]["vpn-service"][0]["vpn-id"]
                        self.tac.update_connectivity_service(resource_value)
                    else:
                        raise Exception("operation type not supported")
                    results.append((resource_key, True))
                except Exception as e:  # pylint: disable=broad-except
                    LOGGER.exception(
                        "Unhandled error processing resource_key({:s})".format(
                            str(resource_key)
                        )
                    )
                    results.append((resource_key, e))
        return results

    @metered_subclass_method(METRICS_POOL)
    def DeleteConfig(
        self, resources: List[Tuple[str, Any]]
    ) -> List[Union[bool, Exception]]:
        """Delete IETF L3VPN services identified by the given resources."""
        results = []
        if len(resources) == 0:
            return results
        with self.__lock:
            for resource in resources:
                LOGGER.info("resource = {:s}".format(str(resource)))
                resource_key, resource_value = resource
                if not RE_IETF_L3VPN_DATA.match(resource_key):
                    continue
                try:
                    resource_value = json.loads(resource_value)
                    service_id = resource_value["id"]
                    # if service_exists(self.__tfs_nbi_root, self.__auth, service_uuid):
                    self.tac.delete_connectivity_service(service_id)
                    results.append((resource_key, True))
                except Exception as e:  # pylint: disable=broad-except
                    LOGGER.exception(
                        "Unhandled error processing resource_key({:s})".format(
                            str(resource_key)
                        )
                    )
                    results.append((resource_key, e))
        return results

    @metered_subclass_method(METRICS_POOL)
    def SubscribeState(
        self, subscriptions: List[Tuple[str, float, float]]
    ) -> List[Union[bool, Exception]]:
        # TODO: IETF L3VPN does not support monitoring by now
        return [False for _ in subscriptions]

    @metered_subclass_method(METRICS_POOL)
    def UnsubscribeState(
        self, subscriptions: List[Tuple[str, float, float]]
    ) -> List[Union[bool, Exception]]:
        # TODO: IETF L3VPN does not support monitoring by now
        return [False for _ in subscriptions]

    def GetState(
        self, blocking=False, terminate: Optional[threading.Event] = None
    ) -> Iterator[Tuple[float, str, Any]]:
        # TODO: IETF L3VPN does not support monitoring by now
        # (returns an empty list, which is a valid — empty — iterator)
        return []
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_INTERFACES,
RESOURCE_NETWORK_INSTANCES,
)
# Maps the driver-API well-known resource keys to paths in this driver's
# internal configuration tree.
SPECIAL_RESOURCE_MAPPINGS = {
    RESOURCE_ENDPOINTS: "/endpoints",
    RESOURCE_INTERFACES: "/interfaces",
    RESOURCE_NETWORK_INSTANCES: "/net-instances",
}
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Any, Dict, Optional, Tuple
import requests
from common.proto.kpi_sample_types_pb2 import KpiSampleType
from common.type_checkers.Checkers import chk_attribute, chk_string, chk_type
from device.service.driver_api._Driver import RESOURCE_ENDPOINTS
from .Constants import SPECIAL_RESOURCE_MAPPINGS
LOGGER = logging.getLogger(__name__)
def process_optional_string_field(
    endpoint_data: Dict[str, Any],
    field_name: str,
    endpoint_resource_value: Dict[str, Any],
) -> None:
    """Validate and copy one optional string field into the resource value.

    Absent fields are ignored; present fields must be strings and are
    copied only when non-empty.
    """
    field_value = chk_attribute(
        field_name, endpoint_data, "endpoint_data", default=None
    )
    if field_value is None:
        return
    chk_string("endpoint_data.{:s}".format(field_name), field_value)
    if not field_value:
        return
    endpoint_resource_value[field_name] = field_value
def compose_resource_endpoint(
    endpoint_data: Dict[str, Any],
) -> Optional[Tuple[str, Dict]]:
    """Build the (resource_key, resource_value) pair describing one endpoint.

    Validates the mandatory ``uuid``, copies a fixed set of optional string
    fields, and maps the optional ``sample_types`` list (KPI sample types
    given as enum ints or enum names) to per-endpoint monitoring resource
    keys.

    :return: (endpoint_resource_key, endpoint_resource_value), or None when
        endpoint_data is malformed (the problem is logged).
    """
    try:
        chk_type("endpoint_data", endpoint_data, dict)

        # Endpoint UUID is mandatory and keys the resource path.
        endpoint_uuid = chk_attribute("uuid", endpoint_data, "endpoint_data")
        chk_string("endpoint_data.uuid", endpoint_uuid, min_length=1)
        endpoint_resource_path = SPECIAL_RESOURCE_MAPPINGS.get(RESOURCE_ENDPOINTS)
        endpoint_resource_key = "{:s}/endpoint[{:s}]".format(
            endpoint_resource_path, endpoint_uuid
        )
        endpoint_resource_value = {"uuid": endpoint_uuid}

        # Optional string fields: validated and copied only when non-empty.
        for field_name in (
            "name", "site_location", "ce-ip", "address_ip", "address_prefix",
            "mtu", "ipv4_lan_prefixes", "type", "context_uuid", "topology_uuid",
        ):
            process_optional_string_field(
                endpoint_data, field_name, endpoint_resource_value
            )

        # Optional KPI sample types; each entry may be the numeric enum id
        # or the enum name.
        endpoint_sample_types = chk_attribute(
            "sample_types", endpoint_data, "endpoint_data", default=[]
        )
        chk_type("endpoint_data.sample_types", endpoint_sample_types, list)
        sample_types = {}
        sample_type_errors = []
        for i, endpoint_sample_type in enumerate(endpoint_sample_types):
            field_name = "endpoint_data.sample_types[{:d}]".format(i)
            try:
                # chk_type guarantees int or str from here on.
                chk_type(field_name, endpoint_sample_type, (int, str))
                if isinstance(endpoint_sample_type, int):
                    metric_name = KpiSampleType.Name(endpoint_sample_type)
                    metric_id = endpoint_sample_type
                else:
                    metric_id = KpiSampleType.Value(endpoint_sample_type)
                    metric_name = endpoint_sample_type
            except Exception as e:  # pylint: disable=broad-exception-caught
                MSG = "Unsupported {:s}({:s}) : {:s}"
                sample_type_errors.append(
                    MSG.format(field_name, str(endpoint_sample_type), str(e))
                )
                # BUGFIX: skip this entry. The original fell through and used
                # metric_name/metric_id, which are undefined on the first
                # failing iteration (NameError) or stale from a previous one.
                continue
            metric_name = metric_name.lower().replace("kpisampletype_", "")
            monitoring_resource_key = "{:s}/state/{:s}".format(
                endpoint_resource_key, metric_name
            )
            sample_types[metric_id] = monitoring_resource_key
        if len(sample_type_errors) > 0:
            # pylint: disable=broad-exception-raised
            raise Exception(
                "Malformed Sample Types:\n{:s}".format("\n".join(sample_type_errors))
            )
        if len(sample_types) > 0:
            endpoint_resource_value["sample_types"] = sample_types

        # Raw copies: unlike the validated copies above, these accept any
        # value type (e.g. an integer mtu or a list of LAN prefixes).
        for field_name in (
            "site_location", "ce-ip", "address_ip",
            "address_prefix", "mtu", "ipv4_lan_prefixes",
        ):
            if field_name in endpoint_data:
                endpoint_resource_value[field_name] = endpoint_data[field_name]
        return endpoint_resource_key, endpoint_resource_value
    except:  # pylint: disable=bare-except
        LOGGER.exception("Problem composing endpoint({:s})".format(str(endpoint_data)))
        return None
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
This diff is collapsed.
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Optional
import requests
from requests.auth import HTTPBasicAuth
# Base URL template for the TFS IETF Network Slice RESTCONF NBI.
IETF_SLICE_URL = "{:s}://{:s}:{:d}/restconf/data/ietf-network-slice-service"
# NOTE(review): defined but not passed to any requests call below — presumably
# intended as the HTTP timeout; confirm and wire it in.
TIMEOUT = 30
LOGGER = logging.getLogger(__name__)
HEADERS = {"Content-Type": "application/json"}
class TfsApiClient:
    """Minimal REST client for the TFS IETF Network Slice NBI.

    Wraps create/update/delete of network-slice-services; connection
    failures are reported as generic Exceptions.
    """

    def __init__(
        self,
        address: str,
        port: int,
        scheme: str = "http",
        username: Optional[str] = None,
        password: Optional[str] = None,
    ) -> None:
        self._slice_url = IETF_SLICE_URL.format(scheme, address, port)
        # NOTE(review): basic auth is deliberately disabled; username and
        # password are accepted so the interface is ready when it is enabled.
        self._auth = None
        # (
        #     HTTPBasicAuth(username, password)
        #     if username is not None and password is not None
        #     else None
        # )

    def create_slice(self, slice_data: dict) -> None:
        """POST a new slice service payload to the NBI.

        :raises Exception: when the NBI cannot be reached.
        """
        url = self._slice_url + ":network-slice-services"
        try:
            requests.post(url, json=slice_data, headers=HEADERS)
            LOGGER.info(f"IETF Slice Post to {url}: {slice_data}")
        except requests.exceptions.ConnectionError:
            # BUGFIX: typo in the error message ("faild" -> "failed").
            raise Exception("failed to send post request to TFS IETF Slice NBI")

    def update_slice(
        self,
        slice_name: str,
        connection_group_id: str,
        updated_connection_group_data: dict,
    ) -> None:
        """PUT an updated connection-group of an existing slice service.

        :raises Exception: when the NBI cannot be reached.
        """
        url = (
            self._slice_url
            + f":network-slice-services/slice-service={slice_name}/connection-groups/connection-group={connection_group_id}"
        )
        try:
            requests.put(url, json=updated_connection_group_data, headers=HEADERS)
            LOGGER.info(f"IETF Slice Put to {url}: {updated_connection_group_data}")
        except requests.exceptions.ConnectionError:
            # BUGFIX: typo in the error message ("faild" -> "failed").
            raise Exception("failed to send update request to TFS IETF Slice NBI")

    def delete_slice(self, slice_name: str) -> None:
        """DELETE a slice service by name.

        :raises Exception: when the NBI cannot be reached.
        """
        url = self._slice_url + f":network-slice-services/slice-service={slice_name}"
        try:
            requests.delete(url)
            LOGGER.info(f"IETF Slice Delete to {url}")
        except requests.exceptions.ConnectionError:
            # BUGFIX: typo in the error message ("faild" -> "failed").
            raise Exception("failed to send delete request to TFS IETF Slice NBI")
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from device.service.driver_api._Driver import (
RESOURCE_ENDPOINTS,
RESOURCE_INTERFACES,
RESOURCE_NETWORK_INSTANCES,
)
# Maps the driver-API well-known resource keys to paths in this driver's
# internal configuration tree.
SPECIAL_RESOURCE_MAPPINGS = {
    RESOURCE_ENDPOINTS: "/endpoints",
    RESOURCE_INTERFACES: "/interfaces",
    RESOURCE_NETWORK_INSTANCES: "/net-instances",
}
This diff is collapsed.
# Copyright 2022-2025 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
This diff is collapsed.
This diff is collapsed.
...@@ -34,6 +34,7 @@ class P4Type(enum.Enum): ...@@ -34,6 +34,7 @@ class P4Type(enum.Enum):
meter = 6 meter = 6
direct_meter = 7 direct_meter = 7
controller_packet_metadata = 8 controller_packet_metadata = 8
digest = 9
P4Type.table.p4info_name = "tables" P4Type.table.p4info_name = "tables"
...@@ -44,6 +45,7 @@ P4Type.direct_counter.p4info_name = "direct_counters" ...@@ -44,6 +45,7 @@ P4Type.direct_counter.p4info_name = "direct_counters"
P4Type.meter.p4info_name = "meters" P4Type.meter.p4info_name = "meters"
P4Type.direct_meter.p4info_name = "direct_meters" P4Type.direct_meter.p4info_name = "direct_meters"
P4Type.controller_packet_metadata.p4info_name = "controller_packet_metadata" P4Type.controller_packet_metadata.p4info_name = "controller_packet_metadata"
P4Type.digest.p4info_name = "digests"
for object_type in P4Type: for object_type in P4Type:
object_type.pretty_name = object_type.name.replace('_', ' ') object_type.pretty_name = object_type.name.replace('_', ' ')
...@@ -58,11 +60,12 @@ class P4RuntimeEntity(enum.Enum): ...@@ -58,11 +60,12 @@ class P4RuntimeEntity(enum.Enum):
table_entry = 1 table_entry = 1
action_profile_member = 2 action_profile_member = 2
action_profile_group = 3 action_profile_group = 3
meter_entry = 4 counter_entry = 4
direct_meter_entry = 5 direct_counter_entry = 5
counter_entry = 6 meter_entry = 6
direct_counter_entry = 7 direct_meter_entry = 7
packet_replication_engine_entry = 8 packet_replication_engine_entry = 8
digest_entry = 9
class Context: class Context:
......
This diff is collapsed.
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
from common.Constants import ServiceNameEnum from common.Constants import ServiceNameEnum
from common.Settings import get_service_port_grpc from common.Settings import get_service_port_grpc
from common.proto.forecaster_pb2 import DESCRIPTOR as FORECASTER_DESCRIPTOR
from common.proto.forecaster_pb2_grpc import add_ForecasterServiceServicer_to_server from common.proto.forecaster_pb2_grpc import add_ForecasterServiceServicer_to_server
from common.tools.service.GenericGrpcService import GenericGrpcService from common.tools.service.GenericGrpcService import GenericGrpcService
from .ForecasterServiceServicerImpl import ForecasterServiceServicerImpl from .ForecasterServiceServicerImpl import ForecasterServiceServicerImpl
...@@ -26,3 +27,5 @@ class ForecasterService(GenericGrpcService): ...@@ -26,3 +27,5 @@ class ForecasterService(GenericGrpcService):
def install_servicers(self): def install_servicers(self):
add_ForecasterServiceServicer_to_server(self.forecaster_servicer, self.server) add_ForecasterServiceServicer_to_server(self.forecaster_servicer, self.server)
self.add_reflection_service_name(FORECASTER_DESCRIPTOR, 'ForecasterService')