From 30202ef78b15ae84a1b8f7c9ce38ecdc83318e9a Mon Sep 17 00:00:00 2001 From: cajadiazj Date: Mon, 9 Jan 2023 17:12:16 +0000 Subject: [PATCH 01/62] gNMI_driver created --- src/device/service/drivers/gnmi/__init__.py | 27 +++ .../service/drivers/gnmi/gNMI_driver.py | 163 ++++++++++++++++++ 2 files changed, 190 insertions(+) create mode 100644 src/device/service/drivers/gnmi/__init__.py create mode 100644 src/device/service/drivers/gnmi/gNMI_driver.py diff --git a/src/device/service/drivers/gnmi/__init__.py b/src/device/service/drivers/gnmi/__init__.py new file mode 100644 index 000000000..925746998 --- /dev/null +++ b/src/device/service/drivers/gnmi/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from device.service.driver_api._Driver import RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES + +ALL_RESOURCE_KEYS = [ + RESOURCE_ENDPOINTS, + RESOURCE_INTERFACES, + RESOURCE_NETWORK_INSTANCES, +] + +RESOURCE_KEY_MAPPINGS = { + RESOURCE_ENDPOINTS : 'component', + RESOURCE_INTERFACES : 'interface', + RESOURCE_NETWORK_INSTANCES: 'network_instance', +} diff --git a/src/device/service/drivers/gnmi/gNMI_driver.py b/src/device/service/drivers/gnmi/gNMI_driver.py new file mode 100644 index 000000000..0a39bf222 --- /dev/null +++ b/src/device/service/drivers/gnmi/gNMI_driver.py @@ -0,0 +1,163 @@ +# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import anytree, logging, queue, threading +from typing import Any, Iterator, List, Optional, Tuple, Union +from common.type_checkers.Checkers import chk_float, chk_length, chk_string, chk_type +from device.service.driver_api._Driver import ( + RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES, + _Driver) + +LOGGER = logging.getLogger(__name__) + +SPECIAL_RESOURCE_MAPPINGS = { + RESOURCE_ENDPOINTS : '/endpoints', + RESOURCE_INTERFACES : '/interfaces', + RESOURCE_NETWORK_INSTANCES: '/net-instances', +} + +class MonitoringThread(threading.Thread): + def __init__(self, in_subscriptions : queue.Queue, out_samples : queue.Queue) -> None: + super().__init__(daemon=True) + self._in_subscriptions = in_subscriptions + self._out_samples = out_samples + + def run(self) -> None: + while True: + # TODO: req_iterator = generate_requests(self._in_subscriptions) + # TODO: stub.Subscribe(req_iterator) + self._out_samples.put_nowait((timestamp, resource_key, value)) + +class EmulatedDriver(_Driver): + def __init__(self, address : str, port : int, **settings) -> None: # pylint: disable=super-init-not-called + self.__lock = threading.Lock() + + # endpoints = settings.get('endpoints', []) + + self.__started = threading.Event() + self.__terminate = threading.Event() + + self.__in_subscriptions = queue.Queue() + self.__out_samples = queue.Queue() + + self.__monitoring_thread = MonitoringThread(self.__in_subscriptions, self.__out_samples) + + def Connect(self) -> bool: + # If started, assume it is already connected + if self.__started.is_set(): return True + + # TODO: check capabilities + self.__monitoring_thread.start() + + # Indicate the driver is now connected to the device + self.__started.set() + return True + + def Disconnect(self) -> bool: + # Trigger termination of loops and processes + self.__terminate.set() + + # TODO: send unsubscriptions + # TODO: terminate monitoring thread + # TODO: disconnect gRPC + self.__monitoring_thread.join() + + # If not started, 
assume it is already disconnected + if not self.__started.is_set(): return True + return True + + def GetInitialConfig(self) -> List[Tuple[str, Any]]: + with self.__lock: + return [] + + def GetConfig(self, resource_keys : List[str] = []) -> List[Tuple[str, Union[Any, None, Exception]]]: + chk_type('resources', resource_keys, list) + with self.__lock: + results = [] + for i,resource_key in enumerate(resource_keys): + str_resource_name = 'resource_key[#{:d}]'.format(i) + try: + chk_string(str_resource_name, resource_key, allow_empty=False) + resource_key = SPECIAL_RESOURCE_MAPPINGS.get(resource_key, resource_key) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Exception validating {:s}: {:s}'.format(str_resource_name, str(resource_key))) + results.append((resource_key, e)) # if validation fails, store the exception + continue + + # TODO: if resource_key == '/endpoints': retornar lista de endpoints + # results.extend(endpoints) + return results + + def SubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]: + chk_type('subscriptions', subscriptions, list) + if len(subscriptions) == 0: return [] + results = [] + with self.__lock: + for i,subscription in enumerate(subscriptions): + str_subscription_name = 'subscriptions[#{:d}]'.format(i) + try: + chk_type(str_subscription_name, subscription, (list, tuple)) + chk_length(str_subscription_name, subscription, min_length=3, max_length=3) + resource_key,sampling_duration,sampling_interval = subscription + chk_string(str_subscription_name + '.resource_key', resource_key, allow_empty=False) + resource_path = resource_key.split('/') + chk_float(str_subscription_name + '.sampling_duration', sampling_duration, min_value=0) + chk_float(str_subscription_name + '.sampling_interval', sampling_interval, min_value=0) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Exception validating {:s}: {:s}'.format(str_subscription_name, 
str(resource_key))) + results.append(e) # if validation fails, store the exception + continue + + # TODO: format subscription + # TODO: self.__in_subscriptions.put_nowait(subscription) + results.append(True) + return results + + def UnsubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]: + chk_type('subscriptions', subscriptions, list) + if len(subscriptions) == 0: return [] + results = [] + resolver = anytree.Resolver(pathattr='name') + with self.__lock: + for i,resource in enumerate(subscriptions): + str_subscription_name = 'resources[#{:d}]'.format(i) + try: + chk_type(str_subscription_name, resource, (list, tuple)) + chk_length(str_subscription_name, resource, min_length=3, max_length=3) + resource_key,sampling_duration,sampling_interval = resource + chk_string(str_subscription_name + '.resource_key', resource_key, allow_empty=False) + resource_path = resource_key.split('/') + chk_float(str_subscription_name + '.sampling_duration', sampling_duration, min_value=0) + chk_float(str_subscription_name + '.sampling_interval', sampling_interval, min_value=0) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Exception validating {:s}: {:s}'.format(str_subscription_name, str(resource_key))) + results.append(e) # if validation fails, store the exception + continue + + # TODO: format unsubscription + # TODO: self.__in_subscriptions.put_nowait(unsubscription) + results.append(True) + return results + + def GetState(self, blocking=False, terminate : Optional[threading.Event] = None) -> Iterator[Tuple[str, Any]]: + while True: + if self.__terminate.is_set(): break + if terminate is not None and terminate.is_set(): break + try: + sample = self.__out_samples.get(block=blocking, timeout=0.1) + except queue.Empty: + if blocking: continue + return + if sample is None: continue + yield sample -- GitLab From fc2c8ed470748eb843fad3406f9454b28d179dd2 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 6 Feb 
2023 10:07:27 +0000 Subject: [PATCH 02/62] Device component - OpenConfig Driver: - adding skeleton for gNMI --- .../GnmiTelemetry.py} | 5 +- .../drivers/openconfig/NetConfTelemetry.py | 14 ++++ .../drivers/openconfig/OpenConfigDriver.py | 62 +++-------------- .../drivers/openconfig/SamplesCache.py | 68 +++++++++++++++++++ .../openconfig/TelemetryProtocolEnum.py | 27 ++++++++ .../__init__.py => openconfig/_Telemetry.py} | 24 +++---- 6 files changed, 133 insertions(+), 67 deletions(-) rename src/device/service/drivers/{gnmi/gNMI_driver.py => openconfig/GnmiTelemetry.py} (96%) create mode 100644 src/device/service/drivers/openconfig/NetConfTelemetry.py create mode 100644 src/device/service/drivers/openconfig/SamplesCache.py create mode 100644 src/device/service/drivers/openconfig/TelemetryProtocolEnum.py rename src/device/service/drivers/{gnmi/__init__.py => openconfig/_Telemetry.py} (54%) diff --git a/src/device/service/drivers/gnmi/gNMI_driver.py b/src/device/service/drivers/openconfig/GnmiTelemetry.py similarity index 96% rename from src/device/service/drivers/gnmi/gNMI_driver.py rename to src/device/service/drivers/openconfig/GnmiTelemetry.py index 0a39bf222..44b15d519 100644 --- a/src/device/service/drivers/gnmi/gNMI_driver.py +++ b/src/device/service/drivers/openconfig/GnmiTelemetry.py @@ -1,4 +1,4 @@ -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# Ref: https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md +# Ref: https://github.com/openconfig/gnmi/blob/master/proto/gnmi/gnmi.proto + import anytree, logging, queue, threading from typing import Any, Iterator, List, Optional, Tuple, Union from common.type_checkers.Checkers import chk_float, chk_length, chk_string, chk_type diff --git a/src/device/service/drivers/openconfig/NetConfTelemetry.py b/src/device/service/drivers/openconfig/NetConfTelemetry.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/device/service/drivers/openconfig/NetConfTelemetry.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index ef3d0728d..a541a23bf 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -30,6 +30,7 @@ from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_su #from .Tools import xml_pretty_print, xml_to_dict, xml_to_file from .templates import ALL_RESOURCE_KEYS, EMPTY_CONFIG, compose_config, get_filter, parse from .RetryDecorator import retry +from .TelemetryProtocolEnum import DEFAULT_TELEMETRY_PROTOCOL, TelemetryProtocolEnum, parse_telemetry_protocol DEBUG_MODE = False logging.getLogger('ncclient.manager').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) @@ -121,60 +122,6 @@ class NetconfSessionHandler: def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) -def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): - if previous_sample is None: return None - if previous_timestamp is None: return None - if current_sample is None: return None - if current_timestamp is None: return None - delay = current_timestamp - previous_timestamp - field_keys = set(previous_sample.keys()).union(current_sample.keys()) - field_keys.discard('name') - delta_sample = {'name': previous_sample['name']} - for field_key in field_keys: - previous_sample_value = previous_sample[field_key] - if not isinstance(previous_sample_value, (int, float)): continue - current_sample_value = current_sample[field_key] - if not isinstance(current_sample_value, (int, float)): continue - delta_value = current_sample_value - previous_sample_value - if delta_value < 0: continue - delta_sample[field_key] = delta_value / delay - return delta_sample - -class SamplesCache: - def __init__(self, 
netconf_handler : NetconfSessionHandler) -> None: - self.__netconf_handler = netconf_handler - self.__lock = threading.Lock() - self.__timestamp = None - self.__absolute_samples = {} - self.__delta_samples = {} - - def _refresh_samples(self) -> None: - with self.__lock: - try: - now = datetime.timestamp(datetime.utcnow()) - if self.__timestamp is not None and (now - self.__timestamp) < SAMPLE_EVICTION_SECONDS: return - str_filter = get_filter(SAMPLE_RESOURCE_KEY) - xml_data = self.__netconf_handler.get(filter=str_filter).data_ele - interface_samples = parse(SAMPLE_RESOURCE_KEY, xml_data) - for interface,samples in interface_samples: - match = RE_GET_ENDPOINT_FROM_INTERFACE_KEY.match(interface) - if match is None: continue - interface = match.group(1) - delta_sample = compute_delta_sample( - self.__absolute_samples.get(interface), self.__timestamp, samples, now) - if delta_sample is not None: self.__delta_samples[interface] = delta_sample - self.__absolute_samples[interface] = samples - self.__timestamp = now - except: # pylint: disable=bare-except - LOGGER.exception('Error collecting samples') - - def get(self, resource_key : str) -> Tuple[float, Dict]: - self._refresh_samples() - match = RE_GET_ENDPOINT_FROM_INTERFACE_XPATH.match(resource_key) - with self.__lock: - if match is None: return self.__timestamp, {} - interface = match.group(1) - return self.__timestamp, copy.deepcopy(self.__delta_samples.get(interface, {})) def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : queue.Queue) -> None: try: @@ -249,6 +196,13 @@ class OpenConfigDriver(_Driver): self.__subscriptions = TreeNode('.') self.__started = threading.Event() self.__terminate = threading.Event() + + self.__telemetry_protocol = parse_telemetry_protocol(settings.get('telemetry_protocol')) + if self.__telemetry_protocol == TelemetryProtocolEnum.GNMI: + self.__telemetry = GnmiTelemetry() + else: + self.__telemetry = NetConfTelemetry() + self.__scheduler = 
BackgroundScheduler(daemon=True) # scheduler used to emulate sampling events self.__scheduler.configure( jobstores = {'default': MemoryJobStore()}, diff --git a/src/device/service/drivers/openconfig/SamplesCache.py b/src/device/service/drivers/openconfig/SamplesCache.py new file mode 100644 index 000000000..c2fa6bcec --- /dev/null +++ b/src/device/service/drivers/openconfig/SamplesCache.py @@ -0,0 +1,68 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): + if previous_sample is None: return None + if previous_timestamp is None: return None + if current_sample is None: return None + if current_timestamp is None: return None + delay = current_timestamp - previous_timestamp + field_keys = set(previous_sample.keys()).union(current_sample.keys()) + field_keys.discard('name') + delta_sample = {'name': previous_sample['name']} + for field_key in field_keys: + previous_sample_value = previous_sample[field_key] + if not isinstance(previous_sample_value, (int, float)): continue + current_sample_value = current_sample[field_key] + if not isinstance(current_sample_value, (int, float)): continue + delta_value = current_sample_value - previous_sample_value + if delta_value < 0: continue + delta_sample[field_key] = delta_value / delay + return delta_sample + +class SamplesCache: + def __init__(self, netconf_handler : NetconfSessionHandler) -> None: + self.__netconf_handler = netconf_handler + self.__lock = threading.Lock() + self.__timestamp = None + self.__absolute_samples = {} + self.__delta_samples = {} + + def _refresh_samples(self) -> None: + with self.__lock: + try: + now = datetime.timestamp(datetime.utcnow()) + if self.__timestamp is not None and (now - self.__timestamp) < SAMPLE_EVICTION_SECONDS: return + str_filter = get_filter(SAMPLE_RESOURCE_KEY) + xml_data = self.__netconf_handler.get(filter=str_filter).data_ele + interface_samples = parse(SAMPLE_RESOURCE_KEY, xml_data) + for interface,samples in interface_samples: + match = RE_GET_ENDPOINT_FROM_INTERFACE_KEY.match(interface) + if match is None: continue + interface = match.group(1) + delta_sample = compute_delta_sample( + self.__absolute_samples.get(interface), self.__timestamp, samples, now) + if delta_sample is not None: self.__delta_samples[interface] = delta_sample + self.__absolute_samples[interface] = samples + self.__timestamp = now + except: # pylint: 
disable=bare-except + LOGGER.exception('Error collecting samples') + + def get(self, resource_key : str) -> Tuple[float, Dict]: + self._refresh_samples() + match = RE_GET_ENDPOINT_FROM_INTERFACE_XPATH.match(resource_key) + with self.__lock: + if match is None: return self.__timestamp, {} + interface = match.group(1) + return self.__timestamp, copy.deepcopy(self.__delta_samples.get(interface, {})) diff --git a/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py b/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py new file mode 100644 index 000000000..e5927848f --- /dev/null +++ b/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py @@ -0,0 +1,27 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import enum +from typing import Optional + +class TelemetryProtocolEnum(enum.Enum): + GNMI = 'gnmi' + NETCONF = 'netconf' + +DEFAULT_TELEMETRY_PROTOCOL = TelemetryProtocolEnum.NETCONF + +def parse_telemetry_protocol(telemetry_protocol : Optional[str] = None) -> TelemetryProtocolEnum: + if telemetry_protocol is None: return DEFAULT_TELEMETRY_PROTOCOL + # pylint: disable=no-member + return TelemetryProtocolEnum._member_map_.get(telemetry_protocol, DEFAULT_TELEMETRY_PROTOCOL) diff --git a/src/device/service/drivers/gnmi/__init__.py b/src/device/service/drivers/openconfig/_Telemetry.py similarity index 54% rename from src/device/service/drivers/gnmi/__init__.py rename to src/device/service/drivers/openconfig/_Telemetry.py index 925746998..efd05993b 100644 --- a/src/device/service/drivers/gnmi/__init__.py +++ b/src/device/service/drivers/openconfig/_Telemetry.py @@ -1,4 +1,4 @@ -# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/) +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,16 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from device.service.driver_api._Driver import RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES -ALL_RESOURCE_KEYS = [ - RESOURCE_ENDPOINTS, - RESOURCE_INTERFACES, - RESOURCE_NETWORK_INSTANCES, -] +class _Telemetry: + def __init__(self) -> None: + pass -RESOURCE_KEY_MAPPINGS = { - RESOURCE_ENDPOINTS : 'component', - RESOURCE_INTERFACES : 'interface', - RESOURCE_NETWORK_INSTANCES: 'network_instance', -} + def subscribe(self) -> None: + pass + + def unsubscribe(self) -> None: + pass + + def get_samples_queue(self) -> None: + pass -- GitLab From 2ba1b34888248e2130d395ed08695f7bdaf97a3c Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 6 Feb 2023 10:09:33 +0000 Subject: [PATCH 03/62] Device component - OpenConfig Driver: - adding custom my_deploy.sh --- my_deploy.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/my_deploy.sh b/my_deploy.sh index 6f0e64afe..2770aba13 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,7 +20,8 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" +#export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" +export TFS_COMPONENTS="context device monitoring pathcomp service slice webui" # Set the tag you want to use for your images. 
export TFS_IMAGE_TAG="dev" -- GitLab From 3771de69dbe3fedf3635eea4cef0ac3941665da5 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 6 Feb 2023 10:12:08 +0000 Subject: [PATCH 04/62] Device component - OpenConfig Driver: - updating skeleton for gNMI --- src/device/service/drivers/openconfig/OpenConfigDriver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index a541a23bf..923cd8876 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -131,6 +131,7 @@ def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : if value is None: LOGGER.warning('[do_sampling] value not found for {:s}'.format(resource_key)) return + # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s} sample = (timestamp, resource_key, value) out_samples.put_nowait(sample) except: # pylint: disable=bare-except -- GitLab From 0e8dc476c6aaaeec274d6f93a8d260d3455ff2be Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Thu, 9 Feb 2023 10:04:48 +0000 Subject: [PATCH 05/62] Device component - OpenConfigDriver: - Reorganized classes and imports --- .../openconfig/NetconfSessionHandler.py | 129 ++++++++++++++ .../drivers/openconfig/OpenConfigDriver.py | 165 ++---------------- .../drivers/openconfig/SamplesCache.py | 32 ++++ 3 files changed, 179 insertions(+), 147 deletions(-) create mode 100644 src/device/service/drivers/openconfig/NetconfSessionHandler.py diff --git a/src/device/service/drivers/openconfig/NetconfSessionHandler.py b/src/device/service/drivers/openconfig/NetconfSessionHandler.py new file mode 100644 index 000000000..746f11d12 --- /dev/null +++ b/src/device/service/drivers/openconfig/NetconfSessionHandler.py @@ -0,0 +1,129 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, threading +from typing import Any, List, Tuple +from ncclient.manager import Manager, connect_ssh +from common.tools.client.RetryDecorator import delay_exponential +from common.type_checkers.Checkers import chk_length, chk_string, chk_type +from device.service.driver_api.Exceptions import UnsupportedResourceKeyException +from .templates import EMPTY_CONFIG, compose_config +from .RetryDecorator import retry + +MAX_RETRIES = 15 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +LOGGER = logging.getLogger(__name__) + +class NetconfSessionHandler: + def __init__(self, address : str, port : int, **settings) -> None: + self.__lock = threading.RLock() + self.__connected = threading.Event() + self.__address = address + self.__port = int(port) + self.__username = settings.get('username') + self.__password = settings.get('password') + self.__vendor = settings.get('vendor') + self.__key_filename = settings.get('key_filename') + self.__hostkey_verify = settings.get('hostkey_verify', True) + self.__look_for_keys = settings.get('look_for_keys', True) + self.__allow_agent = settings.get('allow_agent', True) + self.__force_running = settings.get('force_running', False) + self.__commit_per_delete = settings.get('delete_rule', False) + self.__device_params = settings.get('device_params', {}) + 
self.__manager_params = settings.get('manager_params', {}) + self.__nc_params = settings.get('nc_params', {}) + self.__manager : Manager = None + self.__candidate_supported = False + + def connect(self): + with self.__lock: + self.__manager = connect_ssh( + host=self.__address, port=self.__port, username=self.__username, password=self.__password, + device_params=self.__device_params, manager_params=self.__manager_params, nc_params=self.__nc_params, + key_filename=self.__key_filename, hostkey_verify=self.__hostkey_verify, allow_agent=self.__allow_agent, + look_for_keys=self.__look_for_keys) + self.__candidate_supported = ':candidate' in self.__manager.server_capabilities + self.__connected.set() + + def disconnect(self): + if not self.__connected.is_set(): return + with self.__lock: + self.__manager.close_session() + + @property + def use_candidate(self): return self.__candidate_supported and not self.__force_running + + @property + def commit_per_rule(self): return self.__commit_per_delete + + @property + def vendor(self): return self.__vendor + + @RETRY_DECORATOR + def get(self, filter=None, with_defaults=None): # pylint: disable=redefined-builtin + with self.__lock: + return self.__manager.get(filter=filter, with_defaults=with_defaults) + + @RETRY_DECORATOR + def edit_config( + self, config, target='running', default_operation=None, test_option=None, + error_option=None, format='xml' # pylint: disable=redefined-builtin + ): + if config == EMPTY_CONFIG: return + with self.__lock: + self.__manager.edit_config( + config, target=target, default_operation=default_operation, test_option=test_option, + error_option=error_option, format=format) + + def locked(self, target): + return self.__manager.locked(target=target) + + def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): + return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) + +def edit_config( + netconf_handler : NetconfSessionHandler, 
resources : List[Tuple[str, Any]], delete=False, commit_per_rule= False, + target='running', default_operation='merge', test_option=None, error_option=None, + format='xml' # pylint: disable=redefined-builtin +): + str_method = 'DeleteConfig' if delete else 'SetConfig' + LOGGER.info('[{:s}] resources = {:s}'.format(str_method, str(resources))) + results = [None for _ in resources] + for i,resource in enumerate(resources): + str_resource_name = 'resources[#{:d}]'.format(i) + try: + LOGGER.info('[{:s}] resource = {:s}'.format(str_method, str(resource))) + chk_type(str_resource_name, resource, (list, tuple)) + chk_length(str_resource_name, resource, min_length=2, max_length=2) + resource_key,resource_value = resource + chk_string(str_resource_name + '.key', resource_key, allow_empty=False) + str_config_message = compose_config( + resource_key, resource_value, delete=delete, vendor=netconf_handler.vendor) + if str_config_message is None: raise UnsupportedResourceKeyException(resource_key) + LOGGER.info('[{:s}] str_config_message[{:d}] = {:s}'.format( + str_method, len(str_config_message), str(str_config_message))) + netconf_handler.edit_config( + config=str_config_message, target=target, default_operation=default_operation, + test_option=test_option, error_option=error_option, format=format) + if commit_per_rule: + netconf_handler.commit() + results[i] = True + except Exception as e: # pylint: disable=broad-except + str_operation = 'preparing' if target == 'candidate' else ('deleting' if delete else 'setting') + msg = '[{:s}] Exception {:s} {:s}: {:s}' + LOGGER.exception(msg.format(str_method, str_operation, str_resource_name, str(resource))) + results[i] = e # if validation fails, store the exception + return results diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index 923cd8876..96affc318 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ 
b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -12,25 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -import anytree, copy, logging, pytz, queue, re, threading +import anytree, logging, pytz, queue, threading #import lxml.etree as ET from datetime import datetime, timedelta -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Iterator, List, Optional, Tuple, Union from apscheduler.executors.pool import ThreadPoolExecutor from apscheduler.job import Job from apscheduler.jobstores.memory import MemoryJobStore from apscheduler.schedulers.background import BackgroundScheduler -from ncclient.manager import Manager, connect_ssh from common.method_wrappers.Decorator import MetricTypeEnum, MetricsPool, metered_subclass_method, INF -from common.tools.client.RetryDecorator import delay_exponential from common.type_checkers.Checkers import chk_length, chk_string, chk_type, chk_float -from device.service.driver_api.Exceptions import UnsupportedResourceKeyException from device.service.driver_api._Driver import _Driver -from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_subnode_value #dump_subtree +from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_subnode_value +from .templates import ALL_RESOURCE_KEYS, get_filter, parse +from .NetconfSessionHandler import NetconfSessionHandler, edit_config +from .SamplesCache import SamplesCache, do_sampling #dump_subtree +#from .TelemetryProtocolEnum import TelemetryProtocolEnum, parse_telemetry_protocol #from .Tools import xml_pretty_print, xml_to_dict, xml_to_file -from .templates import ALL_RESOURCE_KEYS, EMPTY_CONFIG, compose_config, get_filter, parse -from .RetryDecorator import retry -from .TelemetryProtocolEnum import DEFAULT_TELEMETRY_PROTOCOL, TelemetryProtocolEnum, parse_telemetry_protocol DEBUG_MODE = False 
logging.getLogger('ncclient.manager').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) @@ -41,136 +39,6 @@ logging.getLogger('monitoring-client').setLevel(logging.INFO if DEBUG_MODE else LOGGER = logging.getLogger(__name__) -RE_GET_ENDPOINT_FROM_INTERFACE_KEY = re.compile(r'.*interface\[([^\]]+)\].*') -RE_GET_ENDPOINT_FROM_INTERFACE_XPATH = re.compile(r".*interface\[oci\:name\='([^\]]+)'\].*") - -# Collection of samples through NetConf is very slow and each request collects all the data. -# Populate a cache periodically (when first interface is interrogated). -# Evict data after some seconds, when data is considered as outdated - -SAMPLE_EVICTION_SECONDS = 30.0 # seconds -SAMPLE_RESOURCE_KEY = 'interfaces/interface/state/counters' - -MAX_RETRIES = 15 -DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) -RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') - -class NetconfSessionHandler: - def __init__(self, address : str, port : int, **settings) -> None: - self.__lock = threading.RLock() - self.__connected = threading.Event() - self.__address = address - self.__port = int(port) - self.__username = settings.get('username') - self.__password = settings.get('password') - self.__vendor = settings.get('vendor') - self.__key_filename = settings.get('key_filename') - self.__hostkey_verify = settings.get('hostkey_verify', True) - self.__look_for_keys = settings.get('look_for_keys', True) - self.__allow_agent = settings.get('allow_agent', True) - self.__force_running = settings.get('force_running', False) - self.__commit_per_delete = settings.get('delete_rule', False) - self.__device_params = settings.get('device_params', {}) - self.__manager_params = settings.get('manager_params', {}) - self.__nc_params = settings.get('nc_params', {}) - self.__manager : Manager = None - self.__candidate_supported = False - - def connect(self): - with self.__lock: - self.__manager = connect_ssh( - 
host=self.__address, port=self.__port, username=self.__username, password=self.__password, - device_params=self.__device_params, manager_params=self.__manager_params, nc_params=self.__nc_params, - key_filename=self.__key_filename, hostkey_verify=self.__hostkey_verify, allow_agent=self.__allow_agent, - look_for_keys=self.__look_for_keys) - self.__candidate_supported = ':candidate' in self.__manager.server_capabilities - self.__connected.set() - - def disconnect(self): - if not self.__connected.is_set(): return - with self.__lock: - self.__manager.close_session() - - @property - def use_candidate(self): return self.__candidate_supported and not self.__force_running - - @property - def commit_per_rule(self): return self.__commit_per_delete - - @property - def vendor(self): return self.__vendor - - @RETRY_DECORATOR - def get(self, filter=None, with_defaults=None): # pylint: disable=redefined-builtin - with self.__lock: - return self.__manager.get(filter=filter, with_defaults=with_defaults) - - @RETRY_DECORATOR - def edit_config( - self, config, target='running', default_operation=None, test_option=None, - error_option=None, format='xml' # pylint: disable=redefined-builtin - ): - if config == EMPTY_CONFIG: return - with self.__lock: - self.__manager.edit_config( - config, target=target, default_operation=default_operation, test_option=test_option, - error_option=error_option, format=format) - - def locked(self, target): - return self.__manager.locked(target=target) - - def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): - return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) - - -def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : queue.Queue) -> None: - try: - timestamp, samples = samples_cache.get(resource_key) - counter_name = resource_key.split('/')[-1].split(':')[-1] - value = samples.get(counter_name) - if value is None: - LOGGER.warning('[do_sampling] 
value not found for {:s}'.format(resource_key)) - return - # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s} - sample = (timestamp, resource_key, value) - out_samples.put_nowait(sample) - except: # pylint: disable=bare-except - LOGGER.exception('Error retrieving samples') - -def edit_config( - netconf_handler : NetconfSessionHandler, resources : List[Tuple[str, Any]], delete=False, commit_per_rule= False, - target='running', default_operation='merge', test_option=None, error_option=None, - format='xml' # pylint: disable=redefined-builtin -): - str_method = 'DeleteConfig' if delete else 'SetConfig' - LOGGER.info('[{:s}] resources = {:s}'.format(str_method, str(resources))) - results = [None for _ in resources] - for i,resource in enumerate(resources): - str_resource_name = 'resources[#{:d}]'.format(i) - try: - LOGGER.info('[{:s}] resource = {:s}'.format(str_method, str(resource))) - chk_type(str_resource_name, resource, (list, tuple)) - chk_length(str_resource_name, resource, min_length=2, max_length=2) - resource_key,resource_value = resource - chk_string(str_resource_name + '.key', resource_key, allow_empty=False) - str_config_message = compose_config( - resource_key, resource_value, delete=delete, vendor=netconf_handler.vendor) - if str_config_message is None: raise UnsupportedResourceKeyException(resource_key) - LOGGER.info('[{:s}] str_config_message[{:d}] = {:s}'.format( - str_method, len(str_config_message), str(str_config_message))) - netconf_handler.edit_config( - config=str_config_message, target=target, default_operation=default_operation, - test_option=test_option, error_option=error_option, format=format) - if commit_per_rule: - netconf_handler.commit() - results[i] = True - except Exception as e: # pylint: disable=broad-except - str_operation = 'preparing' if target == 'candidate' else ('deleting' if delete else 'setting') - msg = '[{:s}] Exception {:s} {:s}: {:s}' - LOGGER.exception(msg.format(str_method, 
str_operation, str_resource_name, str(resource))) - results[i] = e # if validation fails, store the exception - return results - HISTOGRAM_BUCKETS = ( # .005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, INF 0.0001, 0.00025, 0.00050, 0.00075, @@ -198,11 +66,11 @@ class OpenConfigDriver(_Driver): self.__started = threading.Event() self.__terminate = threading.Event() - self.__telemetry_protocol = parse_telemetry_protocol(settings.get('telemetry_protocol')) - if self.__telemetry_protocol == TelemetryProtocolEnum.GNMI: - self.__telemetry = GnmiTelemetry() - else: - self.__telemetry = NetConfTelemetry() + #self.__telemetry_protocol = parse_telemetry_protocol(settings.get('telemetry_protocol')) + #if self.__telemetry_protocol == TelemetryProtocolEnum.GNMI: + # self.__telemetry = GnmiTelemetry() + #else: + # self.__telemetry = NetConfTelemetry() self.__scheduler = BackgroundScheduler(daemon=True) # scheduler used to emulate sampling events self.__scheduler.configure( @@ -268,7 +136,8 @@ class OpenConfigDriver(_Driver): if self.__netconf_handler.use_candidate: with self.__netconf_handler.locked(target='candidate'): if self.__netconf_handler.commit_per_rule: - results = edit_config(self.__netconf_handler, resources, target='candidate', commit_per_rule= True) + results = edit_config( + self.__netconf_handler, resources, target='candidate', commit_per_rule=True) else: results = edit_config(self.__netconf_handler, resources, target='candidate') try: @@ -288,13 +157,15 @@ class OpenConfigDriver(_Driver): if self.__netconf_handler.use_candidate: with self.__netconf_handler.locked(target='candidate'): if self.__netconf_handler.commit_per_rule: - results = edit_config(self.__netconf_handler, resources, target='candidate', delete=True, commit_per_rule= True) + results = edit_config( + self.__netconf_handler, resources, target='candidate', delete=True, commit_per_rule=True) else: results = edit_config(self.__netconf_handler, resources, target='candidate', delete=True) try: 
self.__netconf_handler.commit() except Exception as e: # pylint: disable=broad-except - LOGGER.exception('[DeleteConfig] Exception commiting resources: {:s}'.format(str(resources))) + MSG = '[DeleteConfig] Exception commiting resources: {:s}' + LOGGER.exception(MSG.format(str(resources))) results = [e for _ in resources] # if commit fails, set exception in each resource else: results = edit_config(self.__netconf_handler, resources, delete=True) diff --git a/src/device/service/drivers/openconfig/SamplesCache.py b/src/device/service/drivers/openconfig/SamplesCache.py index c2fa6bcec..24dc33663 100644 --- a/src/device/service/drivers/openconfig/SamplesCache.py +++ b/src/device/service/drivers/openconfig/SamplesCache.py @@ -12,6 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Collection of samples through NetConf is very slow and each request collects all the data. +# Populate a cache periodically (when first interface is interrogated). 
+# Evict data after some seconds, when data is considered as outdated + +import copy, queue, logging, re, threading +from datetime import datetime +from typing import Dict, Tuple +from .templates import get_filter, parse +from .NetconfSessionHandler import NetconfSessionHandler + +SAMPLE_EVICTION_SECONDS = 30.0 # seconds +SAMPLE_RESOURCE_KEY = 'interfaces/interface/state/counters' + +RE_GET_ENDPOINT_FROM_INTERFACE_KEY = re.compile(r'.*interface\[([^\]]+)\].*') +RE_GET_ENDPOINT_FROM_INTERFACE_XPATH = re.compile(r".*interface\[oci\:name\='([^\]]+)'\].*") + +LOGGER = logging.getLogger(__name__) + def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): if previous_sample is None: return None if previous_timestamp is None: return None @@ -66,3 +84,17 @@ class SamplesCache: if match is None: return self.__timestamp, {} interface = match.group(1) return self.__timestamp, copy.deepcopy(self.__delta_samples.get(interface, {})) + +def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : queue.Queue) -> None: + try: + timestamp, samples = samples_cache.get(resource_key) + counter_name = resource_key.split('/')[-1].split(':')[-1] + value = samples.get(counter_name) + if value is None: + LOGGER.warning('[do_sampling] value not found for {:s}'.format(resource_key)) + return + # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s} + sample = (timestamp, resource_key, value) + out_samples.put_nowait(sample) + except: # pylint: disable=bare-except + LOGGER.exception('Error retrieving samples') -- GitLab From dcde117f5c7e712dfe11bb0842793ef4f5cab722 Mon Sep 17 00:00:00 2001 From: cajadiazj Date: Thu, 23 Feb 2023 18:10:07 +0100 Subject: [PATCH 06/62] gNMI support added --- .../service/drivers/openconfig/gnmi.proto | 501 ++++ .../service/drivers/openconfig/gnmi_ext.proto | 76 + .../service/drivers/openconfig/gnmi_pb2.py | 129 ++ .../drivers/openconfig/gnmi_pb2.py.old | 2037 
+++++++++++++++++ .../service/drivers/openconfig/gnmi_pb2.pyi | 380 +++ .../drivers/openconfig/gnmi_pb2_grpc.py | 185 ++ 6 files changed, 3308 insertions(+) create mode 100644 src/device/service/drivers/openconfig/gnmi.proto create mode 100644 src/device/service/drivers/openconfig/gnmi_ext.proto create mode 100644 src/device/service/drivers/openconfig/gnmi_pb2.py create mode 100644 src/device/service/drivers/openconfig/gnmi_pb2.py.old create mode 100644 src/device/service/drivers/openconfig/gnmi_pb2.pyi create mode 100644 src/device/service/drivers/openconfig/gnmi_pb2_grpc.py diff --git a/src/device/service/drivers/openconfig/gnmi.proto b/src/device/service/drivers/openconfig/gnmi.proto new file mode 100644 index 000000000..292880a55 --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi.proto @@ -0,0 +1,501 @@ +syntax = "proto3"; + +import "google/protobuf/any.proto"; +import "google/protobuf/descriptor.proto"; +// import gnmi_ext.proto; + +// Package gNMI defines a service specification for the gRPC Network Management +// Interface. This interface is defined to be a standard interface via which +// a network management system ("client") can subscribe to state values, +// retrieve snapshots of state information, and manipulate the state of a data +// tree supported by a device ("target"). +// +// This document references the gNMI Specification which can be found at +// http://github.com/openconfig/reference/blob/master/rpc/gnmi +package gnmi; + +// Define a protobuf FileOption that defines the gNMI service version. +extend google.protobuf.FileOptions { + // The gNMI service semantic version. + string gnmi_service = 1001; +} + +// gNMI_service is the current version of the gNMI service, returned through +// the Capabilities RPC. 
+option (gnmi_service) = "0.8.0"; + +option go_package = "github.com/openconfig/gnmi/proto/gnmi"; +option java_multiple_files = true; +option java_outer_classname = "GnmiProto"; +option java_package = "com.github.gnmi.proto"; + + +service gNMI { + // Capabilities allows the client to retrieve the set of capabilities that + // is supported by the target. This allows the target to validate the + // service version that is implemented and retrieve the set of models that + // the target supports. The models can then be specified in subsequent RPCs + // to restrict the set of data that is utilized. + // Reference: gNMI Specification Section 3.2 + rpc Capabilities(CapabilityRequest) returns (CapabilityResponse); + // Retrieve a snapshot of data from the target. A Get RPC requests that the + // target snapshots a subset of the data tree as specified by the paths + // included in the message and serializes this to be returned to the + // client using the specified encoding. + // Reference: gNMI Specification Section 3.3 + rpc Get(GetRequest) returns (GetResponse); + // Set allows the client to modify the state of data on the target. The + // paths to modified along with the new values that the client wishes + // to set the value to. + // Reference: gNMI Specification Section 3.4 + rpc Set(SetRequest) returns (SetResponse); + // Subscribe allows a client to request the target to send it values + // of particular paths within the data tree. These values may be streamed + // at a particular cadence (STREAM), sent one off on a long-lived channel + // (POLL), or sent as a one-off retrieval (ONCE). + // Reference: gNMI Specification Section 3.5 + rpc Subscribe(stream SubscribeRequest) returns (stream SubscribeResponse); +} +// The Extension message contains a single gNMI extension. +message Extension { + oneof ext { + RegisteredExtension registered_ext = 1; // A registered extension. + // Well known extensions. 
+ MasterArbitration master_arbitration = 2; // Master arbitration extension. + History history = 3; // History extension. + } +} + +// The RegisteredExtension message defines an extension which is defined outside +// of this file. +message RegisteredExtension { + ExtensionID id = 1; // The unique ID assigned to this extension. + bytes msg = 2; // The binary-marshalled protobuf extension payload. +} + +// RegisteredExtension is an enumeration acting as a registry for extensions +// defined by external sources. +enum ExtensionID { + EID_UNSET = 0; + // New extensions are to be defined within this enumeration - their definition + // MUST link to a reference describing their implementation. + + // An experimental extension that may be used during prototyping of a new + // extension. + EID_EXPERIMENTAL = 999; +} + +// MasterArbitration is used to select the master among multiple gNMI clients +// with the same Roles. The client with the largest election_id is honored as +// the master. +// The document about gNMI master arbitration can be found at +// https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-master-arbitration.md +message MasterArbitration { + Role role = 1; + Uint128 election_id = 2; +} + +// Representation of unsigned 128-bit integer. +message Uint128 { + uint64 high = 1; + uint64 low = 2; +} + +// There can be one master for each role. The role is identified by its id. +message Role { + string id = 1; + // More fields can be added if needed, for example, to specify what paths the + // role can read/write. +} + +// The History extension allows clients to request historical data. 
Its +// spec can be found at +// https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-history.md +message History { + oneof request { + int64 snapshot_time = 1; // Nanoseconds since the epoch + TimeRange range = 2; + } +} + +message TimeRange { + int64 start = 1; // Nanoseconds since the epoch + int64 end = 2; // Nanoseconds since the epoch +} +// Notification is a re-usable message that is used to encode data from the +// target to the client. A Notification carries two types of changes to the data +// tree: +// - Deleted values (delete) - a set of paths that have been removed from the +// data tree. +// - Updated values (update) - a set of path-value pairs indicating the path +// whose value has changed in the data tree. +// Reference: gNMI Specification Section 2.1 +message Notification { + int64 timestamp = 1; // Timestamp in nanoseconds since Epoch. + Path prefix = 2; // Prefix used for paths in the message. + repeated Update update = 4; // Data elements that have changed values. + repeated Path delete = 5; // Data elements that have been deleted. + // This notification contains a set of paths that are always updated together + // referenced by a globally unique prefix. + bool atomic = 6; + // Reserved field numbers and identifiers. + reserved "alias"; + reserved 3; +} + +// Update is a re-usable message that is used to store a particular Path, +// Value pair. +// Reference: gNMI Specification Section 2.1 +message Update { + Path path = 1; // The path (key) for the update. + Value value = 2 [deprecated=true]; // The value (value) for the update. + TypedValue val = 3; // The explicitly typed update value. + uint32 duplicates = 4; // Number of coalesced duplicates. +} + +// TypedValue is used to encode a value being sent between the client and +// target (originated by either entity). +message TypedValue { + // One of the fields within the val oneof is populated with the value + // of the update. 
The type of the value being included in the Update + // determines which field should be populated. In the case that the + // encoding is a particular form of the base protobuf type, a specific + // field is used to store the value (e.g., json_val). + oneof value { + string string_val = 1; // String value. + int64 int_val = 2; // Integer value. + uint64 uint_val = 3; // Unsigned integer value. + bool bool_val = 4; // Bool value. + bytes bytes_val = 5; // Arbitrary byte sequence value. + float float_val = 6 [deprecated=true]; // Deprecated - use double_val. + double double_val = 14; // Floating point value. + Decimal64 decimal_val = 7 [deprecated=true]; // Deprecated - use double_val. + ScalarArray leaflist_val = 8; // Mixed type scalar array value. + google.protobuf.Any any_val = 9; // protobuf.Any encoded bytes. + bytes json_val = 10; // JSON-encoded text. + bytes json_ietf_val = 11; // JSON-encoded text per RFC7951. + string ascii_val = 12; // Arbitrary ASCII text. + // Protobuf binary encoded bytes. The message type is not included. + // See the specification at + // github.com/openconfig/reference/blob/master/rpc/gnmi/protobuf-vals.md + // for a complete specification. [Experimental] + bytes proto_bytes = 13; + } +} + +// Path encodes a data tree path as a series of repeated strings, with +// each element of the path representing a data tree node name and the +// associated attributes. +// Reference: gNMI Specification Section 2.2.2. +message Path { + // Elements of the path are no longer encoded as a string, but rather within + // the elem field as a PathElem message. + repeated string element = 1 [deprecated=true]; + string origin = 2; // Label to disambiguate path. + repeated PathElem elem = 3; // Elements of the path. + string target = 4; // The name of the target + // (Sec. 2.2.2.1) +} + +// PathElem encodes an element of a gNMI path, along with any attributes (keys) +// that may be associated with it. +// Reference: gNMI Specification Section 2.2.2. 
+message PathElem { + string name = 1; // The name of the element in the path. + map key = 2; // Map of key (attribute) name to value. +} + +// Value encodes a data tree node's value - along with the way in which +// the value is encoded. This message is deprecated by gNMI 0.3.0. +// Reference: gNMI Specification Section 2.2.3. +message Value { + option deprecated = true; + bytes value = 1; // Value of the variable being transmitted. + Encoding type = 2; // Encoding used for the value field. +} + +// Encoding defines the value encoding formats that are supported by the gNMI +// protocol. These encodings are used by both the client (when sending Set +// messages to modify the state of the target) and the target when serializing +// data to be returned to the client (in both Subscribe and Get RPCs). +// Reference: gNMI Specification Section 2.3 +enum Encoding { + JSON = 0; // JSON encoded text. + BYTES = 1; // Arbitrarily encoded bytes. + PROTO = 2; // Encoded according to scalar values of TypedValue. + ASCII = 3; // ASCII text of an out-of-band agreed format. + JSON_IETF = 4; // JSON encoded text as per RFC7951. +} + +// Error message previously utilised to return errors to the client. Deprecated +// in favour of using the google.golang.org/genproto/googleapis/rpc/status +// message in the RPC response. +// Reference: gNMI Specification Section 2.5 +message Error { + option deprecated = true; + uint32 code = 1; // Canonical gRPC error code. + string message = 2; // Human readable error. + google.protobuf.Any data = 3; // Optional additional information. +} + +// Decimal64 is used to encode a fixed precision decimal number. The value +// is expressed as a set of digits with the precision specifying the +// number of digits following the decimal point in the digit set. +// This message is deprecated in favor of encoding all floating point types +// as double precision. +message Decimal64 { + option deprecated = true; + int64 digits = 1; // Set of digits. 
+ uint32 precision = 2; // Number of digits following the decimal point. +} + +// ScalarArray is used to encode a mixed-type array of values. +message ScalarArray { + // The set of elements within the array. Each TypedValue message should + // specify only elements that have a field identifier of 1-7 (i.e., the + // values are scalar values). + repeated TypedValue element = 1; +} + +// SubscribeRequest is the message sent by the client to the target when +// initiating a subscription to a set of paths within the data tree. The +// request field must be populated and the initial message must specify a +// SubscriptionList to initiate a subscription. +// Reference: gNMI Specification Section 3.5.1.1 +message SubscribeRequest { + oneof request { + SubscriptionList subscribe = 1; // Specify the paths within a subscription. + Poll poll = 3; // Trigger a polled update. + } + // Extension messages associated with the SubscribeRequest. See the + // gNMI extension specification for further definition. + repeated Extension extension = 5; + // Reserved field numbers and identifiers. + reserved 4; + reserved "aliases"; +} + +// Poll is sent within a SubscribeRequest to trigger the device to +// send telemetry updates for the paths that are associated with the +// subscription. +// Reference: gNMI Specification Section Section 3.5.1.4 +message Poll { +} + +// SubscribeResponse is the message used by the target within a Subscribe RPC. +// The target includes a Notification message which is used to transmit values +// of the path(s) that are associated with the subscription. The same message +// is to indicate that the target has sent all data values once (is +// synchronized). +// Reference: gNMI Specification Section 3.5.1.4 +message SubscribeResponse { + oneof response { + Notification update = 1; // Changed or sampled value for a path. + // Indicate target has sent all values associated with the subscription + // at least once. 
+ bool sync_response = 3; + // Deprecated in favour of google.golang.org/genproto/googleapis/rpc/status + Error error = 4 [deprecated=true]; + } + // Extension messages associated with the SubscribeResponse. See the + // gNMI extension specification for further definition. + repeated Extension extension = 5; +} + +// SubscriptionList is used within a Subscribe message to specify the list of +// paths that the client wishes to subscribe to. The message consists of a +// list of (possibly prefixed) paths, and options that relate to the +// subscription. +// Reference: gNMI Specification Section 3.5.1.2 +message SubscriptionList { + Path prefix = 1; // Prefix used for paths. + repeated Subscription subscription = 2; // Set of subscriptions to create. + QOSMarking qos = 4; // DSCP marking to be used. + // Mode of the subscription. + enum Mode { + STREAM = 0; // Values streamed by the target (Sec. 3.5.1.5.2). + ONCE = 1; // Values sent once-off by the target (Sec. 3.5.1.5.1). + POLL = 2; // Values sent in response to a poll request (Sec. 3.5.1.5.3). + } + Mode mode = 5; + // Whether elements of the schema that are marked as eligible for aggregation + // should be aggregated or not. + bool allow_aggregation = 6; + // The set of schemas that define the elements of the data tree that should + // be sent by the target. + repeated ModelData use_models = 7; + // The encoding that the target should use within the Notifications generated + // corresponding to the SubscriptionList. + Encoding encoding = 8; + // An optional field to specify that only updates to current state should be + // sent to a client. If set, the initial state is not sent to the client but + // rather only the sync message followed by any subsequent updates to the + // current state. For ONCE and POLL modes, this causes the server to send only + // the sync message (Sec. 3.5.2.3). + bool updates_only = 9; + // Reserved field numbers and identifiers. 
+ reserved 3; + reserved "use_aliases"; +} + +// Subscription is a single request within a SubscriptionList. The path +// specified is interpreted (along with the prefix) as the elements of the data +// tree that the client is subscribing to. The mode determines how the target +// should trigger updates to be sent. +// Reference: gNMI Specification Section 3.5.1.3 +message Subscription { + Path path = 1; // The data tree path. + SubscriptionMode mode = 2; // Subscription mode to be used. + uint64 sample_interval = 3; // ns between samples in SAMPLE mode. + // Indicates whether values that have not changed should be sent in a SAMPLE + // subscription. + bool suppress_redundant = 4; + // Specifies the maximum allowable silent period in nanoseconds when + // suppress_redundant is in use. The target should send a value at least once + // in the period specified. + uint64 heartbeat_interval = 5; +} + +// SubscriptionMode is the mode of the subscription, specifying how the +// target must return values in a subscription. +// Reference: gNMI Specification Section 3.5.1.3 +enum SubscriptionMode { + TARGET_DEFINED = 0; // The target selects the relevant mode for each element. + ON_CHANGE = 1; // The target sends an update on element value change. + SAMPLE = 2; // The target samples values according to the interval. +} + +// QOSMarking specifies the DSCP value to be set on transmitted telemetry +// updates from the target. +// Reference: gNMI Specification Section 3.5.1.2 +message QOSMarking { + uint32 marking = 1; +} + +// SetRequest is sent from a client to the target to update values in the data +// tree. Paths are either deleted by the client, or modified by means of being +// updated, or replaced. Where a replace is used, unspecified values are +// considered to be replaced, whereas when update is used the changes are +// considered to be incremental. The set of changes that are specified within +// a single SetRequest are considered to be a transaction. 
+// Reference: gNMI Specification Section 3.4.1 +message SetRequest { + Path prefix = 1; // Prefix used for paths in the message. + repeated Path delete = 2; // Paths to be deleted from the data tree. + repeated Update replace = 3; // Updates specifying elements to be replaced. + repeated Update update = 4; // Updates specifying elements to updated. + // Extension messages associated with the SetRequest. See the + // gNMI extension specification for further definition. + repeated Extension extension = 5; +} + +// SetResponse is the response to a SetRequest, sent from the target to the +// client. It reports the result of the modifications to the data tree that were +// specified by the client. Errors for this RPC should be reported using the +// https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto +// message in the RPC return. The gnmi.Error message can be used to add additional +// details where required. +// Reference: gNMI Specification Section 3.4.2 +message SetResponse { + Path prefix = 1; // Prefix used for paths. + // A set of responses specifying the result of the operations specified in + // the SetRequest. + repeated UpdateResult response = 2; + Error message = 3 [deprecated=true]; // The overall status of the transaction. + int64 timestamp = 4; // Timestamp of transaction (ns since epoch). + // Extension messages associated with the SetResponse. See the + // gNMI extension specification for further definition. + repeated Extension extension = 5; +} + +// UpdateResult is used within the SetResponse message to communicate the +// result of an operation specified within a SetRequest message. +// Reference: gNMI Specification Section 3.4.2 +message UpdateResult { + // The operation that was associated with the Path specified. + enum Operation { + INVALID = 0; + DELETE = 1; // The result relates to a delete of Path. + REPLACE = 2; // The result relates to a replace of Path. + UPDATE = 3; // The result relates to an update of Path. 
+ } + // Deprecated timestamp for the UpdateResult, this field has been + // replaced by the timestamp within the SetResponse message, since + // all mutations effected by a set should be applied as a single + // transaction. + int64 timestamp = 1 [deprecated=true]; + Path path = 2; // Path associated with the update. + Error message = 3 [deprecated=true]; // Status of the update operation. + Operation op = 4; // Update operation type. +} + +// GetRequest is sent when a client initiates a Get RPC. It is used to specify +// the set of data elements for which the target should return a snapshot of +// data. The use_models field specifies the set of schema modules that are to +// be used by the target - where use_models is not specified then the target +// must use all schema models that it has. +// Reference: gNMI Specification Section 3.3.1 +message GetRequest { + Path prefix = 1; // Prefix used for paths. + repeated Path path = 2; // Paths requested by the client. + // Type of elements within the data tree. + enum DataType { + ALL = 0; // All data elements. + CONFIG = 1; // Config (rw) only elements. + STATE = 2; // State (ro) only elements. + // Data elements marked in the schema as operational. This refers to data + // elements whose value relates to the state of processes or interactions + // running on the device. + OPERATIONAL = 3; + } + DataType type = 3; // The type of data being requested. + Encoding encoding = 5; // Encoding to be used. + repeated ModelData use_models = 6; // The schema models to be used. + // Extension messages associated with the GetRequest. See the + // gNMI extension specification for further definition. + repeated Extension extension = 7; +} + +// GetResponse is used by the target to respond to a GetRequest from a client. +// The set of Notifications corresponds to the data values that are requested +// by the client in the GetRequest. 
+// Reference: gNMI Specification Section 3.3.2 +message GetResponse { + repeated Notification notification = 1; // Data values. + Error error = 2 [deprecated=true]; // Errors that occurred in the Get. + // Extension messages associated with the GetResponse. See the + // gNMI extension specification for further definition. + repeated Extension extension = 3; +} + +// CapabilityRequest is sent by the client in the Capabilities RPC to request +// that the target reports its capabilities. +// Reference: gNMI Specification Section 3.2.1 +message CapabilityRequest { + // Extension messages associated with the CapabilityRequest. See the + // gNMI extension specification for further definition. + repeated Extension extension = 1; +} + +// CapabilityResponse is used by the target to report its capabilities to the +// client within the Capabilities RPC. +// Reference: gNMI Specification Section 3.2.2 +message CapabilityResponse { + repeated ModelData supported_models = 1; // Supported schema models. + repeated Encoding supported_encodings = 2; // Supported encodings. + string gNMI_version = 3; // Supported gNMI version. + // Extension messages associated with the CapabilityResponse. See the + // gNMI extension specification for further definition. + repeated Extension extension = 4; +} + +// ModelData is used to describe a set of schema modules. It can be used in a +// CapabilityResponse where a target reports the set of modules that it +// supports, and within the SubscribeRequest and GetRequest messages to specify +// the set of models from which data tree elements should be reported. +// Reference: gNMI Specification Section 3.2.3 +message ModelData { + string name = 1; // Name of the model. + string organization = 2; // Organization publishing the model. + string version = 3; // Semantic version of the model. 
+} \ No newline at end of file diff --git a/src/device/service/drivers/openconfig/gnmi_ext.proto b/src/device/service/drivers/openconfig/gnmi_ext.proto new file mode 100644 index 000000000..9960f12af --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi_ext.proto @@ -0,0 +1,76 @@ +syntax = "proto3"; + +// Package gnmi_ext defines a set of extensions messages which can be optionally +// included with the request and response messages of gNMI RPCs. A set of +// well-known extensions are defined within this file, along with a registry for +// extensions defined outside of this package. +package gnmi_ext; + +option go_package = "github.com/openconfig/gnmi/proto/gnmi_ext"; + +// The Extension message contains a single gNMI extension. +message Extension { + oneof ext { + RegisteredExtension registered_ext = 1; // A registered extension. + // Well known extensions. + MasterArbitration master_arbitration = 2; // Master arbitration extension. + History history = 3; // History extension. + } +} + +// The RegisteredExtension message defines an extension which is defined outside +// of this file. +message RegisteredExtension { + ExtensionID id = 1; // The unique ID assigned to this extension. + bytes msg = 2; // The binary-marshalled protobuf extension payload. +} + +// RegisteredExtension is an enumeration acting as a registry for extensions +// defined by external sources. +enum ExtensionID { + EID_UNSET = 0; + // New extensions are to be defined within this enumeration - their definition + // MUST link to a reference describing their implementation. + + // An experimental extension that may be used during prototyping of a new + // extension. + EID_EXPERIMENTAL = 999; +} + +// MasterArbitration is used to select the master among multiple gNMI clients +// with the same Roles. The client with the largest election_id is honored as +// the master. 
+// The document about gNMI master arbitration can be found at +// https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-master-arbitration.md +message MasterArbitration { + Role role = 1; + Uint128 election_id = 2; +} + +// Representation of unsigned 128-bit integer. +message Uint128 { + uint64 high = 1; + uint64 low = 2; +} + +// There can be one master for each role. The role is identified by its id. +message Role { + string id = 1; + // More fields can be added if needed, for example, to specify what paths the + // role can read/write. +} + +// The History extension allows clients to request historical data. Its +// spec can be found at +// https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-history.md +message History { + oneof request { + int64 snapshot_time = 1; // Nanoseconds since the epoch + TimeRange range = 2; + } +} + +message TimeRange { + int64 start = 1; // Nanoseconds since the epoch + int64 end = 2; // Nanoseconds since the epoch +} \ No newline at end of file diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.py b/src/device/service/drivers/openconfig/gnmi_pb2.py new file mode 100644 index 000000000..af67f9a8c --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi_pb2.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: gnmi.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ngnmi.proto\x12\x04gnmi\x1a\x19google/protobuf/any.proto\x1a google/protobuf/descriptor.proto\"\xa0\x01\n\tExtension\x12\x33\n\x0eregistered_ext\x18\x01 \x01(\x0b\x32\x19.gnmi.RegisteredExtensionH\x00\x12\x35\n\x12master_arbitration\x18\x02 \x01(\x0b\x32\x17.gnmi.MasterArbitrationH\x00\x12 \n\x07history\x18\x03 \x01(\x0b\x32\r.gnmi.HistoryH\x00\x42\x05\n\x03\x65xt\"A\n\x13RegisteredExtension\x12\x1d\n\x02id\x18\x01 \x01(\x0e\x32\x11.gnmi.ExtensionID\x12\x0b\n\x03msg\x18\x02 \x01(\x0c\"Q\n\x11MasterArbitration\x12\x18\n\x04role\x18\x01 \x01(\x0b\x32\n.gnmi.Role\x12\"\n\x0b\x65lection_id\x18\x02 \x01(\x0b\x32\r.gnmi.Uint128\"$\n\x07Uint128\x12\x0c\n\x04high\x18\x01 \x01(\x04\x12\x0b\n\x03low\x18\x02 \x01(\x04\"\x12\n\x04Role\x12\n\n\x02id\x18\x01 \x01(\t\"O\n\x07History\x12\x17\n\rsnapshot_time\x18\x01 \x01(\x03H\x00\x12 \n\x05range\x18\x02 \x01(\x0b\x32\x0f.gnmi.TimeRangeH\x00\x42\t\n\x07request\"\'\n\tTimeRange\x12\r\n\x05start\x18\x01 \x01(\x03\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x03\"\x94\x01\n\x0cNotification\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\x12\x1a\n\x06prefix\x18\x02 \x01(\x0b\x32\n.gnmi.Path\x12\x1c\n\x06update\x18\x04 \x03(\x0b\x32\x0c.gnmi.Update\x12\x1a\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\n.gnmi.Path\x12\x0e\n\x06\x61tomic\x18\x06 \x01(\x08J\x04\x08\x03\x10\x04R\x05\x61lias\"u\n\x06Update\x12\x18\n\x04path\x18\x01 
\x01(\x0b\x32\n.gnmi.Path\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0b.gnmi.ValueB\x02\x18\x01\x12\x1d\n\x03val\x18\x03 \x01(\x0b\x32\x10.gnmi.TypedValue\x12\x12\n\nduplicates\x18\x04 \x01(\r\"\x83\x03\n\nTypedValue\x12\x14\n\nstring_val\x18\x01 \x01(\tH\x00\x12\x11\n\x07int_val\x18\x02 \x01(\x03H\x00\x12\x12\n\x08uint_val\x18\x03 \x01(\x04H\x00\x12\x12\n\x08\x62ool_val\x18\x04 \x01(\x08H\x00\x12\x13\n\tbytes_val\x18\x05 \x01(\x0cH\x00\x12\x17\n\tfloat_val\x18\x06 \x01(\x02\x42\x02\x18\x01H\x00\x12\x14\n\ndouble_val\x18\x0e \x01(\x01H\x00\x12*\n\x0b\x64\x65\x63imal_val\x18\x07 \x01(\x0b\x32\x0f.gnmi.Decimal64B\x02\x18\x01H\x00\x12)\n\x0cleaflist_val\x18\x08 \x01(\x0b\x32\x11.gnmi.ScalarArrayH\x00\x12\'\n\x07\x61ny_val\x18\t \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x12\n\x08json_val\x18\n \x01(\x0cH\x00\x12\x17\n\rjson_ietf_val\x18\x0b \x01(\x0cH\x00\x12\x13\n\tascii_val\x18\x0c \x01(\tH\x00\x12\x15\n\x0bproto_bytes\x18\r \x01(\x0cH\x00\x42\x07\n\x05value\"Y\n\x04Path\x12\x13\n\x07\x65lement\x18\x01 \x03(\tB\x02\x18\x01\x12\x0e\n\x06origin\x18\x02 \x01(\t\x12\x1c\n\x04\x65lem\x18\x03 \x03(\x0b\x32\x0e.gnmi.PathElem\x12\x0e\n\x06target\x18\x04 \x01(\t\"j\n\x08PathElem\x12\x0c\n\x04name\x18\x01 \x01(\t\x12$\n\x03key\x18\x02 \x03(\x0b\x32\x17.gnmi.PathElem.KeyEntry\x1a*\n\x08KeyEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x05Value\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x1c\n\x04type\x18\x02 \x01(\x0e\x32\x0e.gnmi.Encoding:\x02\x18\x01\"N\n\x05\x45rror\x12\x0c\n\x04\x63ode\x18\x01 \x01(\r\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\"\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any:\x02\x18\x01\"2\n\tDecimal64\x12\x0e\n\x06\x64igits\x18\x01 \x01(\x03\x12\x11\n\tprecision\x18\x02 \x01(\r:\x02\x18\x01\"0\n\x0bScalarArray\x12!\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x10.gnmi.TypedValue\"\x99\x01\n\x10SubscribeRequest\x12+\n\tsubscribe\x18\x01 \x01(\x0b\x32\x16.gnmi.SubscriptionListH\x00\x12\x1a\n\x04poll\x18\x03 
\x01(\x0b\x32\n.gnmi.PollH\x00\x12\"\n\textension\x18\x05 \x03(\x0b\x32\x0f.gnmi.ExtensionB\t\n\x07requestJ\x04\x08\x04\x10\x05R\x07\x61liases\"\x06\n\x04Poll\"\xa4\x01\n\x11SubscribeResponse\x12$\n\x06update\x18\x01 \x01(\x0b\x32\x12.gnmi.NotificationH\x00\x12\x17\n\rsync_response\x18\x03 \x01(\x08H\x00\x12 \n\x05\x65rror\x18\x04 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01H\x00\x12\"\n\textension\x18\x05 \x03(\x0b\x32\x0f.gnmi.ExtensionB\n\n\x08response\"\xd5\x02\n\x10SubscriptionList\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12(\n\x0csubscription\x18\x02 \x03(\x0b\x32\x12.gnmi.Subscription\x12\x1d\n\x03qos\x18\x04 \x01(\x0b\x32\x10.gnmi.QOSMarking\x12)\n\x04mode\x18\x05 \x01(\x0e\x32\x1b.gnmi.SubscriptionList.Mode\x12\x19\n\x11\x61llow_aggregation\x18\x06 \x01(\x08\x12#\n\nuse_models\x18\x07 \x03(\x0b\x32\x0f.gnmi.ModelData\x12 \n\x08\x65ncoding\x18\x08 \x01(\x0e\x32\x0e.gnmi.Encoding\x12\x14\n\x0cupdates_only\x18\t \x01(\x08\"&\n\x04Mode\x12\n\n\x06STREAM\x10\x00\x12\x08\n\x04ONCE\x10\x01\x12\x08\n\x04POLL\x10\x02J\x04\x08\x03\x10\x04R\x0buse_aliases\"\x9f\x01\n\x0cSubscription\x12\x18\n\x04path\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12$\n\x04mode\x18\x02 \x01(\x0e\x32\x16.gnmi.SubscriptionMode\x12\x17\n\x0fsample_interval\x18\x03 \x01(\x04\x12\x1a\n\x12suppress_redundant\x18\x04 \x01(\x08\x12\x1a\n\x12heartbeat_interval\x18\x05 \x01(\x04\"\x1d\n\nQOSMarking\x12\x0f\n\x07marking\x18\x01 \x01(\r\"\xa5\x01\n\nSetRequest\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\x1a\n\x06\x64\x65lete\x18\x02 \x03(\x0b\x32\n.gnmi.Path\x12\x1d\n\x07replace\x18\x03 \x03(\x0b\x32\x0c.gnmi.Update\x12\x1c\n\x06update\x18\x04 \x03(\x0b\x32\x0c.gnmi.Update\x12\"\n\textension\x18\x05 \x03(\x0b\x32\x0f.gnmi.Extension\"\xa8\x01\n\x0bSetResponse\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12$\n\x08response\x18\x02 \x03(\x0b\x32\x12.gnmi.UpdateResult\x12 \n\x07message\x18\x03 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\x12\x11\n\ttimestamp\x18\x04 
\x01(\x03\x12\"\n\textension\x18\x05 \x03(\x0b\x32\x0f.gnmi.Extension\"\xca\x01\n\x0cUpdateResult\x12\x15\n\ttimestamp\x18\x01 \x01(\x03\x42\x02\x18\x01\x12\x18\n\x04path\x18\x02 \x01(\x0b\x32\n.gnmi.Path\x12 \n\x07message\x18\x03 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\x12(\n\x02op\x18\x04 \x01(\x0e\x32\x1c.gnmi.UpdateResult.Operation\"=\n\tOperation\x12\x0b\n\x07INVALID\x10\x00\x12\n\n\x06\x44\x45LETE\x10\x01\x12\x0b\n\x07REPLACE\x10\x02\x12\n\n\x06UPDATE\x10\x03\"\x93\x02\n\nGetRequest\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\x18\n\x04path\x18\x02 \x03(\x0b\x32\n.gnmi.Path\x12\'\n\x04type\x18\x03 \x01(\x0e\x32\x19.gnmi.GetRequest.DataType\x12 \n\x08\x65ncoding\x18\x05 \x01(\x0e\x32\x0e.gnmi.Encoding\x12#\n\nuse_models\x18\x06 \x03(\x0b\x32\x0f.gnmi.ModelData\x12\"\n\textension\x18\x07 \x03(\x0b\x32\x0f.gnmi.Extension\";\n\x08\x44\x61taType\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x43ONFIG\x10\x01\x12\t\n\x05STATE\x10\x02\x12\x0f\n\x0bOPERATIONAL\x10\x03\"{\n\x0bGetResponse\x12(\n\x0cnotification\x18\x01 \x03(\x0b\x32\x12.gnmi.Notification\x12\x1e\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\x12\"\n\textension\x18\x03 \x03(\x0b\x32\x0f.gnmi.Extension\"7\n\x11\x43\x61pabilityRequest\x12\"\n\textension\x18\x01 \x03(\x0b\x32\x0f.gnmi.Extension\"\xa6\x01\n\x12\x43\x61pabilityResponse\x12)\n\x10supported_models\x18\x01 \x03(\x0b\x32\x0f.gnmi.ModelData\x12+\n\x13supported_encodings\x18\x02 \x03(\x0e\x32\x0e.gnmi.Encoding\x12\x14\n\x0cgNMI_version\x18\x03 \x01(\t\x12\"\n\textension\x18\x04 \x03(\x0b\x32\x0f.gnmi.Extension\"@\n\tModelData\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0corganization\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 
\x01(\t*3\n\x0b\x45xtensionID\x12\r\n\tEID_UNSET\x10\x00\x12\x15\n\x10\x45ID_EXPERIMENTAL\x10\xe7\x07*D\n\x08\x45ncoding\x12\x08\n\x04JSON\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\t\n\x05PROTO\x10\x02\x12\t\n\x05\x41SCII\x10\x03\x12\r\n\tJSON_IETF\x10\x04*A\n\x10SubscriptionMode\x12\x12\n\x0eTARGET_DEFINED\x10\x00\x12\r\n\tON_CHANGE\x10\x01\x12\n\n\x06SAMPLE\x10\x02\x32\xe3\x01\n\x04gNMI\x12\x41\n\x0c\x43\x61pabilities\x12\x17.gnmi.CapabilityRequest\x1a\x18.gnmi.CapabilityResponse\x12*\n\x03Get\x12\x10.gnmi.GetRequest\x1a\x11.gnmi.GetResponse\x12*\n\x03Set\x12\x10.gnmi.SetRequest\x1a\x11.gnmi.SetResponse\x12@\n\tSubscribe\x12\x16.gnmi.SubscribeRequest\x1a\x17.gnmi.SubscribeResponse(\x01\x30\x01:3\n\x0cgnmi_service\x12\x1c.google.protobuf.FileOptions\x18\xe9\x07 \x01(\tBS\n\x15\x63om.github.gnmi.protoB\tGnmiProtoP\x01Z%github.com/openconfig/gnmi/proto/gnmi\xca>\x05\x30.8.0b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'gnmi_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gnmi_service) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\025com.github.gnmi.protoB\tGnmiProtoP\001Z%github.com/openconfig/gnmi/proto/gnmi\312>\0050.8.0' + _UPDATE.fields_by_name['value']._options = None + _UPDATE.fields_by_name['value']._serialized_options = b'\030\001' + _TYPEDVALUE.fields_by_name['float_val']._options = None + _TYPEDVALUE.fields_by_name['float_val']._serialized_options = b'\030\001' + _TYPEDVALUE.fields_by_name['decimal_val']._options = None + _TYPEDVALUE.fields_by_name['decimal_val']._serialized_options = b'\030\001' + _PATH.fields_by_name['element']._options = None + _PATH.fields_by_name['element']._serialized_options = b'\030\001' + _PATHELEM_KEYENTRY._options = None + _PATHELEM_KEYENTRY._serialized_options = b'8\001' + _VALUE._options = None + _VALUE._serialized_options = 
b'\030\001' + _ERROR._options = None + _ERROR._serialized_options = b'\030\001' + _DECIMAL64._options = None + _DECIMAL64._serialized_options = b'\030\001' + _SUBSCRIBERESPONSE.fields_by_name['error']._options = None + _SUBSCRIBERESPONSE.fields_by_name['error']._serialized_options = b'\030\001' + _SETRESPONSE.fields_by_name['message']._options = None + _SETRESPONSE.fields_by_name['message']._serialized_options = b'\030\001' + _UPDATERESULT.fields_by_name['timestamp']._options = None + _UPDATERESULT.fields_by_name['timestamp']._serialized_options = b'\030\001' + _UPDATERESULT.fields_by_name['message']._options = None + _UPDATERESULT.fields_by_name['message']._serialized_options = b'\030\001' + _GETRESPONSE.fields_by_name['error']._options = None + _GETRESPONSE.fields_by_name['error']._serialized_options = b'\030\001' + _EXTENSIONID._serialized_start=3780 + _EXTENSIONID._serialized_end=3831 + _ENCODING._serialized_start=3833 + _ENCODING._serialized_end=3901 + _SUBSCRIPTIONMODE._serialized_start=3903 + _SUBSCRIPTIONMODE._serialized_end=3968 + _EXTENSION._serialized_start=82 + _EXTENSION._serialized_end=242 + _REGISTEREDEXTENSION._serialized_start=244 + _REGISTEREDEXTENSION._serialized_end=309 + _MASTERARBITRATION._serialized_start=311 + _MASTERARBITRATION._serialized_end=392 + _UINT128._serialized_start=394 + _UINT128._serialized_end=430 + _ROLE._serialized_start=432 + _ROLE._serialized_end=450 + _HISTORY._serialized_start=452 + _HISTORY._serialized_end=531 + _TIMERANGE._serialized_start=533 + _TIMERANGE._serialized_end=572 + _NOTIFICATION._serialized_start=575 + _NOTIFICATION._serialized_end=723 + _UPDATE._serialized_start=725 + _UPDATE._serialized_end=842 + _TYPEDVALUE._serialized_start=845 + _TYPEDVALUE._serialized_end=1232 + _PATH._serialized_start=1234 + _PATH._serialized_end=1323 + _PATHELEM._serialized_start=1325 + _PATHELEM._serialized_end=1431 + _PATHELEM_KEYENTRY._serialized_start=1389 + _PATHELEM_KEYENTRY._serialized_end=1431 + _VALUE._serialized_start=1433 
+ _VALUE._serialized_end=1489 + _ERROR._serialized_start=1491 + _ERROR._serialized_end=1569 + _DECIMAL64._serialized_start=1571 + _DECIMAL64._serialized_end=1621 + _SCALARARRAY._serialized_start=1623 + _SCALARARRAY._serialized_end=1671 + _SUBSCRIBEREQUEST._serialized_start=1674 + _SUBSCRIBEREQUEST._serialized_end=1827 + _POLL._serialized_start=1829 + _POLL._serialized_end=1835 + _SUBSCRIBERESPONSE._serialized_start=1838 + _SUBSCRIBERESPONSE._serialized_end=2002 + _SUBSCRIPTIONLIST._serialized_start=2005 + _SUBSCRIPTIONLIST._serialized_end=2346 + _SUBSCRIPTIONLIST_MODE._serialized_start=2289 + _SUBSCRIPTIONLIST_MODE._serialized_end=2327 + _SUBSCRIPTION._serialized_start=2349 + _SUBSCRIPTION._serialized_end=2508 + _QOSMARKING._serialized_start=2510 + _QOSMARKING._serialized_end=2539 + _SETREQUEST._serialized_start=2542 + _SETREQUEST._serialized_end=2707 + _SETRESPONSE._serialized_start=2710 + _SETRESPONSE._serialized_end=2878 + _UPDATERESULT._serialized_start=2881 + _UPDATERESULT._serialized_end=3083 + _UPDATERESULT_OPERATION._serialized_start=3022 + _UPDATERESULT_OPERATION._serialized_end=3083 + _GETREQUEST._serialized_start=3086 + _GETREQUEST._serialized_end=3361 + _GETREQUEST_DATATYPE._serialized_start=3302 + _GETREQUEST_DATATYPE._serialized_end=3361 + _GETRESPONSE._serialized_start=3363 + _GETRESPONSE._serialized_end=3486 + _CAPABILITYREQUEST._serialized_start=3488 + _CAPABILITYREQUEST._serialized_end=3543 + _CAPABILITYRESPONSE._serialized_start=3546 + _CAPABILITYRESPONSE._serialized_end=3712 + _MODELDATA._serialized_start=3714 + _MODELDATA._serialized_end=3778 + _GNMI._serialized_start=3971 + _GNMI._serialized_end=4198 +# @@protoc_insertion_point(module_scope) diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.py.old b/src/device/service/drivers/openconfig/gnmi_pb2.py.old new file mode 100644 index 000000000..313674f8c --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi_pb2.py.old @@ -0,0 +1,2037 @@ +# Generated by the protocol buffer 
compiler. DO NOT EDIT! +# source: proto/gnmi/gnmi.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='proto/gnmi/gnmi.proto', + package='gnmi', + syntax='proto3', + serialized_pb=_b('\n\x15proto/gnmi/gnmi.proto\x12\x04gnmi\x1a\x19google/protobuf/any.proto\x1a google/protobuf/descriptor.proto\"\x86\x01\n\x0cNotification\x12\x11\n\ttimestamp\x18\x01 \x01(\x03\x12\x1a\n\x06prefix\x18\x02 \x01(\x0b\x32\n.gnmi.Path\x12\r\n\x05\x61lias\x18\x03 \x01(\t\x12\x1c\n\x06update\x18\x04 \x03(\x0b\x32\x0c.gnmi.Update\x12\x1a\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\n.gnmi.Path\"u\n\x06Update\x12\x18\n\x04path\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\x1e\n\x05value\x18\x02 \x01(\x0b\x32\x0b.gnmi.ValueB\x02\x18\x01\x12\x1d\n\x03val\x18\x03 \x01(\x0b\x32\x10.gnmi.TypedValue\x12\x12\n\nduplicates\x18\x04 \x01(\r\"\xce\x02\n\nTypedValue\x12\x14\n\nstring_val\x18\x01 \x01(\tH\x00\x12\x11\n\x07int_val\x18\x02 \x01(\x03H\x00\x12\x12\n\x08uint_val\x18\x03 \x01(\x04H\x00\x12\x12\n\x08\x62ool_val\x18\x04 \x01(\x08H\x00\x12\x13\n\tbytes_val\x18\x05 \x01(\x0cH\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12&\n\x0b\x64\x65\x63imal_val\x18\x07 \x01(\x0b\x32\x0f.gnmi.Decimal64H\x00\x12)\n\x0cleaflist_val\x18\x08 \x01(\x0b\x32\x11.gnmi.ScalarArrayH\x00\x12\'\n\x07\x61ny_val\x18\t 
\x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x12\n\x08json_val\x18\n \x01(\x0cH\x00\x12\x17\n\rjson_ietf_val\x18\x0b \x01(\x0cH\x00\x12\x13\n\tascii_val\x18\x0c \x01(\tH\x00\x42\x07\n\x05value\"Y\n\x04Path\x12\x13\n\x07\x65lement\x18\x01 \x03(\tB\x02\x18\x01\x12\x0e\n\x06origin\x18\x02 \x01(\t\x12\x1c\n\x04\x65lem\x18\x03 \x03(\x0b\x32\x0e.gnmi.PathElem\x12\x0e\n\x06target\x18\x04 \x01(\t\"j\n\x08PathElem\x12\x0c\n\x04name\x18\x01 \x01(\t\x12$\n\x03key\x18\x02 \x03(\x0b\x32\x17.gnmi.PathElem.KeyEntry\x1a*\n\x08KeyEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"8\n\x05Value\x12\r\n\x05value\x18\x01 \x01(\x0c\x12\x1c\n\x04type\x18\x02 \x01(\x0e\x32\x0e.gnmi.Encoding:\x02\x18\x01\"N\n\x05\x45rror\x12\x0c\n\x04\x63ode\x18\x01 \x01(\r\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\"\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any:\x02\x18\x01\".\n\tDecimal64\x12\x0e\n\x06\x64igits\x18\x01 \x01(\x04\x12\x11\n\tprecision\x18\x02 \x01(\r\"0\n\x0bScalarArray\x12!\n\x07\x65lement\x18\x01 \x03(\x0b\x32\x10.gnmi.TypedValue\"\x8a\x01\n\x10SubscribeRequest\x12+\n\tsubscribe\x18\x01 \x01(\x0b\x32\x16.gnmi.SubscriptionListH\x00\x12\x1a\n\x04poll\x18\x03 \x01(\x0b\x32\n.gnmi.PollH\x00\x12\"\n\x07\x61liases\x18\x04 \x01(\x0b\x32\x0f.gnmi.AliasListH\x00\x42\t\n\x07request\"\x06\n\x04Poll\"\x80\x01\n\x11SubscribeResponse\x12$\n\x06update\x18\x01 \x01(\x0b\x32\x12.gnmi.NotificationH\x00\x12\x17\n\rsync_response\x18\x03 \x01(\x08H\x00\x12 \n\x05\x65rror\x18\x04 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01H\x00\x42\n\n\x08response\"\xd7\x02\n\x10SubscriptionList\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12(\n\x0csubscription\x18\x02 \x03(\x0b\x32\x12.gnmi.Subscription\x12\x13\n\x0buse_aliases\x18\x03 \x01(\x08\x12\x1d\n\x03qos\x18\x04 \x01(\x0b\x32\x10.gnmi.QOSMarking\x12)\n\x04mode\x18\x05 \x01(\x0e\x32\x1b.gnmi.SubscriptionList.Mode\x12\x19\n\x11\x61llow_aggregation\x18\x06 \x01(\x08\x12#\n\nuse_models\x18\x07 
\x03(\x0b\x32\x0f.gnmi.ModelData\x12 \n\x08\x65ncoding\x18\x08 \x01(\x0e\x32\x0e.gnmi.Encoding\x12\x14\n\x0cupdates_only\x18\t \x01(\x08\"&\n\x04Mode\x12\n\n\x06STREAM\x10\x00\x12\x08\n\x04ONCE\x10\x01\x12\x08\n\x04POLL\x10\x02\"\x9f\x01\n\x0cSubscription\x12\x18\n\x04path\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12$\n\x04mode\x18\x02 \x01(\x0e\x32\x16.gnmi.SubscriptionMode\x12\x17\n\x0fsample_interval\x18\x03 \x01(\x04\x12\x1a\n\x12suppress_redundant\x18\x04 \x01(\x08\x12\x1a\n\x12heartbeat_interval\x18\x05 \x01(\x04\"\x1d\n\nQOSMarking\x12\x0f\n\x07marking\x18\x01 \x01(\r\"0\n\x05\x41lias\x12\x18\n\x04path\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\r\n\x05\x61lias\x18\x02 \x01(\t\"\'\n\tAliasList\x12\x1a\n\x05\x61lias\x18\x01 \x03(\x0b\x32\x0b.gnmi.Alias\"\x81\x01\n\nSetRequest\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\x1a\n\x06\x64\x65lete\x18\x02 \x03(\x0b\x32\n.gnmi.Path\x12\x1d\n\x07replace\x18\x03 \x03(\x0b\x32\x0c.gnmi.Update\x12\x1c\n\x06update\x18\x04 \x03(\x0b\x32\x0c.gnmi.Update\"\x84\x01\n\x0bSetResponse\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12$\n\x08response\x18\x02 \x03(\x0b\x32\x12.gnmi.UpdateResult\x12 \n\x07message\x18\x03 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\"\xca\x01\n\x0cUpdateResult\x12\x15\n\ttimestamp\x18\x01 \x01(\x03\x42\x02\x18\x01\x12\x18\n\x04path\x18\x02 \x01(\x0b\x32\n.gnmi.Path\x12 \n\x07message\x18\x03 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\x12(\n\x02op\x18\x04 \x01(\x0e\x32\x1c.gnmi.UpdateResult.Operation\"=\n\tOperation\x12\x0b\n\x07INVALID\x10\x00\x12\n\n\x06\x44\x45LETE\x10\x01\x12\x0b\n\x07REPLACE\x10\x02\x12\n\n\x06UPDATE\x10\x03\"\xef\x01\n\nGetRequest\x12\x1a\n\x06prefix\x18\x01 \x01(\x0b\x32\n.gnmi.Path\x12\x18\n\x04path\x18\x02 \x03(\x0b\x32\n.gnmi.Path\x12\'\n\x04type\x18\x03 \x01(\x0e\x32\x19.gnmi.GetRequest.DataType\x12 \n\x08\x65ncoding\x18\x05 \x01(\x0e\x32\x0e.gnmi.Encoding\x12#\n\nuse_models\x18\x06 
\x03(\x0b\x32\x0f.gnmi.ModelData\";\n\x08\x44\x61taType\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x43ONFIG\x10\x01\x12\t\n\x05STATE\x10\x02\x12\x0f\n\x0bOPERATIONAL\x10\x03\"W\n\x0bGetResponse\x12(\n\x0cnotification\x18\x01 \x03(\x0b\x32\x12.gnmi.Notification\x12\x1e\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x0b.gnmi.ErrorB\x02\x18\x01\"\x13\n\x11\x43\x61pabilityRequest\"\x82\x01\n\x12\x43\x61pabilityResponse\x12)\n\x10supported_models\x18\x01 \x03(\x0b\x32\x0f.gnmi.ModelData\x12+\n\x13supported_encodings\x18\x02 \x03(\x0e\x32\x0e.gnmi.Encoding\x12\x14\n\x0cgNMI_version\x18\x03 \x01(\t\"@\n\tModelData\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0corganization\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t*D\n\x08\x45ncoding\x12\x08\n\x04JSON\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\t\n\x05PROTO\x10\x02\x12\t\n\x05\x41SCII\x10\x03\x12\r\n\tJSON_IETF\x10\x04*A\n\x10SubscriptionMode\x12\x12\n\x0eTARGET_DEFINED\x10\x00\x12\r\n\tON_CHANGE\x10\x01\x12\n\n\x06SAMPLE\x10\x02\x32\xe3\x01\n\x04gNMI\x12\x41\n\x0c\x43\x61pabilities\x12\x17.gnmi.CapabilityRequest\x1a\x18.gnmi.CapabilityResponse\x12*\n\x03Get\x12\x10.gnmi.GetRequest\x1a\x11.gnmi.GetResponse\x12*\n\x03Set\x12\x10.gnmi.SetRequest\x1a\x11.gnmi.SetResponse\x12@\n\tSubscribe\x12\x16.gnmi.SubscribeRequest\x1a\x17.gnmi.SubscribeResponse(\x01\x30\x01:3\n\x0cgnmi_service\x12\x1c.google.protobuf.FileOptions\x18\xe9\x07 \x01(\tB\x08\xca>\x05\x30.5.0b\x06proto3') + , + dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + +_ENCODING = _descriptor.EnumDescriptor( + name='Encoding', + full_name='gnmi.Encoding', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='JSON', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BYTES', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROTO', index=2, number=2, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='ASCII', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JSON_IETF', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3053, + serialized_end=3121, +) +_sym_db.RegisterEnumDescriptor(_ENCODING) + +Encoding = enum_type_wrapper.EnumTypeWrapper(_ENCODING) +_SUBSCRIPTIONMODE = _descriptor.EnumDescriptor( + name='SubscriptionMode', + full_name='gnmi.SubscriptionMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TARGET_DEFINED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ON_CHANGE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SAMPLE', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3123, + serialized_end=3188, +) +_sym_db.RegisterEnumDescriptor(_SUBSCRIPTIONMODE) + +SubscriptionMode = enum_type_wrapper.EnumTypeWrapper(_SUBSCRIPTIONMODE) +JSON = 0 +BYTES = 1 +PROTO = 2 +ASCII = 3 +JSON_IETF = 4 +TARGET_DEFINED = 0 +ON_CHANGE = 1 +SAMPLE = 2 + +GNMI_SERVICE_FIELD_NUMBER = 1001 +gnmi_service = _descriptor.FieldDescriptor( + name='gnmi_service', full_name='gnmi.gnmi_service', index=0, + number=1001, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + +_SUBSCRIPTIONLIST_MODE = _descriptor.EnumDescriptor( + name='Mode', + full_name='gnmi.SubscriptionList.Mode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STREAM', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ONCE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POLL', index=2, 
number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1706, + serialized_end=1744, +) +_sym_db.RegisterEnumDescriptor(_SUBSCRIPTIONLIST_MODE) + +_UPDATERESULT_OPERATION = _descriptor.EnumDescriptor( + name='Operation', + full_name='gnmi.UpdateResult.Operation', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='INVALID', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DELETE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPLACE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UPDATE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2439, + serialized_end=2500, +) +_sym_db.RegisterEnumDescriptor(_UPDATERESULT_OPERATION) + +_GETREQUEST_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='gnmi.GetRequest.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ALL', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONFIG', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STATE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OPERATIONAL', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2683, + serialized_end=2742, +) +_sym_db.RegisterEnumDescriptor(_GETREQUEST_DATATYPE) + + +_NOTIFICATION = _descriptor.Descriptor( + name='Notification', + full_name='gnmi.Notification', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timestamp', full_name='gnmi.Notification.timestamp', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='prefix', full_name='gnmi.Notification.prefix', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='alias', full_name='gnmi.Notification.alias', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update', full_name='gnmi.Notification.update', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete', full_name='gnmi.Notification.delete', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=93, + serialized_end=227, +) + + +_UPDATE = _descriptor.Descriptor( + name='Update', + full_name='gnmi.Update', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='gnmi.Update.path', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='value', full_name='gnmi.Update.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + _descriptor.FieldDescriptor( + name='val', full_name='gnmi.Update.val', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='duplicates', full_name='gnmi.Update.duplicates', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=229, + serialized_end=346, +) + + +_TYPEDVALUE = _descriptor.Descriptor( + name='TypedValue', + full_name='gnmi.TypedValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='string_val', full_name='gnmi.TypedValue.string_val', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int_val', full_name='gnmi.TypedValue.int_val', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='uint_val', full_name='gnmi.TypedValue.uint_val', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bool_val', full_name='gnmi.TypedValue.bool_val', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bytes_val', full_name='gnmi.TypedValue.bytes_val', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='float_val', full_name='gnmi.TypedValue.float_val', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='decimal_val', full_name='gnmi.TypedValue.decimal_val', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='leaflist_val', full_name='gnmi.TypedValue.leaflist_val', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='any_val', full_name='gnmi.TypedValue.any_val', index=8, + number=9, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_val', full_name='gnmi.TypedValue.json_val', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_ietf_val', full_name='gnmi.TypedValue.json_ietf_val', index=10, + number=11, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ascii_val', full_name='gnmi.TypedValue.ascii_val', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='value', full_name='gnmi.TypedValue.value', + index=0, containing_type=None, fields=[]), + ], + serialized_start=349, + serialized_end=683, +) + + +_PATH = _descriptor.Descriptor( + name='Path', + full_name='gnmi.Path', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='element', full_name='gnmi.Path.element', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + _descriptor.FieldDescriptor( 
+ name='origin', full_name='gnmi.Path.origin', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='elem', full_name='gnmi.Path.elem', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='target', full_name='gnmi.Path.target', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=685, + serialized_end=774, +) + + +_PATHELEM_KEYENTRY = _descriptor.Descriptor( + name='KeyEntry', + full_name='gnmi.PathElem.KeyEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='gnmi.PathElem.KeyEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='gnmi.PathElem.KeyEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=840, + serialized_end=882, +) + +_PATHELEM = _descriptor.Descriptor( + name='PathElem', + full_name='gnmi.PathElem', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='gnmi.PathElem.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='gnmi.PathElem.key', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PATHELEM_KEYENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=776, + serialized_end=882, +) + + +_VALUE = _descriptor.Descriptor( + name='Value', + full_name='gnmi.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='gnmi.Value.value', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='gnmi.Value.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\030\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=884, + serialized_end=940, +) + + +_ERROR = _descriptor.Descriptor( + name='Error', + full_name='gnmi.Error', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='code', full_name='gnmi.Error.code', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='gnmi.Error.message', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='data', full_name='gnmi.Error.data', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\030\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=942, + serialized_end=1020, +) + + +_DECIMAL64 = _descriptor.Descriptor( + name='Decimal64', + full_name='gnmi.Decimal64', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='digits', full_name='gnmi.Decimal64.digits', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='precision', full_name='gnmi.Decimal64.precision', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1022, + serialized_end=1068, +) + + +_SCALARARRAY = _descriptor.Descriptor( + name='ScalarArray', + full_name='gnmi.ScalarArray', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='element', full_name='gnmi.ScalarArray.element', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1070, + serialized_end=1118, +) + + +_SUBSCRIBEREQUEST = _descriptor.Descriptor( + name='SubscribeRequest', + full_name='gnmi.SubscribeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscribe', full_name='gnmi.SubscribeRequest.subscribe', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='poll', full_name='gnmi.SubscribeRequest.poll', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='aliases', full_name='gnmi.SubscribeRequest.aliases', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='request', full_name='gnmi.SubscribeRequest.request', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1121, + serialized_end=1259, +) + + +_POLL = _descriptor.Descriptor( + name='Poll', + full_name='gnmi.Poll', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1261, + serialized_end=1267, +) + + +_SUBSCRIBERESPONSE = _descriptor.Descriptor( + name='SubscribeResponse', + full_name='gnmi.SubscribeResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='update', full_name='gnmi.SubscribeResponse.update', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sync_response', full_name='gnmi.SubscribeResponse.sync_response', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='error', full_name='gnmi.SubscribeResponse.error', index=2, + 
number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='response', full_name='gnmi.SubscribeResponse.response', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1270, + serialized_end=1398, +) + + +_SUBSCRIPTIONLIST = _descriptor.Descriptor( + name='SubscriptionList', + full_name='gnmi.SubscriptionList', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prefix', full_name='gnmi.SubscriptionList.prefix', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='subscription', full_name='gnmi.SubscriptionList.subscription', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='use_aliases', full_name='gnmi.SubscriptionList.use_aliases', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='qos', full_name='gnmi.SubscriptionList.qos', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, 
+ is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mode', full_name='gnmi.SubscriptionList.mode', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_aggregation', full_name='gnmi.SubscriptionList.allow_aggregation', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='use_models', full_name='gnmi.SubscriptionList.use_models', index=6, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding', full_name='gnmi.SubscriptionList.encoding', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='updates_only', full_name='gnmi.SubscriptionList.updates_only', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SUBSCRIPTIONLIST_MODE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1401, + serialized_end=1744, +) + + +_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='gnmi.Subscription', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='gnmi.Subscription.path', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mode', full_name='gnmi.Subscription.mode', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sample_interval', full_name='gnmi.Subscription.sample_interval', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='suppress_redundant', full_name='gnmi.Subscription.suppress_redundant', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='heartbeat_interval', full_name='gnmi.Subscription.heartbeat_interval', index=4, + number=5, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1747, + serialized_end=1906, +) + + +_QOSMARKING = _descriptor.Descriptor( + name='QOSMarking', + full_name='gnmi.QOSMarking', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='marking', full_name='gnmi.QOSMarking.marking', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1908, + serialized_end=1937, +) + + +_ALIAS = _descriptor.Descriptor( + name='Alias', + full_name='gnmi.Alias', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='gnmi.Alias.path', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='alias', full_name='gnmi.Alias.alias', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1939, + serialized_end=1987, +) + + +_ALIASLIST = _descriptor.Descriptor( + name='AliasList', + full_name='gnmi.AliasList', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='alias', full_name='gnmi.AliasList.alias', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ 
+ ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1989, + serialized_end=2028, +) + + +_SETREQUEST = _descriptor.Descriptor( + name='SetRequest', + full_name='gnmi.SetRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prefix', full_name='gnmi.SetRequest.prefix', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='delete', full_name='gnmi.SetRequest.delete', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='replace', full_name='gnmi.SetRequest.replace', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update', full_name='gnmi.SetRequest.update', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2031, + serialized_end=2160, +) + + +_SETRESPONSE = _descriptor.Descriptor( + name='SetResponse', + full_name='gnmi.SetResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prefix', full_name='gnmi.SetResponse.prefix', 
index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='response', full_name='gnmi.SetResponse.response', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='gnmi.SetResponse.message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + _descriptor.FieldDescriptor( + name='timestamp', full_name='gnmi.SetResponse.timestamp', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2163, + serialized_end=2295, +) + + +_UPDATERESULT = _descriptor.Descriptor( + name='UpdateResult', + full_name='gnmi.UpdateResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timestamp', full_name='gnmi.UpdateResult.timestamp', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + _descriptor.FieldDescriptor( + 
name='path', full_name='gnmi.UpdateResult.path', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='gnmi.UpdateResult.message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + _descriptor.FieldDescriptor( + name='op', full_name='gnmi.UpdateResult.op', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _UPDATERESULT_OPERATION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2298, + serialized_end=2500, +) + + +_GETREQUEST = _descriptor.Descriptor( + name='GetRequest', + full_name='gnmi.GetRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='prefix', full_name='gnmi.GetRequest.prefix', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='path', full_name='gnmi.GetRequest.path', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', 
full_name='gnmi.GetRequest.type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding', full_name='gnmi.GetRequest.encoding', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='use_models', full_name='gnmi.GetRequest.use_models', index=4, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _GETREQUEST_DATATYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2503, + serialized_end=2742, +) + + +_GETRESPONSE = _descriptor.Descriptor( + name='GetResponse', + full_name='gnmi.GetResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='notification', full_name='gnmi.GetResponse.notification', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='error', full_name='gnmi.GetResponse.error', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2744, + serialized_end=2831, +) + + +_CAPABILITYREQUEST = _descriptor.Descriptor( + name='CapabilityRequest', + full_name='gnmi.CapabilityRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2833, + serialized_end=2852, +) + + +_CAPABILITYRESPONSE = _descriptor.Descriptor( + name='CapabilityResponse', + full_name='gnmi.CapabilityResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='supported_models', full_name='gnmi.CapabilityResponse.supported_models', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='supported_encodings', full_name='gnmi.CapabilityResponse.supported_encodings', index=1, + number=2, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gNMI_version', full_name='gnmi.CapabilityResponse.gNMI_version', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2855, + serialized_end=2985, +) + + +_MODELDATA = 
_descriptor.Descriptor( + name='ModelData', + full_name='gnmi.ModelData', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='gnmi.ModelData.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='organization', full_name='gnmi.ModelData.organization', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='gnmi.ModelData.version', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2987, + serialized_end=3051, +) + +_NOTIFICATION.fields_by_name['prefix'].message_type = _PATH +_NOTIFICATION.fields_by_name['update'].message_type = _UPDATE +_NOTIFICATION.fields_by_name['delete'].message_type = _PATH +_UPDATE.fields_by_name['path'].message_type = _PATH +_UPDATE.fields_by_name['value'].message_type = _VALUE +_UPDATE.fields_by_name['val'].message_type = _TYPEDVALUE +_TYPEDVALUE.fields_by_name['decimal_val'].message_type = _DECIMAL64 +_TYPEDVALUE.fields_by_name['leaflist_val'].message_type = _SCALARARRAY +_TYPEDVALUE.fields_by_name['any_val'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + 
_TYPEDVALUE.fields_by_name['string_val']) +_TYPEDVALUE.fields_by_name['string_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['int_val']) +_TYPEDVALUE.fields_by_name['int_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['uint_val']) +_TYPEDVALUE.fields_by_name['uint_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['bool_val']) +_TYPEDVALUE.fields_by_name['bool_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['bytes_val']) +_TYPEDVALUE.fields_by_name['bytes_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['float_val']) +_TYPEDVALUE.fields_by_name['float_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['decimal_val']) +_TYPEDVALUE.fields_by_name['decimal_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['leaflist_val']) +_TYPEDVALUE.fields_by_name['leaflist_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['any_val']) +_TYPEDVALUE.fields_by_name['any_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['json_val']) +_TYPEDVALUE.fields_by_name['json_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['json_ietf_val']) 
+_TYPEDVALUE.fields_by_name['json_ietf_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_TYPEDVALUE.oneofs_by_name['value'].fields.append( + _TYPEDVALUE.fields_by_name['ascii_val']) +_TYPEDVALUE.fields_by_name['ascii_val'].containing_oneof = _TYPEDVALUE.oneofs_by_name['value'] +_PATH.fields_by_name['elem'].message_type = _PATHELEM +_PATHELEM_KEYENTRY.containing_type = _PATHELEM +_PATHELEM.fields_by_name['key'].message_type = _PATHELEM_KEYENTRY +_VALUE.fields_by_name['type'].enum_type = _ENCODING +_ERROR.fields_by_name['data'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_SCALARARRAY.fields_by_name['element'].message_type = _TYPEDVALUE +_SUBSCRIBEREQUEST.fields_by_name['subscribe'].message_type = _SUBSCRIPTIONLIST +_SUBSCRIBEREQUEST.fields_by_name['poll'].message_type = _POLL +_SUBSCRIBEREQUEST.fields_by_name['aliases'].message_type = _ALIASLIST +_SUBSCRIBEREQUEST.oneofs_by_name['request'].fields.append( + _SUBSCRIBEREQUEST.fields_by_name['subscribe']) +_SUBSCRIBEREQUEST.fields_by_name['subscribe'].containing_oneof = _SUBSCRIBEREQUEST.oneofs_by_name['request'] +_SUBSCRIBEREQUEST.oneofs_by_name['request'].fields.append( + _SUBSCRIBEREQUEST.fields_by_name['poll']) +_SUBSCRIBEREQUEST.fields_by_name['poll'].containing_oneof = _SUBSCRIBEREQUEST.oneofs_by_name['request'] +_SUBSCRIBEREQUEST.oneofs_by_name['request'].fields.append( + _SUBSCRIBEREQUEST.fields_by_name['aliases']) +_SUBSCRIBEREQUEST.fields_by_name['aliases'].containing_oneof = _SUBSCRIBEREQUEST.oneofs_by_name['request'] +_SUBSCRIBERESPONSE.fields_by_name['update'].message_type = _NOTIFICATION +_SUBSCRIBERESPONSE.fields_by_name['error'].message_type = _ERROR +_SUBSCRIBERESPONSE.oneofs_by_name['response'].fields.append( + _SUBSCRIBERESPONSE.fields_by_name['update']) +_SUBSCRIBERESPONSE.fields_by_name['update'].containing_oneof = _SUBSCRIBERESPONSE.oneofs_by_name['response'] +_SUBSCRIBERESPONSE.oneofs_by_name['response'].fields.append( + _SUBSCRIBERESPONSE.fields_by_name['sync_response']) 
+_SUBSCRIBERESPONSE.fields_by_name['sync_response'].containing_oneof = _SUBSCRIBERESPONSE.oneofs_by_name['response'] +_SUBSCRIBERESPONSE.oneofs_by_name['response'].fields.append( + _SUBSCRIBERESPONSE.fields_by_name['error']) +_SUBSCRIBERESPONSE.fields_by_name['error'].containing_oneof = _SUBSCRIBERESPONSE.oneofs_by_name['response'] +_SUBSCRIPTIONLIST.fields_by_name['prefix'].message_type = _PATH +_SUBSCRIPTIONLIST.fields_by_name['subscription'].message_type = _SUBSCRIPTION +_SUBSCRIPTIONLIST.fields_by_name['qos'].message_type = _QOSMARKING +_SUBSCRIPTIONLIST.fields_by_name['mode'].enum_type = _SUBSCRIPTIONLIST_MODE +_SUBSCRIPTIONLIST.fields_by_name['use_models'].message_type = _MODELDATA +_SUBSCRIPTIONLIST.fields_by_name['encoding'].enum_type = _ENCODING +_SUBSCRIPTIONLIST_MODE.containing_type = _SUBSCRIPTIONLIST +_SUBSCRIPTION.fields_by_name['path'].message_type = _PATH +_SUBSCRIPTION.fields_by_name['mode'].enum_type = _SUBSCRIPTIONMODE +_ALIAS.fields_by_name['path'].message_type = _PATH +_ALIASLIST.fields_by_name['alias'].message_type = _ALIAS +_SETREQUEST.fields_by_name['prefix'].message_type = _PATH +_SETREQUEST.fields_by_name['delete'].message_type = _PATH +_SETREQUEST.fields_by_name['replace'].message_type = _UPDATE +_SETREQUEST.fields_by_name['update'].message_type = _UPDATE +_SETRESPONSE.fields_by_name['prefix'].message_type = _PATH +_SETRESPONSE.fields_by_name['response'].message_type = _UPDATERESULT +_SETRESPONSE.fields_by_name['message'].message_type = _ERROR +_UPDATERESULT.fields_by_name['path'].message_type = _PATH +_UPDATERESULT.fields_by_name['message'].message_type = _ERROR +_UPDATERESULT.fields_by_name['op'].enum_type = _UPDATERESULT_OPERATION +_UPDATERESULT_OPERATION.containing_type = _UPDATERESULT +_GETREQUEST.fields_by_name['prefix'].message_type = _PATH +_GETREQUEST.fields_by_name['path'].message_type = _PATH +_GETREQUEST.fields_by_name['type'].enum_type = _GETREQUEST_DATATYPE +_GETREQUEST.fields_by_name['encoding'].enum_type = _ENCODING 
+_GETREQUEST.fields_by_name['use_models'].message_type = _MODELDATA +_GETREQUEST_DATATYPE.containing_type = _GETREQUEST +_GETRESPONSE.fields_by_name['notification'].message_type = _NOTIFICATION +_GETRESPONSE.fields_by_name['error'].message_type = _ERROR +_CAPABILITYRESPONSE.fields_by_name['supported_models'].message_type = _MODELDATA +_CAPABILITYRESPONSE.fields_by_name['supported_encodings'].enum_type = _ENCODING +DESCRIPTOR.message_types_by_name['Notification'] = _NOTIFICATION +DESCRIPTOR.message_types_by_name['Update'] = _UPDATE +DESCRIPTOR.message_types_by_name['TypedValue'] = _TYPEDVALUE +DESCRIPTOR.message_types_by_name['Path'] = _PATH +DESCRIPTOR.message_types_by_name['PathElem'] = _PATHELEM +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['Error'] = _ERROR +DESCRIPTOR.message_types_by_name['Decimal64'] = _DECIMAL64 +DESCRIPTOR.message_types_by_name['ScalarArray'] = _SCALARARRAY +DESCRIPTOR.message_types_by_name['SubscribeRequest'] = _SUBSCRIBEREQUEST +DESCRIPTOR.message_types_by_name['Poll'] = _POLL +DESCRIPTOR.message_types_by_name['SubscribeResponse'] = _SUBSCRIBERESPONSE +DESCRIPTOR.message_types_by_name['SubscriptionList'] = _SUBSCRIPTIONLIST +DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['QOSMarking'] = _QOSMARKING +DESCRIPTOR.message_types_by_name['Alias'] = _ALIAS +DESCRIPTOR.message_types_by_name['AliasList'] = _ALIASLIST +DESCRIPTOR.message_types_by_name['SetRequest'] = _SETREQUEST +DESCRIPTOR.message_types_by_name['SetResponse'] = _SETRESPONSE +DESCRIPTOR.message_types_by_name['UpdateResult'] = _UPDATERESULT +DESCRIPTOR.message_types_by_name['GetRequest'] = _GETREQUEST +DESCRIPTOR.message_types_by_name['GetResponse'] = _GETRESPONSE +DESCRIPTOR.message_types_by_name['CapabilityRequest'] = _CAPABILITYREQUEST +DESCRIPTOR.message_types_by_name['CapabilityResponse'] = _CAPABILITYRESPONSE +DESCRIPTOR.message_types_by_name['ModelData'] = _MODELDATA 
+DESCRIPTOR.enum_types_by_name['Encoding'] = _ENCODING +DESCRIPTOR.enum_types_by_name['SubscriptionMode'] = _SUBSCRIPTIONMODE +DESCRIPTOR.extensions_by_name['gnmi_service'] = gnmi_service +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Notification = _reflection.GeneratedProtocolMessageType('Notification', (_message.Message,), dict( + DESCRIPTOR = _NOTIFICATION, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Notification) + )) +_sym_db.RegisterMessage(Notification) + +Update = _reflection.GeneratedProtocolMessageType('Update', (_message.Message,), dict( + DESCRIPTOR = _UPDATE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Update) + )) +_sym_db.RegisterMessage(Update) + +TypedValue = _reflection.GeneratedProtocolMessageType('TypedValue', (_message.Message,), dict( + DESCRIPTOR = _TYPEDVALUE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.TypedValue) + )) +_sym_db.RegisterMessage(TypedValue) + +Path = _reflection.GeneratedProtocolMessageType('Path', (_message.Message,), dict( + DESCRIPTOR = _PATH, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Path) + )) +_sym_db.RegisterMessage(Path) + +PathElem = _reflection.GeneratedProtocolMessageType('PathElem', (_message.Message,), dict( + + KeyEntry = _reflection.GeneratedProtocolMessageType('KeyEntry', (_message.Message,), dict( + DESCRIPTOR = _PATHELEM_KEYENTRY, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.PathElem.KeyEntry) + )) + , + DESCRIPTOR = _PATHELEM, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.PathElem) + )) +_sym_db.RegisterMessage(PathElem) +_sym_db.RegisterMessage(PathElem.KeyEntry) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( + DESCRIPTOR = _VALUE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Value) + )) 
+_sym_db.RegisterMessage(Value) + +Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), dict( + DESCRIPTOR = _ERROR, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Error) + )) +_sym_db.RegisterMessage(Error) + +Decimal64 = _reflection.GeneratedProtocolMessageType('Decimal64', (_message.Message,), dict( + DESCRIPTOR = _DECIMAL64, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Decimal64) + )) +_sym_db.RegisterMessage(Decimal64) + +ScalarArray = _reflection.GeneratedProtocolMessageType('ScalarArray', (_message.Message,), dict( + DESCRIPTOR = _SCALARARRAY, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.ScalarArray) + )) +_sym_db.RegisterMessage(ScalarArray) + +SubscribeRequest = _reflection.GeneratedProtocolMessageType('SubscribeRequest', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIBEREQUEST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.SubscribeRequest) + )) +_sym_db.RegisterMessage(SubscribeRequest) + +Poll = _reflection.GeneratedProtocolMessageType('Poll', (_message.Message,), dict( + DESCRIPTOR = _POLL, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Poll) + )) +_sym_db.RegisterMessage(Poll) + +SubscribeResponse = _reflection.GeneratedProtocolMessageType('SubscribeResponse', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIBERESPONSE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.SubscribeResponse) + )) +_sym_db.RegisterMessage(SubscribeResponse) + +SubscriptionList = _reflection.GeneratedProtocolMessageType('SubscriptionList', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTIONLIST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.SubscriptionList) + )) +_sym_db.RegisterMessage(SubscriptionList) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', 
(_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Subscription) + )) +_sym_db.RegisterMessage(Subscription) + +QOSMarking = _reflection.GeneratedProtocolMessageType('QOSMarking', (_message.Message,), dict( + DESCRIPTOR = _QOSMARKING, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.QOSMarking) + )) +_sym_db.RegisterMessage(QOSMarking) + +Alias = _reflection.GeneratedProtocolMessageType('Alias', (_message.Message,), dict( + DESCRIPTOR = _ALIAS, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.Alias) + )) +_sym_db.RegisterMessage(Alias) + +AliasList = _reflection.GeneratedProtocolMessageType('AliasList', (_message.Message,), dict( + DESCRIPTOR = _ALIASLIST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.AliasList) + )) +_sym_db.RegisterMessage(AliasList) + +SetRequest = _reflection.GeneratedProtocolMessageType('SetRequest', (_message.Message,), dict( + DESCRIPTOR = _SETREQUEST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.SetRequest) + )) +_sym_db.RegisterMessage(SetRequest) + +SetResponse = _reflection.GeneratedProtocolMessageType('SetResponse', (_message.Message,), dict( + DESCRIPTOR = _SETRESPONSE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.SetResponse) + )) +_sym_db.RegisterMessage(SetResponse) + +UpdateResult = _reflection.GeneratedProtocolMessageType('UpdateResult', (_message.Message,), dict( + DESCRIPTOR = _UPDATERESULT, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.UpdateResult) + )) +_sym_db.RegisterMessage(UpdateResult) + +GetRequest = _reflection.GeneratedProtocolMessageType('GetRequest', (_message.Message,), dict( + DESCRIPTOR = _GETREQUEST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.GetRequest) + )) 
+_sym_db.RegisterMessage(GetRequest) + +GetResponse = _reflection.GeneratedProtocolMessageType('GetResponse', (_message.Message,), dict( + DESCRIPTOR = _GETRESPONSE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.GetResponse) + )) +_sym_db.RegisterMessage(GetResponse) + +CapabilityRequest = _reflection.GeneratedProtocolMessageType('CapabilityRequest', (_message.Message,), dict( + DESCRIPTOR = _CAPABILITYREQUEST, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.CapabilityRequest) + )) +_sym_db.RegisterMessage(CapabilityRequest) + +CapabilityResponse = _reflection.GeneratedProtocolMessageType('CapabilityResponse', (_message.Message,), dict( + DESCRIPTOR = _CAPABILITYRESPONSE, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.CapabilityResponse) + )) +_sym_db.RegisterMessage(CapabilityResponse) + +ModelData = _reflection.GeneratedProtocolMessageType('ModelData', (_message.Message,), dict( + DESCRIPTOR = _MODELDATA, + __module__ = 'proto.gnmi.gnmi_pb2' + # @@protoc_insertion_point(class_scope:gnmi.ModelData) + )) +_sym_db.RegisterMessage(ModelData) + +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gnmi_service) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\312>\0050.5.0')) +_UPDATE.fields_by_name['value'].has_options = True +_UPDATE.fields_by_name['value']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_PATH.fields_by_name['element'].has_options = True +_PATH.fields_by_name['element']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_PATHELEM_KEYENTRY.has_options = True +_PATHELEM_KEYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_VALUE.has_options = True +_VALUE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\030\001')) 
+_ERROR.has_options = True +_ERROR._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\030\001')) +_SUBSCRIBERESPONSE.fields_by_name['error'].has_options = True +_SUBSCRIBERESPONSE.fields_by_name['error']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_SETRESPONSE.fields_by_name['message'].has_options = True +_SETRESPONSE.fields_by_name['message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_UPDATERESULT.fields_by_name['timestamp'].has_options = True +_UPDATERESULT.fields_by_name['timestamp']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_UPDATERESULT.fields_by_name['message'].has_options = True +_UPDATERESULT.fields_by_name['message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_GETRESPONSE.fields_by_name['error'].has_options = True +_GETRESPONSE.fields_by_name['error']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class gNMIStub(object): + # missing associated documentation comment in .proto file + pass + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Capabilities = channel.unary_unary( + '/gnmi.gNMI/Capabilities', + request_serializer=CapabilityRequest.SerializeToString, + response_deserializer=CapabilityResponse.FromString, + ) + self.Get = channel.unary_unary( + '/gnmi.gNMI/Get', + request_serializer=GetRequest.SerializeToString, + response_deserializer=GetResponse.FromString, + ) + self.Set = channel.unary_unary( + '/gnmi.gNMI/Set', + request_serializer=SetRequest.SerializeToString, + response_deserializer=SetResponse.FromString, + ) + self.Subscribe = channel.stream_stream( + '/gnmi.gNMI/Subscribe', + request_serializer=SubscribeRequest.SerializeToString, + response_deserializer=SubscribeResponse.FromString, + ) + + + class gNMIServicer(object): + # missing associated documentation comment in .proto file + pass + + def Capabilities(self, request, context): + """Capabilities allows the client to retrieve the set of capabilities that + is supported by the target. This allows the target to validate the + service version that is implemented and retrieve the set of models that + the target supports. The models can then be specified in subsequent RPCs + to restrict the set of data that is utilized. + Reference: gNMI Specification Section 3.2 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Get(self, request, context): + """Retrieve a snapshot of data from the target. A Get RPC requests that the + target snapshots a subset of the data tree as specified by the paths + included in the message and serializes this to be returned to the + client using the specified encoding. + Reference: gNMI Specification Section 3.3 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Set(self, request, context): + """Set allows the client to modify the state of data on the target. 
The + paths to modified along with the new values that the client wishes + to set the value to. + Reference: gNMI Specification Section 3.4 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Subscribe(self, request_iterator, context): + """Subscribe allows a client to request the target to send it values + of particular paths within the data tree. These values may be streamed + at a particular cadence (STREAM), sent one off on a long-lived channel + (POLL), or sent as a one-off retrieval (ONCE). + Reference: gNMI Specification Section 3.5 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_gNMIServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Capabilities': grpc.unary_unary_rpc_method_handler( + servicer.Capabilities, + request_deserializer=CapabilityRequest.FromString, + response_serializer=CapabilityResponse.SerializeToString, + ), + 'Get': grpc.unary_unary_rpc_method_handler( + servicer.Get, + request_deserializer=GetRequest.FromString, + response_serializer=GetResponse.SerializeToString, + ), + 'Set': grpc.unary_unary_rpc_method_handler( + servicer.Set, + request_deserializer=SetRequest.FromString, + response_serializer=SetResponse.SerializeToString, + ), + 'Subscribe': grpc.stream_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=SubscribeRequest.FromString, + response_serializer=SubscribeResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'gnmi.gNMI', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetagNMIServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + # missing associated documentation comment in .proto file + pass + def Capabilities(self, request, context): + """Capabilities allows the client to retrieve the set of capabilities that + is supported by the target. This allows the target to validate the + service version that is implemented and retrieve the set of models that + the target supports. The models can then be specified in subsequent RPCs + to restrict the set of data that is utilized. + Reference: gNMI Specification Section 3.2 + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Get(self, request, context): + """Retrieve a snapshot of data from the target. A Get RPC requests that the + target snapshots a subset of the data tree as specified by the paths + included in the message and serializes this to be returned to the + client using the specified encoding. + Reference: gNMI Specification Section 3.3 + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Set(self, request, context): + """Set allows the client to modify the state of data on the target. The + paths to modified along with the new values that the client wishes + to set the value to. + Reference: gNMI Specification Section 3.4 + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Subscribe(self, request_iterator, context): + """Subscribe allows a client to request the target to send it values + of particular paths within the data tree. These values may be streamed + at a particular cadence (STREAM), sent one off on a long-lived channel + (POLL), or sent as a one-off retrieval (ONCE). + Reference: gNMI Specification Section 3.5 + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetagNMIStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + # missing associated documentation comment in .proto file + pass + def Capabilities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Capabilities allows the client to retrieve the set of capabilities that + is supported by the target. This allows the target to validate the + service version that is implemented and retrieve the set of models that + the target supports. The models can then be specified in subsequent RPCs + to restrict the set of data that is utilized. + Reference: gNMI Specification Section 3.2 + """ + raise NotImplementedError() + Capabilities.future = None + def Get(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Retrieve a snapshot of data from the target. A Get RPC requests that the + target snapshots a subset of the data tree as specified by the paths + included in the message and serializes this to be returned to the + client using the specified encoding. + Reference: gNMI Specification Section 3.3 + """ + raise NotImplementedError() + Get.future = None + def Set(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Set allows the client to modify the state of data on the target. The + paths to modified along with the new values that the client wishes + to set the value to. + Reference: gNMI Specification Section 3.4 + """ + raise NotImplementedError() + Set.future = None + def Subscribe(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """Subscribe allows a client to request the target to send it values + of particular paths within the data tree. These values may be streamed + at a particular cadence (STREAM), sent one off on a long-lived channel + (POLL), or sent as a one-off retrieval (ONCE). 
+ Reference: gNMI Specification Section 3.5 + """ + raise NotImplementedError() + + + def beta_create_gNMI_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('gnmi.gNMI', 'Capabilities'): CapabilityRequest.FromString, + ('gnmi.gNMI', 'Get'): GetRequest.FromString, + ('gnmi.gNMI', 'Set'): SetRequest.FromString, + ('gnmi.gNMI', 'Subscribe'): SubscribeRequest.FromString, + } + response_serializers = { + ('gnmi.gNMI', 'Capabilities'): CapabilityResponse.SerializeToString, + ('gnmi.gNMI', 'Get'): GetResponse.SerializeToString, + ('gnmi.gNMI', 'Set'): SetResponse.SerializeToString, + ('gnmi.gNMI', 'Subscribe'): SubscribeResponse.SerializeToString, + } + method_implementations = { + ('gnmi.gNMI', 'Capabilities'): face_utilities.unary_unary_inline(servicer.Capabilities), + ('gnmi.gNMI', 'Get'): face_utilities.unary_unary_inline(servicer.Get), + ('gnmi.gNMI', 'Set'): face_utilities.unary_unary_inline(servicer.Set), + ('gnmi.gNMI', 'Subscribe'): face_utilities.stream_stream_inline(servicer.Subscribe), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_gNMI_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('gnmi.gNMI', 'Capabilities'): CapabilityRequest.SerializeToString, + ('gnmi.gNMI', 'Get'): GetRequest.SerializeToString, + ('gnmi.gNMI', 'Set'): SetRequest.SerializeToString, + ('gnmi.gNMI', 'Subscribe'): SubscribeRequest.SerializeToString, + } + response_deserializers = { + ('gnmi.gNMI', 'Capabilities'): CapabilityResponse.FromString, + ('gnmi.gNMI', 'Get'): GetResponse.FromString, + ('gnmi.gNMI', 'Set'): SetResponse.FromString, + ('gnmi.gNMI', 'Subscribe'): SubscribeResponse.FromString, + } + cardinalities = { + 'Capabilities': cardinality.Cardinality.UNARY_UNARY, + 'Get': cardinality.Cardinality.UNARY_UNARY, + 'Set': cardinality.Cardinality.UNARY_UNARY, + 'Subscribe': cardinality.Cardinality.STREAM_STREAM, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'gnmi.gNMI', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.pyi b/src/device/service/drivers/openconfig/gnmi_pb2.pyi new file mode 100644 index 000000000..423bcfb90 --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi_pb2.pyi @@ -0,0 +1,380 @@ +from google.protobuf import any_pb2 as _any_pb2 +from google.protobuf import descriptor_pb2 as _descriptor_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union + 
+ASCII: Encoding +BYTES: Encoding +DESCRIPTOR: _descriptor.FileDescriptor +EID_EXPERIMENTAL: ExtensionID +EID_UNSET: ExtensionID +GNMI_SERVICE_FIELD_NUMBER: _ClassVar[int] +JSON: Encoding +JSON_IETF: Encoding +ON_CHANGE: SubscriptionMode +PROTO: Encoding +SAMPLE: SubscriptionMode +TARGET_DEFINED: SubscriptionMode +gnmi_service: _descriptor.FieldDescriptor + +class CapabilityRequest(_message.Message): + __slots__ = ["extension"] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + extension: _containers.RepeatedCompositeFieldContainer[Extension] + def __init__(self, extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class CapabilityResponse(_message.Message): + __slots__ = ["extension", "gNMI_version", "supported_encodings", "supported_models"] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + GNMI_VERSION_FIELD_NUMBER: _ClassVar[int] + SUPPORTED_ENCODINGS_FIELD_NUMBER: _ClassVar[int] + SUPPORTED_MODELS_FIELD_NUMBER: _ClassVar[int] + extension: _containers.RepeatedCompositeFieldContainer[Extension] + gNMI_version: str + supported_encodings: _containers.RepeatedScalarFieldContainer[Encoding] + supported_models: _containers.RepeatedCompositeFieldContainer[ModelData] + def __init__(self, supported_models: _Optional[_Iterable[_Union[ModelData, _Mapping]]] = ..., supported_encodings: _Optional[_Iterable[_Union[Encoding, str]]] = ..., gNMI_version: _Optional[str] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class Decimal64(_message.Message): + __slots__ = ["digits", "precision"] + DIGITS_FIELD_NUMBER: _ClassVar[int] + PRECISION_FIELD_NUMBER: _ClassVar[int] + digits: int + precision: int + def __init__(self, digits: _Optional[int] = ..., precision: _Optional[int] = ...) -> None: ... 
+ +class Error(_message.Message): + __slots__ = ["code", "data", "message"] + CODE_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + code: int + data: _any_pb2.Any + message: str + def __init__(self, code: _Optional[int] = ..., message: _Optional[str] = ..., data: _Optional[_Union[_any_pb2.Any, _Mapping]] = ...) -> None: ... + +class Extension(_message.Message): + __slots__ = ["history", "master_arbitration", "registered_ext"] + HISTORY_FIELD_NUMBER: _ClassVar[int] + MASTER_ARBITRATION_FIELD_NUMBER: _ClassVar[int] + REGISTERED_EXT_FIELD_NUMBER: _ClassVar[int] + history: History + master_arbitration: MasterArbitration + registered_ext: RegisteredExtension + def __init__(self, registered_ext: _Optional[_Union[RegisteredExtension, _Mapping]] = ..., master_arbitration: _Optional[_Union[MasterArbitration, _Mapping]] = ..., history: _Optional[_Union[History, _Mapping]] = ...) -> None: ... + +class GetRequest(_message.Message): + __slots__ = ["encoding", "extension", "path", "prefix", "type", "use_models"] + class DataType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + ALL: GetRequest.DataType + CONFIG: GetRequest.DataType + ENCODING_FIELD_NUMBER: _ClassVar[int] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + OPERATIONAL: GetRequest.DataType + PATH_FIELD_NUMBER: _ClassVar[int] + PREFIX_FIELD_NUMBER: _ClassVar[int] + STATE: GetRequest.DataType + TYPE_FIELD_NUMBER: _ClassVar[int] + USE_MODELS_FIELD_NUMBER: _ClassVar[int] + encoding: Encoding + extension: _containers.RepeatedCompositeFieldContainer[Extension] + path: _containers.RepeatedCompositeFieldContainer[Path] + prefix: Path + type: GetRequest.DataType + use_models: _containers.RepeatedCompositeFieldContainer[ModelData] + def __init__(self, prefix: _Optional[_Union[Path, _Mapping]] = ..., path: _Optional[_Iterable[_Union[Path, _Mapping]]] = ..., type: _Optional[_Union[GetRequest.DataType, str]] = ..., encoding: _Optional[_Union[Encoding, str]] = 
..., use_models: _Optional[_Iterable[_Union[ModelData, _Mapping]]] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class GetResponse(_message.Message): + __slots__ = ["error", "extension", "notification"] + ERROR_FIELD_NUMBER: _ClassVar[int] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + NOTIFICATION_FIELD_NUMBER: _ClassVar[int] + error: Error + extension: _containers.RepeatedCompositeFieldContainer[Extension] + notification: _containers.RepeatedCompositeFieldContainer[Notification] + def __init__(self, notification: _Optional[_Iterable[_Union[Notification, _Mapping]]] = ..., error: _Optional[_Union[Error, _Mapping]] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class History(_message.Message): + __slots__ = ["range", "snapshot_time"] + RANGE_FIELD_NUMBER: _ClassVar[int] + SNAPSHOT_TIME_FIELD_NUMBER: _ClassVar[int] + range: TimeRange + snapshot_time: int + def __init__(self, snapshot_time: _Optional[int] = ..., range: _Optional[_Union[TimeRange, _Mapping]] = ...) -> None: ... + +class MasterArbitration(_message.Message): + __slots__ = ["election_id", "role"] + ELECTION_ID_FIELD_NUMBER: _ClassVar[int] + ROLE_FIELD_NUMBER: _ClassVar[int] + election_id: Uint128 + role: Role + def __init__(self, role: _Optional[_Union[Role, _Mapping]] = ..., election_id: _Optional[_Union[Uint128, _Mapping]] = ...) -> None: ... + +class ModelData(_message.Message): + __slots__ = ["name", "organization", "version"] + NAME_FIELD_NUMBER: _ClassVar[int] + ORGANIZATION_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] + name: str + organization: str + version: str + def __init__(self, name: _Optional[str] = ..., organization: _Optional[str] = ..., version: _Optional[str] = ...) -> None: ... 
+ +class Notification(_message.Message): + __slots__ = ["atomic", "delete", "prefix", "timestamp", "update"] + ATOMIC_FIELD_NUMBER: _ClassVar[int] + DELETE_FIELD_NUMBER: _ClassVar[int] + PREFIX_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + UPDATE_FIELD_NUMBER: _ClassVar[int] + atomic: bool + delete: _containers.RepeatedCompositeFieldContainer[Path] + prefix: Path + timestamp: int + update: _containers.RepeatedCompositeFieldContainer[Update] + def __init__(self, timestamp: _Optional[int] = ..., prefix: _Optional[_Union[Path, _Mapping]] = ..., update: _Optional[_Iterable[_Union[Update, _Mapping]]] = ..., delete: _Optional[_Iterable[_Union[Path, _Mapping]]] = ..., atomic: bool = ...) -> None: ... + +class Path(_message.Message): + __slots__ = ["elem", "element", "origin", "target"] + ELEMENT_FIELD_NUMBER: _ClassVar[int] + ELEM_FIELD_NUMBER: _ClassVar[int] + ORIGIN_FIELD_NUMBER: _ClassVar[int] + TARGET_FIELD_NUMBER: _ClassVar[int] + elem: _containers.RepeatedCompositeFieldContainer[PathElem] + element: _containers.RepeatedScalarFieldContainer[str] + origin: str + target: str + def __init__(self, element: _Optional[_Iterable[str]] = ..., origin: _Optional[str] = ..., elem: _Optional[_Iterable[_Union[PathElem, _Mapping]]] = ..., target: _Optional[str] = ...) -> None: ... + +class PathElem(_message.Message): + __slots__ = ["key", "name"] + class KeyEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + KEY_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + key: _containers.ScalarMap[str, str] + name: str + def __init__(self, name: _Optional[str] = ..., key: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class Poll(_message.Message): + __slots__ = [] + def __init__(self) -> None: ... 
+ +class QOSMarking(_message.Message): + __slots__ = ["marking"] + MARKING_FIELD_NUMBER: _ClassVar[int] + marking: int + def __init__(self, marking: _Optional[int] = ...) -> None: ... + +class RegisteredExtension(_message.Message): + __slots__ = ["id", "msg"] + ID_FIELD_NUMBER: _ClassVar[int] + MSG_FIELD_NUMBER: _ClassVar[int] + id: ExtensionID + msg: bytes + def __init__(self, id: _Optional[_Union[ExtensionID, str]] = ..., msg: _Optional[bytes] = ...) -> None: ... + +class Role(_message.Message): + __slots__ = ["id"] + ID_FIELD_NUMBER: _ClassVar[int] + id: str + def __init__(self, id: _Optional[str] = ...) -> None: ... + +class ScalarArray(_message.Message): + __slots__ = ["element"] + ELEMENT_FIELD_NUMBER: _ClassVar[int] + element: _containers.RepeatedCompositeFieldContainer[TypedValue] + def __init__(self, element: _Optional[_Iterable[_Union[TypedValue, _Mapping]]] = ...) -> None: ... + +class SetRequest(_message.Message): + __slots__ = ["delete", "extension", "prefix", "replace", "update"] + DELETE_FIELD_NUMBER: _ClassVar[int] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + PREFIX_FIELD_NUMBER: _ClassVar[int] + REPLACE_FIELD_NUMBER: _ClassVar[int] + UPDATE_FIELD_NUMBER: _ClassVar[int] + delete: _containers.RepeatedCompositeFieldContainer[Path] + extension: _containers.RepeatedCompositeFieldContainer[Extension] + prefix: Path + replace: _containers.RepeatedCompositeFieldContainer[Update] + update: _containers.RepeatedCompositeFieldContainer[Update] + def __init__(self, prefix: _Optional[_Union[Path, _Mapping]] = ..., delete: _Optional[_Iterable[_Union[Path, _Mapping]]] = ..., replace: _Optional[_Iterable[_Union[Update, _Mapping]]] = ..., update: _Optional[_Iterable[_Union[Update, _Mapping]]] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... 
+ +class SetResponse(_message.Message): + __slots__ = ["extension", "message", "prefix", "response", "timestamp"] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + PREFIX_FIELD_NUMBER: _ClassVar[int] + RESPONSE_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + extension: _containers.RepeatedCompositeFieldContainer[Extension] + message: Error + prefix: Path + response: _containers.RepeatedCompositeFieldContainer[UpdateResult] + timestamp: int + def __init__(self, prefix: _Optional[_Union[Path, _Mapping]] = ..., response: _Optional[_Iterable[_Union[UpdateResult, _Mapping]]] = ..., message: _Optional[_Union[Error, _Mapping]] = ..., timestamp: _Optional[int] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class SubscribeRequest(_message.Message): + __slots__ = ["extension", "poll", "subscribe"] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + POLL_FIELD_NUMBER: _ClassVar[int] + SUBSCRIBE_FIELD_NUMBER: _ClassVar[int] + extension: _containers.RepeatedCompositeFieldContainer[Extension] + poll: Poll + subscribe: SubscriptionList + def __init__(self, subscribe: _Optional[_Union[SubscriptionList, _Mapping]] = ..., poll: _Optional[_Union[Poll, _Mapping]] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... + +class SubscribeResponse(_message.Message): + __slots__ = ["error", "extension", "sync_response", "update"] + ERROR_FIELD_NUMBER: _ClassVar[int] + EXTENSION_FIELD_NUMBER: _ClassVar[int] + SYNC_RESPONSE_FIELD_NUMBER: _ClassVar[int] + UPDATE_FIELD_NUMBER: _ClassVar[int] + error: Error + extension: _containers.RepeatedCompositeFieldContainer[Extension] + sync_response: bool + update: Notification + def __init__(self, update: _Optional[_Union[Notification, _Mapping]] = ..., sync_response: bool = ..., error: _Optional[_Union[Error, _Mapping]] = ..., extension: _Optional[_Iterable[_Union[Extension, _Mapping]]] = ...) -> None: ... 
+ +class Subscription(_message.Message): + __slots__ = ["heartbeat_interval", "mode", "path", "sample_interval", "suppress_redundant"] + HEARTBEAT_INTERVAL_FIELD_NUMBER: _ClassVar[int] + MODE_FIELD_NUMBER: _ClassVar[int] + PATH_FIELD_NUMBER: _ClassVar[int] + SAMPLE_INTERVAL_FIELD_NUMBER: _ClassVar[int] + SUPPRESS_REDUNDANT_FIELD_NUMBER: _ClassVar[int] + heartbeat_interval: int + mode: SubscriptionMode + path: Path + sample_interval: int + suppress_redundant: bool + def __init__(self, path: _Optional[_Union[Path, _Mapping]] = ..., mode: _Optional[_Union[SubscriptionMode, str]] = ..., sample_interval: _Optional[int] = ..., suppress_redundant: bool = ..., heartbeat_interval: _Optional[int] = ...) -> None: ... + +class SubscriptionList(_message.Message): + __slots__ = ["allow_aggregation", "encoding", "mode", "prefix", "qos", "subscription", "updates_only", "use_models"] + class Mode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + ALLOW_AGGREGATION_FIELD_NUMBER: _ClassVar[int] + ENCODING_FIELD_NUMBER: _ClassVar[int] + MODE_FIELD_NUMBER: _ClassVar[int] + ONCE: SubscriptionList.Mode + POLL: SubscriptionList.Mode + PREFIX_FIELD_NUMBER: _ClassVar[int] + QOS_FIELD_NUMBER: _ClassVar[int] + STREAM: SubscriptionList.Mode + SUBSCRIPTION_FIELD_NUMBER: _ClassVar[int] + UPDATES_ONLY_FIELD_NUMBER: _ClassVar[int] + USE_MODELS_FIELD_NUMBER: _ClassVar[int] + allow_aggregation: bool + encoding: Encoding + mode: SubscriptionList.Mode + prefix: Path + qos: QOSMarking + subscription: _containers.RepeatedCompositeFieldContainer[Subscription] + updates_only: bool + use_models: _containers.RepeatedCompositeFieldContainer[ModelData] + def __init__(self, prefix: _Optional[_Union[Path, _Mapping]] = ..., subscription: _Optional[_Iterable[_Union[Subscription, _Mapping]]] = ..., qos: _Optional[_Union[QOSMarking, _Mapping]] = ..., mode: _Optional[_Union[SubscriptionList.Mode, str]] = ..., allow_aggregation: bool = ..., use_models: _Optional[_Iterable[_Union[ModelData, 
_Mapping]]] = ..., encoding: _Optional[_Union[Encoding, str]] = ..., updates_only: bool = ...) -> None: ... + +class TimeRange(_message.Message): + __slots__ = ["end", "start"] + END_FIELD_NUMBER: _ClassVar[int] + START_FIELD_NUMBER: _ClassVar[int] + end: int + start: int + def __init__(self, start: _Optional[int] = ..., end: _Optional[int] = ...) -> None: ... + +class TypedValue(_message.Message): + __slots__ = ["any_val", "ascii_val", "bool_val", "bytes_val", "decimal_val", "double_val", "float_val", "int_val", "json_ietf_val", "json_val", "leaflist_val", "proto_bytes", "string_val", "uint_val"] + ANY_VAL_FIELD_NUMBER: _ClassVar[int] + ASCII_VAL_FIELD_NUMBER: _ClassVar[int] + BOOL_VAL_FIELD_NUMBER: _ClassVar[int] + BYTES_VAL_FIELD_NUMBER: _ClassVar[int] + DECIMAL_VAL_FIELD_NUMBER: _ClassVar[int] + DOUBLE_VAL_FIELD_NUMBER: _ClassVar[int] + FLOAT_VAL_FIELD_NUMBER: _ClassVar[int] + INT_VAL_FIELD_NUMBER: _ClassVar[int] + JSON_IETF_VAL_FIELD_NUMBER: _ClassVar[int] + JSON_VAL_FIELD_NUMBER: _ClassVar[int] + LEAFLIST_VAL_FIELD_NUMBER: _ClassVar[int] + PROTO_BYTES_FIELD_NUMBER: _ClassVar[int] + STRING_VAL_FIELD_NUMBER: _ClassVar[int] + UINT_VAL_FIELD_NUMBER: _ClassVar[int] + any_val: _any_pb2.Any + ascii_val: str + bool_val: bool + bytes_val: bytes + decimal_val: Decimal64 + double_val: float + float_val: float + int_val: int + json_ietf_val: bytes + json_val: bytes + leaflist_val: ScalarArray + proto_bytes: bytes + string_val: str + uint_val: int + def __init__(self, string_val: _Optional[str] = ..., int_val: _Optional[int] = ..., uint_val: _Optional[int] = ..., bool_val: bool = ..., bytes_val: _Optional[bytes] = ..., float_val: _Optional[float] = ..., double_val: _Optional[float] = ..., decimal_val: _Optional[_Union[Decimal64, _Mapping]] = ..., leaflist_val: _Optional[_Union[ScalarArray, _Mapping]] = ..., any_val: _Optional[_Union[_any_pb2.Any, _Mapping]] = ..., json_val: _Optional[bytes] = ..., json_ietf_val: _Optional[bytes] = ..., ascii_val: _Optional[str] = ..., 
proto_bytes: _Optional[bytes] = ...) -> None: ... + +class Uint128(_message.Message): + __slots__ = ["high", "low"] + HIGH_FIELD_NUMBER: _ClassVar[int] + LOW_FIELD_NUMBER: _ClassVar[int] + high: int + low: int + def __init__(self, high: _Optional[int] = ..., low: _Optional[int] = ...) -> None: ... + +class Update(_message.Message): + __slots__ = ["duplicates", "path", "val", "value"] + DUPLICATES_FIELD_NUMBER: _ClassVar[int] + PATH_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + VAL_FIELD_NUMBER: _ClassVar[int] + duplicates: int + path: Path + val: TypedValue + value: Value + def __init__(self, path: _Optional[_Union[Path, _Mapping]] = ..., value: _Optional[_Union[Value, _Mapping]] = ..., val: _Optional[_Union[TypedValue, _Mapping]] = ..., duplicates: _Optional[int] = ...) -> None: ... + +class UpdateResult(_message.Message): + __slots__ = ["message", "op", "path", "timestamp"] + class Operation(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + DELETE: UpdateResult.Operation + INVALID: UpdateResult.Operation + MESSAGE_FIELD_NUMBER: _ClassVar[int] + OP_FIELD_NUMBER: _ClassVar[int] + PATH_FIELD_NUMBER: _ClassVar[int] + REPLACE: UpdateResult.Operation + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + UPDATE: UpdateResult.Operation + message: Error + op: UpdateResult.Operation + path: Path + timestamp: int + def __init__(self, timestamp: _Optional[int] = ..., path: _Optional[_Union[Path, _Mapping]] = ..., message: _Optional[_Union[Error, _Mapping]] = ..., op: _Optional[_Union[UpdateResult.Operation, str]] = ...) -> None: ... + +class Value(_message.Message): + __slots__ = ["type", "value"] + TYPE_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + type: Encoding + value: bytes + def __init__(self, value: _Optional[bytes] = ..., type: _Optional[_Union[Encoding, str]] = ...) -> None: ... 
+ +class ExtensionID(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + +class Encoding(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + +class SubscriptionMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] \ No newline at end of file diff --git a/src/device/service/drivers/openconfig/gnmi_pb2_grpc.py b/src/device/service/drivers/openconfig/gnmi_pb2_grpc.py new file mode 100644 index 000000000..43fb01413 --- /dev/null +++ b/src/device/service/drivers/openconfig/gnmi_pb2_grpc.py @@ -0,0 +1,185 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import gnmi_pb2 as gnmi__pb2 + + +class gNMIStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Capabilities = channel.unary_unary( + '/gnmi.gNMI/Capabilities', + request_serializer=gnmi__pb2.CapabilityRequest.SerializeToString, + response_deserializer=gnmi__pb2.CapabilityResponse.FromString, + ) + self.Get = channel.unary_unary( + '/gnmi.gNMI/Get', + request_serializer=gnmi__pb2.GetRequest.SerializeToString, + response_deserializer=gnmi__pb2.GetResponse.FromString, + ) + self.Set = channel.unary_unary( + '/gnmi.gNMI/Set', + request_serializer=gnmi__pb2.SetRequest.SerializeToString, + response_deserializer=gnmi__pb2.SetResponse.FromString, + ) + self.Subscribe = channel.stream_stream( + '/gnmi.gNMI/Subscribe', + request_serializer=gnmi__pb2.SubscribeRequest.SerializeToString, + response_deserializer=gnmi__pb2.SubscribeResponse.FromString, + ) + + +class gNMIServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Capabilities(self, request, context): + """Capabilities allows the client to retrieve the set of capabilities that + is supported by the target. 
This allows the target to validate the + service version that is implemented and retrieve the set of models that + the target supports. The models can then be specified in subsequent RPCs + to restrict the set of data that is utilized. + Reference: gNMI Specification Section 3.2 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Get(self, request, context): + """Retrieve a snapshot of data from the target. A Get RPC requests that the + target snapshots a subset of the data tree as specified by the paths + included in the message and serializes this to be returned to the + client using the specified encoding. + Reference: gNMI Specification Section 3.3 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Set(self, request, context): + """Set allows the client to modify the state of data on the target. The + paths to modified along with the new values that the client wishes + to set the value to. + Reference: gNMI Specification Section 3.4 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Subscribe(self, request_iterator, context): + """Subscribe allows a client to request the target to send it values + of particular paths within the data tree. These values may be streamed + at a particular cadence (STREAM), sent one off on a long-lived channel + (POLL), or sent as a one-off retrieval (ONCE). 
+ Reference: gNMI Specification Section 3.5 + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_gNMIServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Capabilities': grpc.unary_unary_rpc_method_handler( + servicer.Capabilities, + request_deserializer=gnmi__pb2.CapabilityRequest.FromString, + response_serializer=gnmi__pb2.CapabilityResponse.SerializeToString, + ), + 'Get': grpc.unary_unary_rpc_method_handler( + servicer.Get, + request_deserializer=gnmi__pb2.GetRequest.FromString, + response_serializer=gnmi__pb2.GetResponse.SerializeToString, + ), + 'Set': grpc.unary_unary_rpc_method_handler( + servicer.Set, + request_deserializer=gnmi__pb2.SetRequest.FromString, + response_serializer=gnmi__pb2.SetResponse.SerializeToString, + ), + 'Subscribe': grpc.stream_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=gnmi__pb2.SubscribeRequest.FromString, + response_serializer=gnmi__pb2.SubscribeResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'gnmi.gNMI', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class gNMI(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Capabilities(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/gnmi.gNMI/Capabilities', + gnmi__pb2.CapabilityRequest.SerializeToString, + gnmi__pb2.CapabilityResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Get(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/gnmi.gNMI/Get', + gnmi__pb2.GetRequest.SerializeToString, + gnmi__pb2.GetResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Set(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/gnmi.gNMI/Set', + gnmi__pb2.SetRequest.SerializeToString, + gnmi__pb2.SetResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Subscribe(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream(request_iterator, target, '/gnmi.gNMI/Subscribe', + gnmi__pb2.SubscribeRequest.SerializeToString, + gnmi__pb2.SubscribeResponse.FromString, + options, channel_credentials, + insecure, call_credentials, 
compression, wait_for_ready, timeout, metadata) -- GitLab From b996d41107235a21ee39d26158e012ac1d81eb94 Mon Sep 17 00:00:00 2001 From: cajadiazj Date: Thu, 23 Feb 2023 18:24:30 +0100 Subject: [PATCH 07/62] gNMI support added --- manifests/deviceservice.yaml | 2 +- manifests/monitoringservice.yaml | 2 +- my_deploy.sh | 6 +- .../drivers/openconfig/OpenConfigDriver.py | 226 +++++++++++++++++- 4 files changed, 226 insertions(+), 10 deletions(-) diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index ca2c81f0f..ddcc997cd 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:2020"] diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml index 4447a1427..06ac823a1 100644 --- a/manifests/monitoringservice.yaml +++ b/manifests/monitoringservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" envFrom: - secretRef: name: qdb-data diff --git a/my_deploy.sh b/my_deploy.sh index 6f0e64afe..9f671be3b 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -57,7 +57,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if exists. -export CRDB_DROP_DATABASE_IF_EXISTS="" +export CRDB_DROP_DATABASE_IF_EXISTS="YES" # Disable flag for re-deploying CockroachDB from scratch. export CRDB_REDEPLOY="" @@ -87,7 +87,7 @@ export QDB_PASSWORD="quest" export QDB_TABLE="tfs_monitoring" ## If not already set, disable flag for dropping table if exists. -#export QDB_DROP_TABLE_IF_EXISTS="" +export QDB_DROP_TABLE_IF_EXISTS="" # If not already set, disable flag for re-deploying QuestDB from scratch. 
-export QDB_REDEPLOY="" +export QDB_REDEPLOY="YES" diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index ef3d0728d..d128a15e5 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import anytree, copy, logging, pytz, queue, re, threading +import anytree, copy, logging, pytz, queue, re, threading, json, os, sys #import lxml.etree as ET from datetime import datetime, timedelta from typing import Any, Dict, Iterator, List, Optional, Tuple, Union @@ -31,6 +31,15 @@ from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_su from .templates import ALL_RESOURCE_KEYS, EMPTY_CONFIG, compose_config, get_filter, parse from .RetryDecorator import retry +import grpc +from google.protobuf.json_format import MessageToJson + +gnmi_path__ = os.path.dirname(os.path.abspath(__file__)) +sys.path.append(gnmi_path__) +import gnmi_pb2_grpc +import gnmi_pb2 + + DEBUG_MODE = False logging.getLogger('ncclient.manager').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) logging.getLogger('ncclient.transport.ssh').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) @@ -56,6 +65,7 @@ RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, class NetconfSessionHandler: def __init__(self, address : str, port : int, **settings) -> None: + mensaje = f"__init__: address={address}, port={port}, settings={settings}" self.__lock = threading.RLock() self.__connected = threading.Event() self.__address = address @@ -121,6 +131,182 @@ class NetconfSessionHandler: def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) +class 
gNMISessionHandler: + def __init__(self, address : str, **settings) -> None: + self.__lock = threading.RLock() + self.__connected = threading.Event() + self.__address = address + self.__port = settings.get('gnmi_port') + self.__username = settings.get('username') + self.__password = settings.get('password') + self.__vendor = settings.get('vendor') + self.__key_filename = settings.get('key_filename') + self.__hostkey_verify = settings.get('hostkey_verify', True) + self.__look_for_keys = settings.get('look_for_keys', True) + self.__allow_agent = settings.get('allow_agent', True) + self.__force_running = settings.get('force_running', False) + self.__commit_per_delete = settings.get('delete_rule', False) + self.__device_params = settings.get('device_params', {}) + self.__manager_params = settings.get('manager_params', {}) + self.__nc_params = settings.get('nc_params', {}) + self.__stub = None + self.__candidate_supported = False + self.__channel = None + self.__supportedEncodings = None + self.__options = Options() + + def connect(self): + with self.__lock: + self.__channel = grpc.insecure_channel(str(self.__address)+':'+self.__port) + self.__stub = gnmi_pb2_grpc.gNMIStub(self.__channel) + metadata = [('username',self.__username ), ('password', self.__password)] + req = gnmi_pb2.CapabilityRequest() + response = self.__stub.Capabilities(req, metadata=metadata) + data = json.loads(MessageToJson(response)) + self.__supportedEncodings = data['supportedEncodings'] + # TODO: self.__candidate_supported = + self.__connected.set() + + def disconnect(self): + if not self.__connected.is_set(): return + with self.__lock: + self.__channel.close() + + def subscribeStreaming(self, subscription : Tuple[str, float, float], out_samples : queue.Queue) -> None: + resource_key, sampling_duration, sampling_interval = subscription + options = copy.deepcopy(self.__options) + options.xpaths = [parse_xpath(resource_key)] + options.timeout = int(sampling_duration) + options.interval = 
int(sampling_interval) + req_iterator = gen_request(options) + metadata = [('username',self.__username), ('password', self.__password)] + responses = self.__stub.Subscribe(req_iterator, self.__options.timeout, metadata=metadata) + previous_sample = None + delta = 0.0 + previous_timestamp = datetime.timestamp(datetime.utcnow()) + for response in responses: + data = json.loads(MessageToJson(response)) + if data.get("update") is not None and data.get("update").get("update") != None: + now = datetime.timestamp(datetime.utcnow()) + for element in data['update']['update']: + counter_name = split_resource_key(dict_to_xpath(element['path'])) + if counter_name == split_resource_key(resource_key): + value = int(element['val']['uintVal']) + delay = now - previous_timestamp + if previous_sample is not None: delta = (value - previous_sample)/delay + previous_sample = int(value) + previous_timestamp = now + sample = (now, resource_key, delta) + out_samples.put_nowait(sample) + + @property + def use_candidate(self): return self.__candidate_supported and not self.__force_running + + @property + def commit_per_rule(self): return self.__commit_per_delete + + @property + def vendor(self): return self.__vendor + + @RETRY_DECORATOR + def get(self): # pylint: disable=redefined-builtin + return False + + @RETRY_DECORATOR + def edit_config( + self, config, target='running', default_operation=None, test_option=None, + error_option=None, format='xml' # pylint: disable=redefined-builtin + ): + if config == EMPTY_CONFIG: return + with self.__lock: + self.__manager.edit_config( + config, target=target, default_operation=default_operation, test_option=test_option, + error_option=error_option, format=format) + + def locked(self, target): + return self.__manager.locked(target=target) + + def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): + return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) + +def 
path_from_string(path='/'): + if path: + if path[0]=='/': + if path[-1]=='/': + path_list = re.split('''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', path)[1:-1] + else: + path_list = re.split('''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', path)[1:] + else: + if path[-1]=='/': + path_list = re.split('''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', path)[:-1] + else: + path_list = re.split('''/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)''', path) + else: + return gnmi_pb2.Path(elem=[]) + + mypath = [] + + for e in path_list: + eName = e.split("[", 1)[0] + eKeys = re.findall('\[(.*?)\]', e) + dKeys = dict(x.split('=', 1) for x in eKeys) + mypath.append(gnmi_pb2.PathElem(name=eName, key=dKeys)) + + return gnmi_pb2.Path(elem=mypath) + +def gen_request(options): + + mysubs = [] + path = options.xpaths[0] + mypath = path_from_string(path) + mysub = gnmi_pb2.Subscription(path=mypath, mode=options.submode, suppress_redundant=options.suppress, sample_interval=options.interval*1000000000, heartbeat_interval=options.heartbeat) + mysubs.append(mysub) + + if options.prefix: + myprefix = path_from_string(options.prefix) + else: + myprefix = None + + if options.qos: + myqos = gnmi_pb2.QOSMarking(marking=options.qos) + else: + myqos = None + + mysblist = gnmi_pb2.SubscriptionList(prefix=myprefix, mode=options.mode, allow_aggregation=options.aggregate, encoding=options.encoding, subscription=mysubs, qos=myqos) + mysubreq = gnmi_pb2.SubscribeRequest( subscribe=mysblist ) + + yield mysubreq + +def parse_xpath(xpath): + xpath = xpath.replace("//", "/") + xpath = xpath.replace("oci:interface[", "interface[") + xpath = xpath.replace("/oci", "/openconfig-interfaces") + xpath = re.sub(r"\[oci:name='(.*?)'\]", r"[name=\1]", xpath) + # Remove the trailing counter name from the end of the xpath + xpath = "/".join(xpath.split("/")[:-1]) + "/" + return xpath + +def split_resource_key(path): + pattern = r"/state/counters/(.*)" + match = re.search(pattern, path) + if match: + return match.group(1) + else: + return None + +def dict_to_xpath(d: dict) -> str: 
xpath = '/' + for item in d['elem']: + name = item.get('name') + if name == 'interface': + key = item.get('key') + interface_name = key.get('name') + xpath += f"/oci:interface[oci:name='{interface_name}']" + else: + xpath += f"/{name}" + xpath = xpath.replace('openconfig-interfaces', 'oci') + return xpath + def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): if previous_sample is None: return None if previous_timestamp is None: return None @@ -141,12 +327,13 @@ def compute_delta_sample(previous_sample, previous_timestamp, current_sample, cu return delta_sample class SamplesCache: - def __init__(self, netconf_handler : NetconfSessionHandler) -> None: + def __init__(self, netconf_handler : NetconfSessionHandler, gNMI_handler : gNMISessionHandler) -> None: self.__netconf_handler = netconf_handler self.__lock = threading.Lock() self.__timestamp = None self.__absolute_samples = {} self.__delta_samples = {} + self.__gNMI_handler = gNMI_handler def _refresh_samples(self) -> None: with self.__lock: @@ -189,6 +376,24 @@ def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : except: # pylint: disable=bare-except LOGGER.exception('Error retrieving samples') +class Options: + def __init__(self, xpaths=None, prefix=None, mode=0, submode=0, suppress=False, interval=0, + encoding='JSON', heartbeat=0, qos=None, aggregate=False, server=None, username='admin', password='admin', timeout=None): + self.xpaths = xpaths + self.prefix = prefix + self.mode = mode + self.submode = submode + self.suppress = suppress + self.interval = interval + self.encoding = encoding + self.heartbeat = heartbeat + self.qos = qos + self.aggregate = aggregate + self.server = server + self.username = username + self.password = password + self.timeout = timeout + def edit_config( netconf_handler : NetconfSessionHandler, resources : List[Tuple[str, Any]], delete=False, commit_per_rule= False, target='running', default_operation='merge', 
test_option=None, error_option=None, @@ -249,6 +454,7 @@ class OpenConfigDriver(_Driver): self.__subscriptions = TreeNode('.') self.__started = threading.Event() self.__terminate = threading.Event() + self.__gnmi_monitoring = settings.get('monitoring_protocol') == 'gnmi' self.__scheduler = BackgroundScheduler(daemon=True) # scheduler used to emulate sampling events self.__scheduler.configure( jobstores = {'default': MemoryJobStore()}, @@ -257,12 +463,16 @@ class OpenConfigDriver(_Driver): timezone=pytz.utc) self.__out_samples = queue.Queue() self.__netconf_handler : NetconfSessionHandler = NetconfSessionHandler(address, port, **settings) - self.__samples_cache = SamplesCache(self.__netconf_handler) + self.__gNMI_handler : gNMISessionHandler = gNMISessionHandler(address, **settings) + self.__samples_cache = SamplesCache(self.__netconf_handler, self.__gNMI_handler) def Connect(self) -> bool: with self.__lock: if self.__started.is_set(): return True self.__netconf_handler.connect() + if self.__gnmi_monitoring: + self.__gNMI_handler.connect() + LOGGER.debug('Using gNMI as monitoring protocol') # Connect triggers activation of sampling events that will be scheduled based on subscriptions self.__scheduler.start() self.__started.set() @@ -276,7 +486,7 @@ class OpenConfigDriver(_Driver): if not self.__started.is_set(): return True # Disconnect triggers deactivation of sampling events self.__scheduler.shutdown() - self.__netconf_handler.disconnect() + if self.__gnmi_monitoring: self.__netconf_handler.disconnect() return True @metered_subclass_method(METRICS_POOL) @@ -373,7 +583,13 @@ class OpenConfigDriver(_Driver): end_date = start_date + timedelta(seconds=sampling_duration) job_id = 'k={:s}/d={:f}/i={:f}'.format(resource_key, sampling_duration, sampling_interval) - job = self.__scheduler.add_job( + + if self.__gnmi_monitoring: + LOGGER.debug('Processing gNMI subscription: '+ str(subscription)) + job = threading.Thread(target=self.__gNMI_handler.subscribeStreaming, 
args=(subscription, self.__out_samples)) + job.start() + else: + job = self.__scheduler.add_job( do_sampling, args=(self.__samples_cache, resource_key, self.__out_samples), kwargs={}, id=job_id, trigger='interval', seconds=sampling_interval, start_date=start_date, end_date=end_date, timezone=pytz.utc) -- GitLab From c01e8271a9618fc75926fc921f64fa86d7121005 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 5 Jun 2023 11:17:30 +0000 Subject: [PATCH 08/62] Pre-merge clean-up --- manifests/deviceservice.yaml | 2 +- manifests/monitoringservice.yaml | 2 +- my_deploy.sh | 7 +++---- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index ddcc997cd..ca2c81f0f 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:2020"] diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml index 06ac823a1..4447a1427 100644 --- a/manifests/monitoringservice.yaml +++ b/manifests/monitoringservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" envFrom: - secretRef: name: qdb-data diff --git a/my_deploy.sh b/my_deploy.sh index 85b50d397..c8cacf4a8 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,8 +20,7 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -#export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" -export TFS_COMPONENTS="context device monitoring pathcomp service slice webui" +export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator" # Set the tag you want to use for your images. 
export TFS_IMAGE_TAG="dev" @@ -58,7 +57,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if exists. -export CRDB_DROP_DATABASE_IF_EXISTS="YES" +export CRDB_DROP_DATABASE_IF_EXISTS="" # Disable flag for re-deploying CockroachDB from scratch. export CRDB_REDEPLOY="" @@ -91,4 +90,4 @@ export QDB_TABLE="tfs_monitoring" export QDB_DROP_TABLE_IF_EXISTS="" # If not already set, disable flag for re-deploying QuestDB from scratch. -export QDB_REDEPLOY="YES" +export QDB_REDEPLOY="" -- GitLab From 1b9cc8691298f313a03ae97167d393695043b182 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 5 Jun 2023 11:18:20 +0000 Subject: [PATCH 09/62] Pre-merge clean-up --- my_deploy.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/my_deploy.sh b/my_deploy.sh index c8cacf4a8..6f0e64afe 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -87,7 +87,7 @@ export QDB_PASSWORD="quest" export QDB_TABLE="tfs_monitoring" ## If not already set, disable flag for dropping table if exists. -export QDB_DROP_TABLE_IF_EXISTS="" +#export QDB_DROP_TABLE_IF_EXISTS="" # If not already set, disable flag for re-deploying QuestDB from scratch. export QDB_REDEPLOY="" -- GitLab From 95098c014d5304ba726e4740d6256dc5a3efa46c Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 12 Jun 2023 12:41:00 +0000 Subject: [PATCH 10/62] Minor corrections to hackfest commands.txt --- hackfest/commands.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hackfest/commands.txt b/hackfest/commands.txt index 31558364d..34bb8188b 100644 --- a/hackfest/commands.txt +++ b/hackfest/commands.txt @@ -267,11 +267,11 @@ Bye! 
############ ## Download and install the latest release -$ sudo bash -c "$(curl -sL https://get.containerlab.dev)“ +$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" ## Deploy proposed two SR node scenario -$ cd tfs-ctrl/hackfest/gnmi -$ sudo containerlab deploy -t srlinux.clab.yml +$ cd ~/tfs-ctrl/hackfest/gnmi +$ sudo containerlab deploy --topo srlinux.clab.yml ## Access SR Bash $ docker exec -it clab-srlinux-srl1 bash -- GitLab From 3c9bda61dd91e4d37e6cda98a45f73376a9c2f3f Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 14:39:13 +0000 Subject: [PATCH 11/62] Code cleanup --- manifests/deviceservice.yaml | 2 +- manifests/pathcompservice.yaml | 2 +- manifests/serviceservice.yaml | 2 +- my_deploy.sh | 6 ++++-- src/common/method_wrappers/tests/deploy_specs.sh | 4 ++-- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 3892129e9..22c0f5f9d 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -39,7 +39,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:2020"] diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index f10178700..3ba12750b 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:10020"] diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 3865fd6c0..7d7bdaa4e 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:3030"] diff --git a/my_deploy.sh b/my_deploy.sh index 1ffbffa15..0b8b6d9e8 100755 --- 
a/my_deploy.sh +++ b/my_deploy.sh @@ -20,7 +20,8 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -export TFS_COMPONENTS="context device pathcomp service slice compute webui load_generator" +#export TFS_COMPONENTS="context device pathcomp service slice compute webui load_generator" +export TFS_COMPONENTS="context device pathcomp service slice compute webui" # Uncoment to activate Monitoring #export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" @@ -41,7 +42,8 @@ export TFS_IMAGE_TAG="dev" export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment -export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml manifests/servicemonitors.yaml" +#export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml manifests/servicemonitors.yaml" +export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" # Uncoment when deploying Optical CyberSecurity #export TFS_EXTRA_MANIFESTS="${TFS_EXTRA_MANIFESTS} manifests/cachingservice.yaml" diff --git a/src/common/method_wrappers/tests/deploy_specs.sh b/src/common/method_wrappers/tests/deploy_specs.sh index 41537a26c..1f41d2348 100755 --- a/src/common/method_wrappers/tests/deploy_specs.sh +++ b/src/common/method_wrappers/tests/deploy_specs.sh @@ -57,7 +57,7 @@ export CRDB_DATABASE="tfs" export CRDB_DEPLOY_MODE="single" # Disable flag for dropping database, if it exists. -export CRDB_DROP_DATABASE_IF_EXISTS="YES" +export CRDB_DROP_DATABASE_IF_EXISTS="" # Disable flag for re-deploying CockroachDB from scratch. export CRDB_REDEPLOY="" @@ -90,7 +90,7 @@ export QDB_TABLE_MONITORING_KPIS="tfs_monitoring_kpis" export QDB_TABLE_SLICE_GROUPS="tfs_slice_groups" # Disable flag for dropping tables if they exist. -export QDB_DROP_TABLES_IF_EXIST="YES" +export QDB_DROP_TABLES_IF_EXIST="" # Disable flag for re-deploying QuestDB from scratch. 
export QDB_REDEPLOY="" -- GitLab From a01d46305654b5b3583f5fcd276d821fb9d5df82 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 14:44:08 +0000 Subject: [PATCH 12/62] Code cleanup --- src/load_generator/command/__main__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/load_generator/command/__main__.py b/src/load_generator/command/__main__.py index 555f1dd4f..4fa2094e0 100644 --- a/src/load_generator/command/__main__.py +++ b/src/load_generator/command/__main__.py @@ -25,14 +25,14 @@ LOGGER = logging.getLogger(__name__) def main(): LOGGER.info('Starting...') parameters = Parameters( - num_requests = 10, + num_requests = 100, request_types = [ RequestType.SERVICE_L2NM, RequestType.SERVICE_L3NM, #RequestType.SERVICE_MW, #RequestType.SERVICE_TAPI, - #RequestType.SLICE_L2NM, - #RequestType.SLICE_L3NM, + RequestType.SLICE_L2NM, + RequestType.SLICE_L3NM, ], device_regex=r'.+', endpoint_regex=r'.+', -- GitLab From b7ebd90b9c38c17d8db147d0ecbbe915cd77ed14 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 14:46:39 +0000 Subject: [PATCH 13/62] Device component - OpenConfig & gNMI Driver: - Moved gNMI code to new driver --- .../protocols/gnmi/Acknowledgement.txt | 25 +++++++++++++++++++ .../protocols/gnmi/__init__.py | 13 ++++++++++ .../protocols/gnmi}/gnmi.proto | 0 .../protocols/gnmi}/gnmi_ext.proto | 0 .../protocols/gnmi}/gnmi_pb2.py | 0 .../protocols/gnmi}/gnmi_pb2.py.old | 0 .../protocols/gnmi}/gnmi_pb2.pyi | 0 .../protocols/gnmi}/gnmi_pb2_grpc.py | 2 +- 8 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt create mode 100644 src/device/service/drivers/gnmi_openconfig/protocols/gnmi/__init__.py rename src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi.proto (100%) rename src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi_ext.proto (100%) rename 
src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi_pb2.py (100%) rename src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi_pb2.py.old (100%) rename src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi_pb2.pyi (100%) rename src/device/service/drivers/{openconfig => gnmi_openconfig/protocols/gnmi}/gnmi_pb2_grpc.py (99%) diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt new file mode 100644 index 000000000..a004e1f58 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt @@ -0,0 +1,25 @@ +This code is partially based on: +https://github.com/nokia/pygnmi/blob/master/gNMI_Subscribe.py + + +MIT License + +Copyright (c) 2017 Nokia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/__init__.py b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/__init__.py new file mode 100644 index 000000000..38d04994f --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/src/device/service/drivers/openconfig/gnmi.proto b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi.proto similarity index 100% rename from src/device/service/drivers/openconfig/gnmi.proto rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi.proto diff --git a/src/device/service/drivers/openconfig/gnmi_ext.proto b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_ext.proto similarity index 100% rename from src/device/service/drivers/openconfig/gnmi_ext.proto rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_ext.proto diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.py b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py similarity index 100% rename from src/device/service/drivers/openconfig/gnmi_pb2.py rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.py.old 
b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py.old similarity index 100% rename from src/device/service/drivers/openconfig/gnmi_pb2.py.old rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py.old diff --git a/src/device/service/drivers/openconfig/gnmi_pb2.pyi b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.pyi similarity index 100% rename from src/device/service/drivers/openconfig/gnmi_pb2.pyi rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.pyi diff --git a/src/device/service/drivers/openconfig/gnmi_pb2_grpc.py b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2_grpc.py similarity index 99% rename from src/device/service/drivers/openconfig/gnmi_pb2_grpc.py rename to src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2_grpc.py index 43fb01413..517d3d9eb 100644 --- a/src/device/service/drivers/openconfig/gnmi_pb2_grpc.py +++ b/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2_grpc.py @@ -2,7 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -import gnmi_pb2 as gnmi__pb2 +from . 
import gnmi_pb2 as gnmi__pb2 class gNMIStub(object): -- GitLab From d46e59edaf4586021fc9435d30c400602399f454 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 14:59:34 +0000 Subject: [PATCH 14/62] Code Cleanup --- .../drivers/openconfig/GnmiTelemetry.py | 166 ------------------ .../openconfig/TelemetryProtocolEnum.py | 27 --- .../service/drivers/openconfig/_Telemetry.py | 27 --- 3 files changed, 220 deletions(-) delete mode 100644 src/device/service/drivers/openconfig/GnmiTelemetry.py delete mode 100644 src/device/service/drivers/openconfig/TelemetryProtocolEnum.py delete mode 100644 src/device/service/drivers/openconfig/_Telemetry.py diff --git a/src/device/service/drivers/openconfig/GnmiTelemetry.py b/src/device/service/drivers/openconfig/GnmiTelemetry.py deleted file mode 100644 index 44b15d519..000000000 --- a/src/device/service/drivers/openconfig/GnmiTelemetry.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Ref: https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md -# Ref: https://github.com/openconfig/gnmi/blob/master/proto/gnmi/gnmi.proto - -import anytree, logging, queue, threading -from typing import Any, Iterator, List, Optional, Tuple, Union -from common.type_checkers.Checkers import chk_float, chk_length, chk_string, chk_type -from device.service.driver_api._Driver import ( - RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES, - _Driver) - -LOGGER = logging.getLogger(__name__) - -SPECIAL_RESOURCE_MAPPINGS = { - RESOURCE_ENDPOINTS : '/endpoints', - RESOURCE_INTERFACES : '/interfaces', - RESOURCE_NETWORK_INSTANCES: '/net-instances', -} - -class MonitoringThread(threading.Thread): - def __init__(self, in_subscriptions : queue.Queue, out_samples : queue.Queue) -> None: - super().__init__(daemon=True) - self._in_subscriptions = in_subscriptions - self._out_samples = out_samples - - def run(self) -> None: - while True: - # TODO: req_iterator = generate_requests(self._in_subscriptions) - # TODO: stub.Subscribe(req_iterator) - self._out_samples.put_nowait((timestamp, resource_key, value)) - -class EmulatedDriver(_Driver): - def __init__(self, address : str, port : int, **settings) -> None: # pylint: disable=super-init-not-called - self.__lock = threading.Lock() - - # endpoints = settings.get('endpoints', []) - - self.__started = threading.Event() - self.__terminate = threading.Event() - - self.__in_subscriptions = queue.Queue() - self.__out_samples = queue.Queue() - - self.__monitoring_thread = MonitoringThread(self.__in_subscriptions, self.__out_samples) - - def Connect(self) -> bool: - # If started, assume it is already connected - if self.__started.is_set(): return True - - # TODO: check capabilities - self.__monitoring_thread.start() - - # Indicate the driver is now connected to the device - self.__started.set() - return True - - def Disconnect(self) -> bool: - # Trigger termination of loops and processes - 
self.__terminate.set() - - # TODO: send unsubscriptions - # TODO: terminate monitoring thread - # TODO: disconnect gRPC - self.__monitoring_thread.join() - - # If not started, assume it is already disconnected - if not self.__started.is_set(): return True - return True - - def GetInitialConfig(self) -> List[Tuple[str, Any]]: - with self.__lock: - return [] - - def GetConfig(self, resource_keys : List[str] = []) -> List[Tuple[str, Union[Any, None, Exception]]]: - chk_type('resources', resource_keys, list) - with self.__lock: - results = [] - for i,resource_key in enumerate(resource_keys): - str_resource_name = 'resource_key[#{:d}]'.format(i) - try: - chk_string(str_resource_name, resource_key, allow_empty=False) - resource_key = SPECIAL_RESOURCE_MAPPINGS.get(resource_key, resource_key) - except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Exception validating {:s}: {:s}'.format(str_resource_name, str(resource_key))) - results.append((resource_key, e)) # if validation fails, store the exception - continue - - # TODO: if resource_key == '/endpoints': retornar lista de endpoints - # results.extend(endpoints) - return results - - def SubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]: - chk_type('subscriptions', subscriptions, list) - if len(subscriptions) == 0: return [] - results = [] - with self.__lock: - for i,subscription in enumerate(subscriptions): - str_subscription_name = 'subscriptions[#{:d}]'.format(i) - try: - chk_type(str_subscription_name, subscription, (list, tuple)) - chk_length(str_subscription_name, subscription, min_length=3, max_length=3) - resource_key,sampling_duration,sampling_interval = subscription - chk_string(str_subscription_name + '.resource_key', resource_key, allow_empty=False) - resource_path = resource_key.split('/') - chk_float(str_subscription_name + '.sampling_duration', sampling_duration, min_value=0) - chk_float(str_subscription_name + '.sampling_interval', 
sampling_interval, min_value=0) - except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Exception validating {:s}: {:s}'.format(str_subscription_name, str(resource_key))) - results.append(e) # if validation fails, store the exception - continue - - # TODO: format subscription - # TODO: self.__in_subscriptions.put_nowait(subscription) - results.append(True) - return results - - def UnsubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]: - chk_type('subscriptions', subscriptions, list) - if len(subscriptions) == 0: return [] - results = [] - resolver = anytree.Resolver(pathattr='name') - with self.__lock: - for i,resource in enumerate(subscriptions): - str_subscription_name = 'resources[#{:d}]'.format(i) - try: - chk_type(str_subscription_name, resource, (list, tuple)) - chk_length(str_subscription_name, resource, min_length=3, max_length=3) - resource_key,sampling_duration,sampling_interval = resource - chk_string(str_subscription_name + '.resource_key', resource_key, allow_empty=False) - resource_path = resource_key.split('/') - chk_float(str_subscription_name + '.sampling_duration', sampling_duration, min_value=0) - chk_float(str_subscription_name + '.sampling_interval', sampling_interval, min_value=0) - except Exception as e: # pylint: disable=broad-except - LOGGER.exception('Exception validating {:s}: {:s}'.format(str_subscription_name, str(resource_key))) - results.append(e) # if validation fails, store the exception - continue - - # TODO: format unsubscription - # TODO: self.__in_subscriptions.put_nowait(unsubscription) - results.append(True) - return results - - def GetState(self, blocking=False, terminate : Optional[threading.Event] = None) -> Iterator[Tuple[str, Any]]: - while True: - if self.__terminate.is_set(): break - if terminate is not None and terminate.is_set(): break - try: - sample = self.__out_samples.get(block=blocking, timeout=0.1) - except queue.Empty: - if blocking: continue 
- return - if sample is None: continue - yield sample diff --git a/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py b/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py deleted file mode 100644 index e5927848f..000000000 --- a/src/device/service/drivers/openconfig/TelemetryProtocolEnum.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import enum -from typing import Optional - -class TelemetryProtocolEnum(enum.Enum): - GNMI = 'gnmi' - NETCONF = 'netconf' - -DEFAULT_TELEMETRY_PROTOCOL = TelemetryProtocolEnum.NETCONF - -def parse_telemetry_protocol(telemetry_protocol : Optional[str] = None) -> TelemetryProtocolEnum: - if telemetry_protocol is None: return DEFAULT_TELEMETRY_PROTOCOL - # pylint: disable=no-member - return TelemetryProtocolEnum._member_map_.get(telemetry_protocol, DEFAULT_TELEMETRY_PROTOCOL) diff --git a/src/device/service/drivers/openconfig/_Telemetry.py b/src/device/service/drivers/openconfig/_Telemetry.py deleted file mode 100644 index efd05993b..000000000 --- a/src/device/service/drivers/openconfig/_Telemetry.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class _Telemetry: - def __init__(self) -> None: - pass - - def subscribe(self) -> None: - pass - - def unsubscribe(self) -> None: - pass - - def get_samples_queue(self) -> None: - pass -- GitLab From 80f5f6cbb5648d9efe8521946f05ecfc39529d62 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:04:41 +0000 Subject: [PATCH 15/62] Device component - OpenConfig Driver: - Extracted gNMI code (will be added to new driver) --- .../drivers/openconfig/NetConfTelemetry.py | 14 -- .../openconfig/NetconfSessionHandler.py | 129 --------------- .../drivers/openconfig/OpenConfigDriver.py | 147 +----------------- .../drivers/openconfig/SamplesCache.py | 100 ------------ 4 files changed, 5 insertions(+), 385 deletions(-) delete mode 100644 src/device/service/drivers/openconfig/NetConfTelemetry.py delete mode 100644 src/device/service/drivers/openconfig/NetconfSessionHandler.py delete mode 100644 src/device/service/drivers/openconfig/SamplesCache.py diff --git a/src/device/service/drivers/openconfig/NetConfTelemetry.py b/src/device/service/drivers/openconfig/NetConfTelemetry.py deleted file mode 100644 index 1549d9811..000000000 --- a/src/device/service/drivers/openconfig/NetConfTelemetry.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - diff --git a/src/device/service/drivers/openconfig/NetconfSessionHandler.py b/src/device/service/drivers/openconfig/NetconfSessionHandler.py deleted file mode 100644 index 746f11d12..000000000 --- a/src/device/service/drivers/openconfig/NetconfSessionHandler.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import logging, threading -from typing import Any, List, Tuple -from ncclient.manager import Manager, connect_ssh -from common.tools.client.RetryDecorator import delay_exponential -from common.type_checkers.Checkers import chk_length, chk_string, chk_type -from device.service.driver_api.Exceptions import UnsupportedResourceKeyException -from .templates import EMPTY_CONFIG, compose_config -from .RetryDecorator import retry - -MAX_RETRIES = 15 -DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) -RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') - -LOGGER = logging.getLogger(__name__) - -class NetconfSessionHandler: - def __init__(self, address : str, port : int, **settings) -> None: - self.__lock = threading.RLock() - self.__connected = threading.Event() - self.__address = address - self.__port = int(port) - self.__username = settings.get('username') - self.__password = settings.get('password') - self.__vendor = settings.get('vendor') - self.__key_filename = settings.get('key_filename') - self.__hostkey_verify = settings.get('hostkey_verify', True) - self.__look_for_keys = settings.get('look_for_keys', True) - self.__allow_agent = settings.get('allow_agent', True) - self.__force_running = settings.get('force_running', False) - self.__commit_per_delete = settings.get('delete_rule', False) - self.__device_params = settings.get('device_params', {}) - self.__manager_params = settings.get('manager_params', {}) - self.__nc_params = settings.get('nc_params', {}) - self.__manager : Manager = None - self.__candidate_supported = False - - def connect(self): - with self.__lock: - self.__manager = connect_ssh( - host=self.__address, port=self.__port, username=self.__username, password=self.__password, - device_params=self.__device_params, manager_params=self.__manager_params, nc_params=self.__nc_params, - key_filename=self.__key_filename, hostkey_verify=self.__hostkey_verify, 
allow_agent=self.__allow_agent, - look_for_keys=self.__look_for_keys) - self.__candidate_supported = ':candidate' in self.__manager.server_capabilities - self.__connected.set() - - def disconnect(self): - if not self.__connected.is_set(): return - with self.__lock: - self.__manager.close_session() - - @property - def use_candidate(self): return self.__candidate_supported and not self.__force_running - - @property - def commit_per_rule(self): return self.__commit_per_delete - - @property - def vendor(self): return self.__vendor - - @RETRY_DECORATOR - def get(self, filter=None, with_defaults=None): # pylint: disable=redefined-builtin - with self.__lock: - return self.__manager.get(filter=filter, with_defaults=with_defaults) - - @RETRY_DECORATOR - def edit_config( - self, config, target='running', default_operation=None, test_option=None, - error_option=None, format='xml' # pylint: disable=redefined-builtin - ): - if config == EMPTY_CONFIG: return - with self.__lock: - self.__manager.edit_config( - config, target=target, default_operation=default_operation, test_option=test_option, - error_option=error_option, format=format) - - def locked(self, target): - return self.__manager.locked(target=target) - - def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): - return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) - -def edit_config( - netconf_handler : NetconfSessionHandler, resources : List[Tuple[str, Any]], delete=False, commit_per_rule= False, - target='running', default_operation='merge', test_option=None, error_option=None, - format='xml' # pylint: disable=redefined-builtin -): - str_method = 'DeleteConfig' if delete else 'SetConfig' - LOGGER.info('[{:s}] resources = {:s}'.format(str_method, str(resources))) - results = [None for _ in resources] - for i,resource in enumerate(resources): - str_resource_name = 'resources[#{:d}]'.format(i) - try: - LOGGER.info('[{:s}] resource = 
{:s}'.format(str_method, str(resource))) - chk_type(str_resource_name, resource, (list, tuple)) - chk_length(str_resource_name, resource, min_length=2, max_length=2) - resource_key,resource_value = resource - chk_string(str_resource_name + '.key', resource_key, allow_empty=False) - str_config_message = compose_config( - resource_key, resource_value, delete=delete, vendor=netconf_handler.vendor) - if str_config_message is None: raise UnsupportedResourceKeyException(resource_key) - LOGGER.info('[{:s}] str_config_message[{:d}] = {:s}'.format( - str_method, len(str_config_message), str(str_config_message))) - netconf_handler.edit_config( - config=str_config_message, target=target, default_operation=default_operation, - test_option=test_option, error_option=error_option, format=format) - if commit_per_rule: - netconf_handler.commit() - results[i] = True - except Exception as e: # pylint: disable=broad-except - str_operation = 'preparing' if target == 'candidate' else ('deleting' if delete else 'setting') - msg = '[{:s}] Exception {:s} {:s}: {:s}' - LOGGER.exception(msg.format(str_method, str_operation, str_resource_name, str(resource))) - results[i] = e # if validation fails, store the exception - return results diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index de41e0cee..c9a9e7e6f 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import anytree, copy, logging, pytz, queue, re, threading, json, os, sys +import anytree, copy, logging, pytz, queue, re, threading #import lxml.etree as ET from datetime import datetime, timedelta from typing import Any, Dict, Iterator, List, Optional, Tuple, Union @@ -31,14 +31,6 @@ from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_su from .templates import ALL_RESOURCE_KEYS, EMPTY_CONFIG, compose_config, get_filter, parse from .RetryDecorator import retry -import grpc -from google.protobuf.json_format import MessageToJson - -gnmi_path__ = os.path.dirname(os.path.abspath(__file__)) -sys.path.append(gnmi_path__) -import gnmi_pb2_grpc -import gnmi_pb2 - DEBUG_MODE = False logging.getLogger('ncclient.manager').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) @@ -139,106 +131,6 @@ class NetconfSessionHandler: def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) -class gNMISessionHandler: - def __init__(self, address : str, **settings) -> None: - self.__lock = threading.RLock() - self.__connected = threading.Event() - self.__address = address - self.__port = settings.get('gnmi_port') - self.__username = settings.get('username') - self.__password = settings.get('password') - self.__vendor = settings.get('vendor') - self.__key_filename = settings.get('key_filename') - self.__hostkey_verify = settings.get('hostkey_verify', True) - self.__look_for_keys = settings.get('look_for_keys', True) - self.__allow_agent = settings.get('allow_agent', True) - self.__force_running = settings.get('force_running', False) - self.__commit_per_delete = settings.get('delete_rule', False) - self.__device_params = settings.get('device_params', {}) - self.__manager_params = settings.get('manager_params', {}) - self.__nc_params = settings.get('nc_params', {}) - self.__stub = None - self.__candidate_supported = False - 
self.__channel = None - self.__supportedEncodings = None - self.__options = Options() - - def connect(self): - with self.__lock: - self.__channel = grpc.insecure_channel(str(self.__address)+':'+self.__port) - self.__stub = gnmi_pb2_grpc.gNMIStub(self.__channel) - metadata = [('username',self.__username ), ('password', self.__password)] - req = gnmi_pb2.CapabilityRequest() - response = self.__stub.Capabilities(req, metadata=metadata) - data = json.loads(MessageToJson(response)) - self.__supportedEncodings = data['supportedEncodings'] - # TODO: self.__candidate_supported = - self.__connected.set() - - def disconnect(self): - if not self.__connected.is_set(): return - with self.__lock: - self.__channel.close() - - def subscribeStreaming(self, subscription : Tuple[str, float, float], out_samples : queue.Queue) -> None: - resource_key, sampling_duration, sampling_interval = subscription - options = copy.deepcopy(self.__options) - options.xpaths = [parse_xpath(resource_key)] - options.timeout = int(sampling_duration) - options.interval = int(sampling_interval) - req_iterator = gen_request(options) - metadata = [('username',self.__username), ('password', self.__password)] - responses = self.__stub.Subscribe(req_iterator, self.__options.timeout, metadata=metadata) - previous_sample = None - delta = 0.0 - previous_timestamp = datetime.timestamp(datetime.utcnow()) - for response in responses: - data = json.loads(MessageToJson(response)) - if data.get("update") is not None and data.get("update").get("update") != None: - now = datetime.timestamp(datetime.utcnow()) - for element in data['update']['update']: - counter_name = split_resource_key(dict_to_xpath(element['path'])) - if counter_name == split_resource_key(resource_key): - value = int(element['val']['uintVal']) - delay = now - previous_timestamp - if previous_sample is not None: delta = (value - previous_sample)/delay - previous_sample = int(value) - previous_timestamp = now - sample = (now, resource_key, delta) - 
out_samples.put_nowait(sample) - - @property - def use_candidate(self): return self.__candidate_supported and not self.__force_running - - @property - def commit_per_rule(self): return self.__commit_per_delete - - @property - def vendor(self): return self.__vendor - - @RETRY_DECORATOR - def get(self): # pylint: disable=redefined-builtin - return False - - @RETRY_DECORATOR - def edit_config( - self, config, target='running', default_operation=None, test_option=None, - error_option=None, format='xml' # pylint: disable=redefined-builtin - ): - if config == EMPTY_CONFIG: return - with self.__lock: - self.__manager.edit_config( - config, target=target, default_operation=default_operation, test_option=test_option, - error_option=error_option, format=format) - - @RETRY_DECORATOR - def locked(self, target): - return self.__manager.locked(target=target) - - @RETRY_DECORATOR - def commit(self, confirmed=False, timeout=None, persist=None, persist_id=None): - return self.__manager.commit(confirmed=confirmed, timeout=timeout, persist=persist, persist_id=persist_id) - def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): if previous_sample is None: return None if previous_timestamp is None: return None @@ -259,14 +151,13 @@ def compute_delta_sample(previous_sample, previous_timestamp, current_sample, cu return delta_sample class SamplesCache: - def __init__(self, netconf_handler : NetconfSessionHandler, gNMI_handler : gNMISessionHandler, logger : logging.Logger) -> None: + def __init__(self, netconf_handler : NetconfSessionHandler, logger : logging.Logger) -> None: self.__netconf_handler = netconf_handler self.__logger = logger self.__lock = threading.Lock() self.__timestamp = None self.__absolute_samples = {} self.__delta_samples = {} - self.__gNMI_handler = gNMI_handler def _refresh_samples(self) -> None: with self.__lock: @@ -311,24 +202,6 @@ def do_sampling( except: # pylint: disable=bare-except logger.exception('Error retrieving 
samples') -class Options: - def __init__(self, xpaths=None, prefix=None, mode=0, submode=0, suppress=False, interval=0, - encoding='JSON', heartbeat=0, qos=None, aggregate=False, server=None, username='admin', password='admin', timeout=None): - self.xpaths = xpaths - self.prefix = prefix - self.mode = mode - self.submode = submode - self.suppress = suppress - self.interval = interval - self.encoding = encoding - self.heartbeat = heartbeat - self.qos = qos - self.aggregate = aggregate - self.server = server - self.username = username - self.password = password - self.timeout = timeout - def edit_config( # edit the configuration of openconfig devices netconf_handler : NetconfSessionHandler, logger : logging.Logger, resources : List[Tuple[str, Any]], delete=False, commit_per_rule=False, target='running', default_operation='merge', test_option=None, error_option=None, @@ -386,7 +259,6 @@ class OpenConfigDriver(_Driver): self.__subscriptions = TreeNode('.') self.__started = threading.Event() self.__terminate = threading.Event() - self.__gnmi_monitoring = settings.get('monitoring_protocol') == 'gnmi' self.__scheduler = BackgroundScheduler(daemon=True) # scheduler used to emulate sampling events self.__scheduler.configure( jobstores = {'default': MemoryJobStore()}, @@ -395,16 +267,12 @@ class OpenConfigDriver(_Driver): timezone=pytz.utc) self.__out_samples = queue.Queue() self.__netconf_handler = NetconfSessionHandler(self.address, self.port, **(self.settings)) - self.__gNMI_handler : gNMISessionHandler = gNMISessionHandler(address, **settings) - self.__samples_cache = SamplesCache(self.__netconf_handler, self.__gNMI_handler, self.__logger) + self.__samples_cache = SamplesCache(self.__netconf_handler, self.__logger) def Connect(self) -> bool: with self.__lock: if self.__started.is_set(): return True self.__netconf_handler.connect() - if self.__gnmi_monitoring: - self.__gNMI_handler.connect() - LOGGER.debug('Using gNMI as monitoring protocol') # Connect triggers activation 
of sampling events that will be scheduled based on subscriptions self.__scheduler.start() self.__started.set() @@ -418,7 +286,7 @@ class OpenConfigDriver(_Driver): if not self.__started.is_set(): return True # Disconnect triggers deactivation of sampling events self.__scheduler.shutdown() - if self.__gnmi_monitoring: self.__netconf_handler.disconnect() + self.__netconf_handler.disconnect() return True @metered_subclass_method(METRICS_POOL) @@ -506,12 +374,7 @@ class OpenConfigDriver(_Driver): job_id = 'k={:s}/d={:f}/i={:f}'.format(resource_key, sampling_duration, sampling_interval) - if self.__gnmi_monitoring: - LOGGER.debug('Processing gNMI subscription: '+ str(subscription)) - job = threading.Thread(target=self.__gNMI_handler.subscribeStreaming, args=(subscription, self.__out_samples)) - job.start() - else: - job = self.__scheduler.add_job( + job = self.__scheduler.add_job( do_sampling, args=(self.__samples_cache, self.__logger, resource_key, self.__out_samples), kwargs={}, id=job_id, trigger='interval', seconds=sampling_interval, start_date=start_date, end_date=end_date, timezone=pytz.utc) diff --git a/src/device/service/drivers/openconfig/SamplesCache.py b/src/device/service/drivers/openconfig/SamplesCache.py deleted file mode 100644 index 24dc33663..000000000 --- a/src/device/service/drivers/openconfig/SamplesCache.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Collection of samples through NetConf is very slow and each request collects all the data. -# Populate a cache periodically (when first interface is interrogated). -# Evict data after some seconds, when data is considered as outdated - -import copy, queue, logging, re, threading -from datetime import datetime -from typing import Dict, Tuple -from .templates import get_filter, parse -from .NetconfSessionHandler import NetconfSessionHandler - -SAMPLE_EVICTION_SECONDS = 30.0 # seconds -SAMPLE_RESOURCE_KEY = 'interfaces/interface/state/counters' - -RE_GET_ENDPOINT_FROM_INTERFACE_KEY = re.compile(r'.*interface\[([^\]]+)\].*') -RE_GET_ENDPOINT_FROM_INTERFACE_XPATH = re.compile(r".*interface\[oci\:name\='([^\]]+)'\].*") - -LOGGER = logging.getLogger(__name__) - -def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): - if previous_sample is None: return None - if previous_timestamp is None: return None - if current_sample is None: return None - if current_timestamp is None: return None - delay = current_timestamp - previous_timestamp - field_keys = set(previous_sample.keys()).union(current_sample.keys()) - field_keys.discard('name') - delta_sample = {'name': previous_sample['name']} - for field_key in field_keys: - previous_sample_value = previous_sample[field_key] - if not isinstance(previous_sample_value, (int, float)): continue - current_sample_value = current_sample[field_key] - if not isinstance(current_sample_value, (int, float)): continue - delta_value = current_sample_value - previous_sample_value - if delta_value < 0: continue - delta_sample[field_key] = delta_value / delay - return delta_sample - -class SamplesCache: - def __init__(self, netconf_handler : NetconfSessionHandler) -> None: - self.__netconf_handler = netconf_handler - self.__lock = threading.Lock() - self.__timestamp = None - self.__absolute_samples 
= {} - self.__delta_samples = {} - - def _refresh_samples(self) -> None: - with self.__lock: - try: - now = datetime.timestamp(datetime.utcnow()) - if self.__timestamp is not None and (now - self.__timestamp) < SAMPLE_EVICTION_SECONDS: return - str_filter = get_filter(SAMPLE_RESOURCE_KEY) - xml_data = self.__netconf_handler.get(filter=str_filter).data_ele - interface_samples = parse(SAMPLE_RESOURCE_KEY, xml_data) - for interface,samples in interface_samples: - match = RE_GET_ENDPOINT_FROM_INTERFACE_KEY.match(interface) - if match is None: continue - interface = match.group(1) - delta_sample = compute_delta_sample( - self.__absolute_samples.get(interface), self.__timestamp, samples, now) - if delta_sample is not None: self.__delta_samples[interface] = delta_sample - self.__absolute_samples[interface] = samples - self.__timestamp = now - except: # pylint: disable=bare-except - LOGGER.exception('Error collecting samples') - - def get(self, resource_key : str) -> Tuple[float, Dict]: - self._refresh_samples() - match = RE_GET_ENDPOINT_FROM_INTERFACE_XPATH.match(resource_key) - with self.__lock: - if match is None: return self.__timestamp, {} - interface = match.group(1) - return self.__timestamp, copy.deepcopy(self.__delta_samples.get(interface, {})) - -def do_sampling(samples_cache : SamplesCache, resource_key : str, out_samples : queue.Queue) -> None: - try: - timestamp, samples = samples_cache.get(resource_key) - counter_name = resource_key.split('/')[-1].split(':')[-1] - value = samples.get(counter_name) - if value is None: - LOGGER.warning('[do_sampling] value not found for {:s}'.format(resource_key)) - return - # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s} - sample = (timestamp, resource_key, value) - out_samples.put_nowait(sample) - except: # pylint: disable=bare-except - LOGGER.exception('Error retrieving samples') -- GitLab From ecd6bd72f41b17bdd8e86bd593efd464853ee453 Mon Sep 17 00:00:00 2001 From: gifrerenom 
Date: Wed, 14 Jun 2023 16:11:03 +0000 Subject: [PATCH 16/62] Code Cleanup --- .../drivers/openconfig/OpenConfigDriver.py | 42 +++++++++---------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index c9a9e7e6f..0d4acd3f4 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -55,24 +55,23 @@ RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, class NetconfSessionHandler: def __init__(self, address : str, port : int, **settings) -> None: - mensaje = f"__init__: address={address}, port={port}, settings={settings}" self.__lock = threading.RLock() self.__connected = threading.Event() self.__address = address self.__port = int(port) - self.__username = settings.get('username') - self.__password = settings.get('password') - self.__vendor = settings.get('vendor') - self.__version = settings.get('version', "1") - self.__key_filename = settings.get('key_filename') - self.__hostkey_verify = settings.get('hostkey_verify', True) - self.__look_for_keys = settings.get('look_for_keys', True) - self.__allow_agent = settings.get('allow_agent', True) - self.__force_running = settings.get('force_running', False) - self.__commit_per_rule = settings.get('commit_per_rule', False) - self.__device_params = settings.get('device_params', {}) - self.__manager_params = settings.get('manager_params', {}) - self.__nc_params = settings.get('nc_params', {}) + self.__username = settings.get('username') + self.__password = settings.get('password') + self.__vendor = settings.get('vendor') + self.__version = settings.get('version', "1") + self.__key_filename = settings.get('key_filename') + self.__hostkey_verify = settings.get('hostkey_verify', True) + self.__look_for_keys = settings.get('look_for_keys', True) + self.__allow_agent = settings.get('allow_agent', 
True) + self.__force_running = settings.get('force_running', False) + self.__commit_per_rule = settings.get('commit_per_rule', False) + self.__device_params = settings.get('device_params', {}) + self.__manager_params = settings.get('manager_params', {}) + self.__nc_params = settings.get('nc_params', {}) self.__message_renderer = settings.get('message_renderer','jinja') self.__manager : Manager = None self.__candidate_supported = False @@ -202,13 +201,13 @@ def do_sampling( except: # pylint: disable=bare-except logger.exception('Error retrieving samples') -def edit_config( # edit the configuration of openconfig devices +def edit_config( netconf_handler : NetconfSessionHandler, logger : logging.Logger, resources : List[Tuple[str, Any]], delete=False, commit_per_rule=False, target='running', default_operation='merge', test_option=None, error_option=None, format='xml' # pylint: disable=redefined-builtin ): str_method = 'DeleteConfig' if delete else 'SetConfig' - logger.debug('[{:s}] resources = {:s}'.format(str_method, str(resources))) + #logger.debug('[{:s}] resources = {:s}'.format(str_method, str(resources))) results = [None for _ in resources] for i,resource in enumerate(resources): str_resource_name = 'resources[#{:d}]'.format(i) @@ -218,17 +217,17 @@ def edit_config( chk_length(str_resource_name, resource, min_length=2, max_length=2) resource_key,resource_value = resource chk_string(str_resource_name + '.key', resource_key, allow_empty=False) - str_config_messages = compose_config( # get template for configuration + str_config_messages = compose_config( resource_key, resource_value, delete=delete, vendor=netconf_handler.vendor, message_renderer=netconf_handler.message_renderer) - for str_config_message in str_config_messages: # configuration of the received templates + for str_config_message in str_config_messages: if str_config_message is None: raise UnsupportedResourceKeyException(resource_key) logger.debug('[{:s}] str_config_message[{:d}] = {:s}'.format( 
str_method, len(str_config_message), str(str_config_message))) - netconf_handler.edit_config( # configure the device + netconf_handler.edit_config( config=str_config_message, target=target, default_operation=default_operation, test_option=test_option, error_option=error_option, format=format) if commit_per_rule: - netconf_handler.commit() # configuration commit + netconf_handler.commit() results[i] = True except Exception as e: # pylint: disable=broad-except str_operation = 'preparing' if target == 'candidate' else ('deleting' if delete else 'setting') @@ -373,7 +372,6 @@ class OpenConfigDriver(_Driver): end_date = start_date + timedelta(seconds=sampling_duration) job_id = 'k={:s}/d={:f}/i={:f}'.format(resource_key, sampling_duration, sampling_interval) - job = self.__scheduler.add_job( do_sampling, args=(self.__samples_cache, self.__logger, resource_key, self.__out_samples), kwargs={}, id=job_id, trigger='interval', seconds=sampling_interval, @@ -438,4 +436,4 @@ class OpenConfigDriver(_Driver): if blocking: continue return if sample is None: continue - yield sample \ No newline at end of file + yield sample -- GitLab From 63f5af8635791978ce6b4d565960be839dc42d13 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:12:57 +0000 Subject: [PATCH 17/62] Code Cleanup & header addition --- .../openconfig/templates/ACL/ACL_multivendor.py | 14 ++++++++++++++ .../drivers/openconfig/templates/ACL/__init__.py | 14 ++++++++++++++ .../templates/VPN/Interfaces_multivendor.py | 14 ++++++++++++++ .../templates/VPN/Network_instance_multivendor.py | 14 ++++++++++++++ .../openconfig/templates/VPN/Routing_policy.py | 14 ++++++++++++++ .../drivers/openconfig/templates/VPN/__init__.py | 14 ++++++++++++++ 6 files changed, 84 insertions(+) create mode 100644 src/device/service/drivers/openconfig/templates/ACL/__init__.py create mode 100644 src/device/service/drivers/openconfig/templates/VPN/__init__.py diff --git 
a/src/device/service/drivers/openconfig/templates/ACL/ACL_multivendor.py b/src/device/service/drivers/openconfig/templates/ACL/ACL_multivendor.py index 4d332e5d0..bcee6ab64 100755 --- a/src/device/service/drivers/openconfig/templates/ACL/ACL_multivendor.py +++ b/src/device/service/drivers/openconfig/templates/ACL/ACL_multivendor.py @@ -1,3 +1,17 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .openconfig_acl import openconfig_acl from pyangbind.lib.serialise import pybindIETFXMLEncoder from common.tools.grpc.Tools import grpc_message_to_json diff --git a/src/device/service/drivers/openconfig/templates/ACL/__init__.py b/src/device/service/drivers/openconfig/templates/ACL/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/device/service/drivers/openconfig/templates/ACL/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/src/device/service/drivers/openconfig/templates/VPN/Interfaces_multivendor.py b/src/device/service/drivers/openconfig/templates/VPN/Interfaces_multivendor.py index 6cfe525d7..f4bc9abbb 100644 --- a/src/device/service/drivers/openconfig/templates/VPN/Interfaces_multivendor.py +++ b/src/device/service/drivers/openconfig/templates/VPN/Interfaces_multivendor.py @@ -1,3 +1,17 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .openconfig_interfaces import openconfig_interfaces from pyangbind.lib.serialise import pybindIETFXMLEncoder diff --git a/src/device/service/drivers/openconfig/templates/VPN/Network_instance_multivendor.py b/src/device/service/drivers/openconfig/templates/VPN/Network_instance_multivendor.py index aaedd4b92..120602976 100644 --- a/src/device/service/drivers/openconfig/templates/VPN/Network_instance_multivendor.py +++ b/src/device/service/drivers/openconfig/templates/VPN/Network_instance_multivendor.py @@ -1,3 +1,17 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from .openconfig_network_instance import openconfig_network_instance from pyangbind.lib.serialise import pybindIETFXMLEncoder diff --git a/src/device/service/drivers/openconfig/templates/VPN/Routing_policy.py b/src/device/service/drivers/openconfig/templates/VPN/Routing_policy.py index 895385a65..935354ce6 100644 --- a/src/device/service/drivers/openconfig/templates/VPN/Routing_policy.py +++ b/src/device/service/drivers/openconfig/templates/VPN/Routing_policy.py @@ -1,3 +1,17 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from .openconfig_routing_policy import openconfig_routing_policy from pyangbind.lib.serialise import pybindIETFXMLEncoder diff --git a/src/device/service/drivers/openconfig/templates/VPN/__init__.py b/src/device/service/drivers/openconfig/templates/VPN/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/device/service/drivers/openconfig/templates/VPN/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From ad015728605b25b1fe83cffbb53f2bc228bbaaf6 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:22:17 +0000 Subject: [PATCH 18/62] Service component: - Minor code cleanup --- src/service/service/service_handler_api/Tools.py | 1 + .../service_handlers/l2nm_emulated/ConfigRules.py | 9 --------- .../service_handlers/l3nm_openconfig/ConfigRules.py | 3 --- 3 files changed, 1 insertion(+), 12 deletions(-) diff --git a/src/service/service/service_handler_api/Tools.py b/src/service/service/service_handler_api/Tools.py index 787b0f499..b06d128d9 100644 --- a/src/service/service/service_handler_api/Tools.py +++ b/src/service/service/service_handler_api/Tools.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import functools, re from typing import Any, List, Optional, Tuple, Union from common.method_wrappers.ServiceExceptions import NotFoundException diff --git a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py index 747e6c498..ef85550f2 100644 --- a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py +++ b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py @@ -27,12 +27,6 @@ def setup_config_rules( json_settings : Dict = service_settings.value json_endpoint_settings : Dict = endpoint_settings.value - if service_settings is None: return [] - if endpoint_settings is None: return [] - - json_settings : Dict = service_settings.value - json_endpoint_settings : Dict = endpoint_settings.value - #mtu = json_settings.get('mtu', 1450 ) # 1512 #address_families = json_settings.get('address_families', [] ) # ['IPV4'] #bgp_as = json_settings.get('bgp_as', 0 ) # 65000 @@ -98,9 +92,6 @@ def teardown_config_rules( json_settings : Dict = service_settings.value json_endpoint_settings : Dict = endpoint_settings.value - if service_settings is None: return [] - if endpoint_settings is None: return [] - #json_settings : Dict = service_settings.value json_endpoint_settings : Dict = endpoint_settings.value diff --git a/src/service/service/service_handlers/l3nm_openconfig/ConfigRules.py b/src/service/service/service_handlers/l3nm_openconfig/ConfigRules.py index b2ae12c31..b25d3b684 100644 --- a/src/service/service/service_handlers/l3nm_openconfig/ConfigRules.py +++ b/src/service/service/service_handlers/l3nm_openconfig/ConfigRules.py @@ -29,9 +29,6 @@ def setup_config_rules( json_settings : Dict = service_settings.value json_endpoint_settings : Dict = endpoint_settings.value - json_settings : Dict = {} if service_settings is None else service_settings.value - json_endpoint_settings : Dict = {} if endpoint_settings is None else endpoint_settings.value - mtu = 
json_settings.get('mtu', 1450 ) # 1512 #address_families = json_settings.get('address_families', [] ) # ['IPV4'] bgp_as = json_settings.get('bgp_as', 65000 ) # 65000 -- GitLab From 038399b731e88950a39556cd2cdc5e00db888df6 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:23:26 +0000 Subject: [PATCH 19/62] Proto: - Added gNMI OpenConfig Driver --- proto/context.proto | 1 + 1 file changed, 1 insertion(+) diff --git a/proto/context.proto b/proto/context.proto index 3104f1b54..363942b81 100644 --- a/proto/context.proto +++ b/proto/context.proto @@ -195,6 +195,7 @@ enum DeviceDriverEnum { DEVICEDRIVER_ONF_TR_352 = 5; DEVICEDRIVER_XR = 6; DEVICEDRIVER_IETF_L2VPN = 7; + DEVICEDRIVER_GNMI_OPENCONFIG = 8; } enum DeviceOperationalStatusEnum { -- GitLab From fcc9d352f8c3e6ab82b85ee93d0854cb07e4827d Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:23:41 +0000 Subject: [PATCH 20/62] Common - Type Checkers: - Added gNMI OpenConfig Driver --- src/common/type_checkers/Assertions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/common/type_checkers/Assertions.py b/src/common/type_checkers/Assertions.py index d5476a953..25eb42fab 100644 --- a/src/common/type_checkers/Assertions.py +++ b/src/common/type_checkers/Assertions.py @@ -34,6 +34,7 @@ def validate_device_driver_enum(message): 'DEVICEDRIVER_ONF_TR_352', 'DEVICEDRIVER_XR', 'DEVICEDRIVER_IETF_L2VPN', + 'DEVICEDRIVER_GNMI_OPENCONFIG', ] def validate_device_operational_status_enum(message): -- GitLab From 726c44d97554a89d3c36df12c0e54fdd5a921058 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:23:54 +0000 Subject: [PATCH 21/62] Context component: - Added gNMI OpenConfig Driver --- src/context/service/database/models/enums/DeviceDriver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/context/service/database/models/enums/DeviceDriver.py b/src/context/service/database/models/enums/DeviceDriver.py index a612803e2..09be94b1d 100644 --- 
a/src/context/service/database/models/enums/DeviceDriver.py +++ b/src/context/service/database/models/enums/DeviceDriver.py @@ -25,6 +25,7 @@ class ORM_DeviceDriverEnum(enum.Enum): ONF_TR_352 = DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352 XR = DeviceDriverEnum.DEVICEDRIVER_XR IETF_L2VPN = DeviceDriverEnum.DEVICEDRIVER_IETF_L2VPN + GNMI_OPENCONFIG = DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG grpc_to_enum__device_driver = functools.partial( grpc_to_enum, DeviceDriverEnum, ORM_DeviceDriverEnum) -- GitLab From 8b3738cfe03bbd4750260d9730347756247adddb Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:25:51 +0000 Subject: [PATCH 22/62] Hackfest: - Added ContainerLab exercise --- .dockerignore | 7 +++ hackfest/containerlab/.gitignore | 2 + hackfest/containerlab/commands.txt | 61 +++++++++++++++++++++ hackfest/containerlab/tfs-scenario.clab.yml | 59 ++++++++++++++++++++ 4 files changed, 129 insertions(+) create mode 100644 .dockerignore create mode 100644 hackfest/containerlab/.gitignore create mode 100644 hackfest/containerlab/commands.txt create mode 100644 hackfest/containerlab/tfs-scenario.clab.yml diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..efb309338 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,7 @@ +# Avoid including these folders when building the components +coverage/ +data/ +deploy/ +manifests/ +hackfest/ +scripts/ diff --git a/hackfest/containerlab/.gitignore b/hackfest/containerlab/.gitignore new file mode 100644 index 000000000..48cbf3879 --- /dev/null +++ b/hackfest/containerlab/.gitignore @@ -0,0 +1,2 @@ +clab-tfs-scenario +.tfs-scenario.clab.yml.bak diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt new file mode 100644 index 000000000..ae023a294 --- /dev/null +++ b/hackfest/containerlab/commands.txt @@ -0,0 +1,61 @@ +############ +# ContainerLab +############ + +Refs: 
+https://documentation.nokia.com/srlinux/22-6/SR_Linux_Book_Files/SysMgmt_Guide/data-models.html#openconfig-ov +https://documentation.nokia.com/srlinux/SR_Linux_HTML_R21-11/SysMgmt_Guide/gnmi-interface.html#ai9ersv4qe +https://github.com/openconfig/kne/blob/v0.1.9/examples/nokia/srlinux-services/srl-openconfig.cfg.json +https://containerlab.dev/manual/kinds/srl/#default-node-configuration +https://learn.srlinux.dev/tutorials/infrastructure/kne/srl-with-oc-services/ +https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md +https://gnmic.kmrd.dev/cmd/get/ + + +IMPORTANT: for Nokia SR Linux, use kind "srl" and type "ixr6" + +## Download and install the latest release +$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" + +## Deploy proposed two SR node scenario +$ cd ~/tfs-ctrl/hackfest/containerlab +$ sudo containerlab deploy --topo tfs-scenario.clab.yml + +## Access SR Bash +$ docker exec -it clab-tfs-scenario-srl1 bash + +## Acess SR CLI +$ docker exec -it clab-tfs-scenario-srl1 sr_cli + +## Destroy scenario +$ sudo containerlab destroy --topo tfs-scenario.clab.yml + +## Install gNMIc +$ sudo bash -c "$(curl -sL https://get-gnmic.kmrd.dev)" + +## gNMI Capabilities request +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify capabilities + +## gNMI Get request +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path /system/name/host-name +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path /interface[name=mgmt0] + +## gNMI Set request +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --update-path /system/name/host-name --update-value slr11 + +(we check the changed value) +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path /system/name/host-name + +## Subscribe request +$ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! 
--skip-verify -e json_ietf subscribe --path /interface[name=mgmt0]/statistics +(In another terminal, you can generate traffic) +$ssh admin@clab-srlinux-srl1 + + +## Enable OpenConfig data models and set as default: +$ docker exec -it clab-tfs-scenario-srl1 sr_cli +# enter candidate +# system management openconfig admin-state enable +# system gnmi-server network-instance mgmt yang-models openconfig +# commit stay +# quit diff --git a/hackfest/containerlab/tfs-scenario.clab.yml b/hackfest/containerlab/tfs-scenario.clab.yml new file mode 100644 index 000000000..c8d7dfde8 --- /dev/null +++ b/hackfest/containerlab/tfs-scenario.clab.yml @@ -0,0 +1,59 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Example based on clos01 example (http://containerlab.dev/lab-examples/min-clos/) + +# TFS 2 Nokia SR Linux nodes + 2 Linux clients +name: tfs-scenario + +mgmt: + network: mgmt-net + ipv4-subnet: 172.100.100.0/24 + +topology: + kinds: + srl: + image: ghcr.io/nokia/srlinux + linux: + image: ghcr.io/hellt/network-multitool + nodes: + srl1: + kind: srl + type: ixr6 + cpu: 0.5 + memory: 500MB + mgmt-ipv4: 172.100.100.101 + #startup-config: srl1.cli + srl2: + kind: srl + type: ixr6 + cpu: 0.5 + memory: 500MB + mgmt-ipv4: 172.100.100.102 + #startup-config: srl2.cli + client1: + kind: linux + cpu: 0.1 + memory: 100MB + mgmt-ipv4: 172.100.100.201 + client2: + kind: linux + cpu: 0.1 + memory: 100MB + mgmt-ipv4: 172.100.100.202 + + links: + - endpoints: ["srl1:e1-1", "srl2:e1-1"] + - endpoints: ["client1:eth1", "srl1:e1-2"] + - endpoints: ["client2:eth1", "srl2:e1-2"] -- GitLab From eedd26b267aafaab041dfff74bc7c4647f86b19a Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:27:08 +0000 Subject: [PATCH 23/62] WebUI component: - Added gNMI OpenConfig Driver --- src/webui/service/device/forms.py | 1 + src/webui/service/device/routes.py | 2 ++ src/webui/service/templates/device/add.html | 25 +++++++++------------ 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/webui/service/device/forms.py b/src/webui/service/device/forms.py index 24bc92b3a..a6e07fe3c 100644 --- a/src/webui/service/device/forms.py +++ b/src/webui/service/device/forms.py @@ -30,6 +30,7 @@ class AddDeviceForm(FlaskForm): device_drivers_onf_tr_352 = BooleanField('ONF_TR_352') device_drivers_xr = BooleanField('XR') device_drivers_ietf_l2vpn = BooleanField('IETF L2VPN') + device_drivers_gnmi_openconfig = BooleanField('GNMI OPENCONFIG') device_config_address = StringField('connect/address',default='127.0.0.1',validators=[DataRequired(), Length(min=5)]) device_config_port = StringField('connect/port',default='0',validators=[DataRequired(), Length(min=1)]) device_config_settings = 
TextAreaField('connect/settings',default='{}',validators=[DataRequired(), Length(min=2)]) diff --git a/src/webui/service/device/routes.py b/src/webui/service/device/routes.py index bc4684770..4590c7f01 100644 --- a/src/webui/service/device/routes.py +++ b/src/webui/service/device/routes.py @@ -122,6 +122,8 @@ def add(): device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_XR) if form.device_drivers_ietf_l2vpn.data: device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_IETF_L2VPN) + if form.device_drivers_gnmi_openconfig.data: + device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG) device_obj.device_drivers.extend(device_drivers) # pylint: disable=no-member try: diff --git a/src/webui/service/templates/device/add.html b/src/webui/service/templates/device/add.html index 6b11a1920..c9165667d 100644 --- a/src/webui/service/templates/device/add.html +++ b/src/webui/service/templates/device/add.html @@ -81,20 +81,17 @@ {% endfor %} {% else %} - {{ form.device_drivers_undefined }} {{ form.device_drivers_undefined.label(class="col-sm-3 - col-form-label") }} - {{ form.device_drivers_openconfig }} {{ form.device_drivers_openconfig.label(class="col-sm-3 - col-form-label") }} - {{ form.device_drivers_transport_api }} {{ form.device_drivers_transport_api.label(class="col-sm-3 - col-form-label") }} -
{{ form.device_drivers_p4 }} {{ form.device_drivers_p4.label(class="col-sm-3 col-form-label") }} - {{ form.device_drivers_ietf_network_topology }} {{ - form.device_drivers_ietf_network_topology.label(class="col-sm-3 - col-form-label") }} - {{ form.device_drivers_onf_tr_352 }} {{ form.device_drivers_onf_tr_352.label(class="col-sm-3 - col-form-label") }}
- {{ form.device_drivers_xr }} {{ form.device_drivers_xr.label(class="col-sm-3 - col-form-label") }} + {{ form.device_drivers_undefined }} {{ form.device_drivers_undefined.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_openconfig }} {{ form.device_drivers_openconfig.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_transport_api }} {{ form.device_drivers_transport_api.label(class="col-sm-3 col-form-label") }} +
+ {{ form.device_drivers_p4 }} {{ form.device_drivers_p4.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_ietf_network_topology }} {{form.device_drivers_ietf_network_topology.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_onf_tr_352 }} {{ form.device_drivers_onf_tr_352.label(class="col-sm-3 col-form-label") }} +
+ {{ form.device_drivers_xr }} {{ form.device_drivers_xr.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_ietf_l2vpn }} {{ form.device_drivers_ietf_l2vpn.label(class="col-sm-3 col-form-label") }} + {{ form.device_drivers_gnmi_openconfig }} {{ form.device_drivers_gnmi_openconfig.label(class="col-sm-3 col-form-label") }} {% endif %} -- GitLab From 756eb99775b110696ebc5df7225b262c8977bb31 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:28:03 +0000 Subject: [PATCH 24/62] WebUI component: - Code cleanup --- src/webui/service/service/routes.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index a24e6169c..62f39d375 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -112,11 +112,11 @@ def home(): ste=ServiceTypeEnum, sse=ServiceStatusEnum, active_drivers=active_drivers) -@service.route('add', methods=['GET', 'POST']) -def add(): - flash('Add service route called', 'danger') - raise NotImplementedError() - #return render_template('service/home.html') +#@service.route('add', methods=['GET', 'POST']) +#def add(): +# flash('Add service route called', 'danger') +# raise NotImplementedError() +# #return render_template('service/home.html') def get_hub_module_name(dev: Device) -> Optional[str]: -- GitLab From a8a9b0fc3a798d73a85a41b766fcc668ac4e2221 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Wed, 14 Jun 2023 16:28:45 +0000 Subject: [PATCH 25/62] WebUI component: - Code cleanup --- src/webui/service/service/routes.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index 62f39d375..b5873de7e 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -112,13 +112,6 @@ def home(): ste=ServiceTypeEnum, sse=ServiceStatusEnum, active_drivers=active_drivers) -#@service.route('add', 
methods=['GET', 'POST']) -#def add(): -# flash('Add service route called', 'danger') -# raise NotImplementedError() -# #return render_template('service/home.html') - - def get_hub_module_name(dev: Device) -> Optional[str]: for cr in dev.device_config.config_rules: if cr.action == ConfigActionEnum.CONFIGACTION_SET and cr.custom and cr.custom.resource_key == "_connect/settings": -- GitLab From 8aa0b1154faa32cce1978657a974b55fe0a6e5fd Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 07:59:02 +0000 Subject: [PATCH 26/62] WebUI component: - deactivated unstable code - corrected service template --- src/webui/service/service/routes.py | 282 +++++++++--------- src/webui/service/templates/service/home.html | 2 +- 2 files changed, 142 insertions(+), 142 deletions(-) diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index b5873de7e..a6e55aded 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -55,14 +55,14 @@ type = ["ACL_UNDEFINED", "ACL_IPV4","ACL_IPV6","ACL_L2","ACL_MPLS","ACL_MIXE f_action = ["UNDEFINED", "DROP","ACCEPT","REJECT"] l_action = ["UNDEFINED", "LOG_NONE","LOG_SYSLOG"] -@service.get('/') #Route for the homepage of the created "service" blueprint -@contextmanager -def connected_client(c): - try: - c.connect() - yield c - finally: - c.close() +#@service.get('/') #Route for the homepage of the created "service" blueprint +#@contextmanager +#def connected_client(c): +# try: +# c.connect() +# yield c +# finally: +# c.close() # Context client must be in connected state when calling this function def get_device_drivers_in_use(topology_uuid: str, context_uuid: str) -> Set[str]: @@ -123,139 +123,139 @@ def get_hub_module_name(dev: Device) -> Optional[str]: pass return None -@service.route('add-xr', methods=['GET', 'POST']) -def add_xr(): - ### FIXME: copypaste - if 'context_uuid' not in session or 'topology_uuid' not in session: - flash("Please select a context!", "warning") 
- return redirect(url_for("main.home")) - - context_uuid = session['context_uuid'] - topology_uuid = session['topology_uuid'] - - context_client.connect() - grpc_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid, rw_copy=False) - if grpc_topology is None: - flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger') - return redirect(url_for("main.home")) - else: - topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} - grpc_devices= context_client.ListDevices(Empty()) - devices = [ - device for device in grpc_devices.devices - if device.device_id.device_uuid.uuid in topo_device_uuids and DeviceDriverEnum.DEVICEDRIVER_XR in device.device_drivers - ] - devices.sort(key=lambda dev: dev.name) - - hub_interfaces_by_device = defaultdict(list) - leaf_interfaces_by_device = defaultdict(list) - constellation_name_to_uuid = {} - dev_ep_to_uuid = {} - ep_uuid_to_name = {} - for d in devices: - constellation_name_to_uuid[d.name] = d.device_id.device_uuid.uuid - hm_name = get_hub_module_name(d) - if hm_name is not None: - hm_if_prefix= hm_name + "|" - for ep in d.device_endpoints: - dev_ep_to_uuid[(d.name, ep.name)] = ep.endpoint_id.endpoint_uuid.uuid - if ep.name.startswith(hm_if_prefix): - hub_interfaces_by_device[d.name].append(ep.name) - else: - leaf_interfaces_by_device[d.name].append(ep.name) - ep_uuid_to_name[ep.endpoint_id.endpoint_uuid.uuid] = (d.name, ep.name) - hub_interfaces_by_device[d.name].sort() - leaf_interfaces_by_device[d.name].sort() - - # Find out what endpoints are already used so that they can be disabled - # in the create screen - context_obj = get_context(context_client, context_uuid, rw_copy=False) - if context_obj is None: - flash('Context({:s}) not found'.format(str(context_uuid)), 'danger') - return redirect(request.url) - - services = context_client.ListServices(context_obj.context_id) - ep_used_by={} - for service in services.services: 
- if service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: - for ep in service.service_endpoint_ids: - ep_uuid = ep.endpoint_uuid.uuid - if ep_uuid in ep_uuid_to_name: - dev_name, ep_name = ep_uuid_to_name[ep_uuid] - ep_used_by[f"{ep_name}@{dev_name}"] = service.name - - context_client.close() - - if request.method != 'POST': - return render_template('service/add-xr.html', devices=devices, hub_if=hub_interfaces_by_device, leaf_if=leaf_interfaces_by_device, ep_used_by=ep_used_by) - else: - service_name = request.form["service_name"] - if service_name == "": - flash(f"Service name must be specified", 'danger') - - constellation = request.form["constellation"] - constellation_uuid = constellation_name_to_uuid.get(constellation, None) - if constellation_uuid is None: - flash(f"Invalid constellation \"{constellation}\"", 'danger') - - hub_if = request.form["hubif"] - hub_if_uuid = dev_ep_to_uuid.get((constellation, hub_if), None) - if hub_if_uuid is None: - flash(f"Invalid hub interface \"{hub_if}\"", 'danger') - - leaf_if = request.form["leafif"] - leaf_if_uuid = dev_ep_to_uuid.get((constellation, leaf_if), None) - if leaf_if_uuid is None: - flash(f"Invalid leaf interface \"{leaf_if}\"", 'danger') - - if service_name == "" or constellation_uuid is None or hub_if_uuid is None or leaf_if_uuid is None: - return redirect(request.url) - - - json_context_uuid=json_context_id(context_uuid) - sr = { - "name": service_name, - "service_id": { - "context_id": {"context_uuid": {"uuid": context_uuid}}, - "service_uuid": {"uuid": service_name} - }, - 'service_type' : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, - "service_endpoint_ids": [ - {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': hub_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)}, - {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': leaf_if_uuid}, 'topology_id': 
json_topology_id("admin", context_id=json_context_uuid)} - ], - 'service_status' : {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED}, - 'service_constraints' : [], - } - - json_tapi_settings = { - 'capacity_value' : 50.0, - 'capacity_unit' : 'GHz', - 'layer_proto_name': 'PHOTONIC_MEDIA', - 'layer_proto_qual': 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC', - 'direction' : 'UNIDIRECTIONAL', - } - config_rule = json_config_rule_set('/settings', json_tapi_settings) - - with connected_client(service_client) as sc: - endpoints, sr['service_endpoint_ids'] = sr['service_endpoint_ids'], [] - try: - create_response = sc.CreateService(Service(**sr)) - except Exception as e: - flash(f'Failure to update service name {service_name} with endpoints and configuration, exception {str(e)}', 'danger') - return redirect(request.url) - - sr['service_endpoint_ids'] = endpoints - sr['service_config'] = {'config_rules': [config_rule]} - - try: - update_response = sc.UpdateService(Service(**sr)) - flash(f'Created service {update_response.service_uuid.uuid}', 'success') - except Exception as e: - flash(f'Failure to update service {create_response.service_uuid.uuid} with endpoints and configuration, exception {str(e)}', 'danger') - return redirect(request.url) - - return redirect(url_for('service.home')) +#@service.route('add-xr', methods=['GET', 'POST']) +#def add_xr(): +# ### FIXME: copypaste +# if 'context_uuid' not in session or 'topology_uuid' not in session: +# flash("Please select a context!", "warning") +# return redirect(url_for("main.home")) +# +# context_uuid = session['context_uuid'] +# topology_uuid = session['topology_uuid'] +# +# context_client.connect() +# grpc_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid, rw_copy=False) +# if grpc_topology is None: +# flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger') +# return redirect(url_for("main.home")) +# else: +# topo_device_uuids = 
{device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} +# grpc_devices= context_client.ListDevices(Empty()) +# devices = [ +# device for device in grpc_devices.devices +# if device.device_id.device_uuid.uuid in topo_device_uuids and DeviceDriverEnum.DEVICEDRIVER_XR in device.device_drivers +# ] +# devices.sort(key=lambda dev: dev.name) +# +# hub_interfaces_by_device = defaultdict(list) +# leaf_interfaces_by_device = defaultdict(list) +# constellation_name_to_uuid = {} +# dev_ep_to_uuid = {} +# ep_uuid_to_name = {} +# for d in devices: +# constellation_name_to_uuid[d.name] = d.device_id.device_uuid.uuid +# hm_name = get_hub_module_name(d) +# if hm_name is not None: +# hm_if_prefix= hm_name + "|" +# for ep in d.device_endpoints: +# dev_ep_to_uuid[(d.name, ep.name)] = ep.endpoint_id.endpoint_uuid.uuid +# if ep.name.startswith(hm_if_prefix): +# hub_interfaces_by_device[d.name].append(ep.name) +# else: +# leaf_interfaces_by_device[d.name].append(ep.name) +# ep_uuid_to_name[ep.endpoint_id.endpoint_uuid.uuid] = (d.name, ep.name) +# hub_interfaces_by_device[d.name].sort() +# leaf_interfaces_by_device[d.name].sort() +# +# # Find out what endpoints are already used so that they can be disabled +# # in the create screen +# context_obj = get_context(context_client, context_uuid, rw_copy=False) +# if context_obj is None: +# flash('Context({:s}) not found'.format(str(context_uuid)), 'danger') +# return redirect(request.url) +# +# services = context_client.ListServices(context_obj.context_id) +# ep_used_by={} +# for service in services.services: +# if service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: +# for ep in service.service_endpoint_ids: +# ep_uuid = ep.endpoint_uuid.uuid +# if ep_uuid in ep_uuid_to_name: +# dev_name, ep_name = ep_uuid_to_name[ep_uuid] +# ep_used_by[f"{ep_name}@{dev_name}"] = service.name +# +# context_client.close() +# +# if request.method != 'POST': +# return render_template('service/add-xr.html', 
devices=devices, hub_if=hub_interfaces_by_device, leaf_if=leaf_interfaces_by_device, ep_used_by=ep_used_by) +# else: +# service_name = request.form["service_name"] +# if service_name == "": +# flash(f"Service name must be specified", 'danger') +# +# constellation = request.form["constellation"] +# constellation_uuid = constellation_name_to_uuid.get(constellation, None) +# if constellation_uuid is None: +# flash(f"Invalid constellation \"{constellation}\"", 'danger') +# +# hub_if = request.form["hubif"] +# hub_if_uuid = dev_ep_to_uuid.get((constellation, hub_if), None) +# if hub_if_uuid is None: +# flash(f"Invalid hub interface \"{hub_if}\"", 'danger') +# +# leaf_if = request.form["leafif"] +# leaf_if_uuid = dev_ep_to_uuid.get((constellation, leaf_if), None) +# if leaf_if_uuid is None: +# flash(f"Invalid leaf interface \"{leaf_if}\"", 'danger') +# +# if service_name == "" or constellation_uuid is None or hub_if_uuid is None or leaf_if_uuid is None: +# return redirect(request.url) +# +# +# json_context_uuid=json_context_id(context_uuid) +# sr = { +# "name": service_name, +# "service_id": { +# "context_id": {"context_uuid": {"uuid": context_uuid}}, +# "service_uuid": {"uuid": service_name} +# }, +# 'service_type' : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, +# "service_endpoint_ids": [ +# {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': hub_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)}, +# {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': leaf_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)} +# ], +# 'service_status' : {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED}, +# 'service_constraints' : [], +# } +# +# json_tapi_settings = { +# 'capacity_value' : 50.0, +# 'capacity_unit' : 'GHz', +# 'layer_proto_name': 'PHOTONIC_MEDIA', +# 'layer_proto_qual': 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC', +# 
'direction' : 'UNIDIRECTIONAL', +# } +# config_rule = json_config_rule_set('/settings', json_tapi_settings) +# +# with connected_client(service_client) as sc: +# endpoints, sr['service_endpoint_ids'] = sr['service_endpoint_ids'], [] +# try: +# create_response = sc.CreateService(Service(**sr)) +# except Exception as e: +# flash(f'Failure to update service name {service_name} with endpoints and configuration, exception {str(e)}', 'danger') +# return redirect(request.url) +# +# sr['service_endpoint_ids'] = endpoints +# sr['service_config'] = {'config_rules': [config_rule]} +# +# try: +# update_response = sc.UpdateService(Service(**sr)) +# flash(f'Created service {update_response.service_uuid.uuid}', 'success') +# except Exception as e: +# flash(f'Failure to update service {create_response.service_uuid.uuid} with endpoints and configuration, exception {str(e)}', 'danger') +# return redirect(request.url) +# +# return redirect(url_for('service.home')) @service.get('/detail') def detail(service_uuid: str): diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html index 4e4c1f82f..c22d476e7 100644 --- a/src/webui/service/templates/service/home.html +++ b/src/webui/service/templates/service/home.html @@ -25,7 +25,7 @@ Add New Service - --> + -- GitLab From a6d66b3a2b0a47bebe336c97fd790c6d4c1c5121 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 07:59:45 +0000 Subject: [PATCH 27/62] PathComp component - Frontend: - Added composition of device-specific config rules --- .../algorithms/tools/ComposeConfigRules.py | 42 ++++++++++++------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/src/pathcomp/frontend/service/algorithms/tools/ComposeConfigRules.py b/src/pathcomp/frontend/service/algorithms/tools/ComposeConfigRules.py index 91367e23f..c6621773b 100644 --- a/src/pathcomp/frontend/service/algorithms/tools/ComposeConfigRules.py +++ 
b/src/pathcomp/frontend/service/algorithms/tools/ComposeConfigRules.py @@ -19,7 +19,8 @@ from common.tools.object_factory.ConfigRule import json_config_rule_set SETTINGS_RULE_NAME = '/settings' -DEV_EP_SETTINGS = re.compile(r'\/device\[([^\]]+)\]\/endpoint\[([^\]]+)\]\/settings') +DEVICE_SETTINGS = re.compile(r'\/device\[([^\]]+)\]\/settings') +ENDPOINT_SETTINGS = re.compile(r'\/device\[([^\]]+)\]\/endpoint\[([^\]]+)\]\/settings') L2NM_SETTINGS_FIELD_DEFAULTS = { 'encapsulation_type': 'dot1q', @@ -76,26 +77,37 @@ def compose_device_config_rules( device_name_mapping : Dict[str, str], endpoint_name_mapping : Dict[Tuple[str, str], str] ) -> None: + devices_traversed = set() endpoints_traversed = set() for path_hop in path_hops: device_uuid_or_name = path_hop['device'] + devices_traversed.add(device_uuid_or_name) endpoints_traversed.add((device_uuid_or_name, path_hop['ingress_ep'])) endpoints_traversed.add((device_uuid_or_name, path_hop['egress_ep'])) for config_rule in config_rules: if config_rule.WhichOneof('config_rule') != 'custom': continue - match = DEV_EP_SETTINGS.match(config_rule.custom.resource_key) - if match is None: continue - device_uuid_or_name = match.group(1) - device_name_or_uuid = device_name_mapping[device_uuid_or_name] - device_keys = {device_uuid_or_name, device_name_or_uuid} - - endpoint_uuid_or_name = match.group(2) - endpoint_name_or_uuid_1 = endpoint_name_mapping[(device_uuid_or_name, endpoint_uuid_or_name)] - endpoint_name_or_uuid_2 = endpoint_name_mapping[(device_name_or_uuid, endpoint_uuid_or_name)] - endpoint_keys = {endpoint_uuid_or_name, endpoint_name_or_uuid_1, endpoint_name_or_uuid_2} - - device_endpoint_keys = set(itertools.product(device_keys, endpoint_keys)) - if len(device_endpoint_keys.intersection(endpoints_traversed)) == 0: continue - subservice_config_rules.append(config_rule) + match = DEVICE_SETTINGS.match(config_rule.custom.resource_key) + if match is not None: + device_uuid_or_name = match.group(1) + device_name_or_uuid = 
device_name_mapping[device_uuid_or_name] + device_keys = {device_uuid_or_name, device_name_or_uuid} + + if len(device_keys.intersection(devices_traversed)) == 0: continue + subservice_config_rules.append(config_rule) + + match = ENDPOINT_SETTINGS.match(config_rule.custom.resource_key) + if match is not None: + device_uuid_or_name = match.group(1) + device_name_or_uuid = device_name_mapping[device_uuid_or_name] + device_keys = {device_uuid_or_name, device_name_or_uuid} + + endpoint_uuid_or_name = match.group(2) + endpoint_name_or_uuid_1 = endpoint_name_mapping[(device_uuid_or_name, endpoint_uuid_or_name)] + endpoint_name_or_uuid_2 = endpoint_name_mapping[(device_name_or_uuid, endpoint_uuid_or_name)] + endpoint_keys = {endpoint_uuid_or_name, endpoint_name_or_uuid_1, endpoint_name_or_uuid_2} + + device_endpoint_keys = set(itertools.product(device_keys, endpoint_keys)) + if len(device_endpoint_keys.intersection(endpoints_traversed)) == 0: continue + subservice_config_rules.append(config_rule) -- GitLab From cc0663dfb3250540580a06381eb77fb5fffe8bcb Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:00:55 +0000 Subject: [PATCH 28/62] Monitoring component: - fixed minor issue when checking monitor_flag of KPIs --- src/monitoring/service/ManagementDBTools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/monitoring/service/ManagementDBTools.py b/src/monitoring/service/ManagementDBTools.py index a2beddccd..6c0a69e0e 100644 --- a/src/monitoring/service/ManagementDBTools.py +++ b/src/monitoring/service/ManagementDBTools.py @@ -261,10 +261,10 @@ class ManagementDB(): else: if data[0] == 1: return True - elif data[0] == 0: + elif data[0] == 0 or data[0] is None: return False else: - LOGGER.debug(f"KPI {kpi_id} is wrong") + LOGGER.debug(f"KPI {kpi_id} is wrong: {str(data)}") return None except sqlite3.Error as e: LOGGER.debug(f"KPI {kpi_id} cannot be checked from the ManagementDB: {e}") -- GitLab From 
fcd2fad062b8bf56e21bc7ed69b60c97e1425422 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:01:18 +0000 Subject: [PATCH 29/62] Hackfest - ContainerLab: - Added useful commands --- hackfest/containerlab/commands.txt | 32 ++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt index ae023a294..4c1c3a951 100644 --- a/hackfest/containerlab/commands.txt +++ b/hackfest/containerlab/commands.txt @@ -59,3 +59,35 @@ $ docker exec -it clab-tfs-scenario-srl1 sr_cli # system gnmi-server network-instance mgmt yang-models openconfig # commit stay # quit + + +# Check configurations done: +gnmic -a 172.100.100.101 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path '/network-instances' > srl1-nis.json +gnmic -a 172.100.100.101 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path '/interfaces' > srl1-ifs.json +gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path '/network-instances' > srl2-nis.json +gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf get --path '/interfaces' > srl2-ifs.json + + +# Delete elements: +gnmic -a 172.100.100.101 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/network-instances/network-instance[name=b19229e8]' +gnmic -a 172.100.100.101 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/1]/subinterfaces/subinterface[index=0]' +gnmic -a 172.100.100.101 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/2]/subinterfaces/subinterface[index=0]' +gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/network-instances/network-instance[name=b19229e8]' +gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! 
--skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/1]/subinterfaces/subinterface[index=0]' +gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/2]/subinterfaces/subinterface[index=0]' + +# Run driver in standalone mode +PYTHONPATH=./src python -m src.device.tests.test_gnmi + +# Configure clients +docker exec -it clab-tfs-scenario-client1 bash + ip address add 172.16.1.10/24 dev eth1 + ip route add 172.16.2.0/24 via 172.16.1.1 + + ping 172.16.2.1 or 172.16.2.10 + +docker exec -it clab-tfs-scenario-client2 bash + ip address add 172.16.2.10/24 dev eth1 + ip route add 172.16.1.0/24 via 172.16.2.1 + + ping 172.16.2.1 or 172.16.2.10 -- GitLab From d8243efb63f1214ab8dd2af3c8806b1bf0b583d0 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:02:37 +0000 Subject: [PATCH 30/62] Device component - gNMI Driver: - First functional version of gNMI driver able to configure - Telemetry is work in progress --- src/device/service/drivers/__init__.py | 11 + .../gnmi_openconfig/GnmiOpenConfigDriver.py | 100 ++++++ .../gnmi_openconfig/GnmiSessionHandler.py | 332 ++++++++++++++++++ .../gnmi_openconfig/MonitoringThread.py | 150 ++++++++ .../drivers/gnmi_openconfig/SamplesCache.py | 101 ++++++ .../{protocols/gnmi => }/__init__.py | 1 + .../{protocols => }/gnmi/Acknowledgement.txt | 0 .../drivers/gnmi_openconfig/gnmi/__init__.py | 14 + .../{protocols => }/gnmi/gnmi.proto | 0 .../{protocols => }/gnmi/gnmi_ext.proto | 0 .../{protocols => }/gnmi/gnmi_pb2.py | 0 .../{protocols => }/gnmi/gnmi_pb2.py.old | 0 .../{protocols => }/gnmi/gnmi_pb2.pyi | 0 .../{protocols => }/gnmi/gnmi_pb2_grpc.py | 0 .../gnmi_openconfig/handlers/Component.py | 63 ++++ .../gnmi_openconfig/handlers/Interface.py | 248 +++++++++++++ .../handlers/InterfaceCounter.py | 80 +++++ .../handlers/NetworkInstance.py | 62 ++++ .../handlers/NetworkInstanceInterface.py | 46 +++ 
.../handlers/NetworkInstanceStaticRoute.py | 61 ++++ .../drivers/gnmi_openconfig/handlers/Tools.py | 30 ++ .../gnmi_openconfig/handlers/_Handler.py | 32 ++ .../gnmi_openconfig/handlers/__init__.py | 103 ++++++ .../handlers/old_bgp_handler.txt | 138 ++++++++ .../gnmi_openconfig/tools/Capabilities.py | 36 ++ .../drivers/gnmi_openconfig/tools/Channel.py | 34 ++ .../drivers/gnmi_openconfig/tools/Path.py | 98 ++++++ .../gnmi_openconfig/tools/Subscriptions.py | 47 +++ .../drivers/gnmi_openconfig/tools/Value.py | 52 +++ .../drivers/gnmi_openconfig/tools/__init__.py | 14 + 30 files changed, 1853 insertions(+) create mode 100644 src/device/service/drivers/gnmi_openconfig/GnmiOpenConfigDriver.py create mode 100644 src/device/service/drivers/gnmi_openconfig/GnmiSessionHandler.py create mode 100644 src/device/service/drivers/gnmi_openconfig/MonitoringThread.py create mode 100644 src/device/service/drivers/gnmi_openconfig/SamplesCache.py rename src/device/service/drivers/gnmi_openconfig/{protocols/gnmi => }/__init__.py (99%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/Acknowledgement.txt (100%) create mode 100644 src/device/service/drivers/gnmi_openconfig/gnmi/__init__.py rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi.proto (100%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi_ext.proto (100%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi_pb2.py (100%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi_pb2.py.old (100%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi_pb2.pyi (100%) rename src/device/service/drivers/gnmi_openconfig/{protocols => }/gnmi/gnmi_pb2_grpc.py (100%) create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/Component.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/Interface.py create mode 100644 
src/device/service/drivers/gnmi_openconfig/handlers/InterfaceCounter.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/NetworkInstance.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/NetworkInstanceInterface.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/NetworkInstanceStaticRoute.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/Tools.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/_Handler.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/__init__.py create mode 100644 src/device/service/drivers/gnmi_openconfig/handlers/old_bgp_handler.txt create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/Capabilities.py create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/Channel.py create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/Path.py create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/Subscriptions.py create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/Value.py create mode 100644 src/device/service/drivers/gnmi_openconfig/tools/__init__.py diff --git a/src/device/service/drivers/__init__.py b/src/device/service/drivers/__init__.py index b3b485a47..6a9726315 100644 --- a/src/device/service/drivers/__init__.py +++ b/src/device/service/drivers/__init__.py @@ -70,6 +70,7 @@ DRIVERS.append( # DeviceDriverEnum.DEVICEDRIVER_P4, # DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY, # DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352, + # DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG, # ], #} ])) @@ -94,6 +95,16 @@ if LOAD_ALL_DEVICE_DRIVERS: } ])) + from .gnmi_openconfig.GnmiOpenConfigDriver import GnmiOpenConfigDriver # pylint: disable=wrong-import-position + DRIVERS.append( + (GnmiOpenConfigDriver, [ + { + # Real Packet Router, specifying gNMI OpenConfig Driver => use GnmiOpenConfigDriver + FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.PACKET_ROUTER, + 
FilterFieldEnum.DRIVER : DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG, + } + ])) + if LOAD_ALL_DEVICE_DRIVERS: from .transport_api.TransportApiDriver import TransportApiDriver # pylint: disable=wrong-import-position DRIVERS.append( diff --git a/src/device/service/drivers/gnmi_openconfig/GnmiOpenConfigDriver.py b/src/device/service/drivers/gnmi_openconfig/GnmiOpenConfigDriver.py new file mode 100644 index 000000000..882c0de07 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/GnmiOpenConfigDriver.py @@ -0,0 +1,100 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import logging, queue, threading
from typing import Any, Iterator, List, Optional, Tuple, Union
from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method
from common.type_checkers.Checkers import chk_type
from device.service.driver_api._Driver import _Driver
from .GnmiSessionHandler import GnmiSessionHandler

DRIVER_NAME = 'gnmi_openconfig'
METRICS_POOL = MetricsPool('Device', 'Driver', labels={'driver': DRIVER_NAME})

class GnmiOpenConfigDriver(_Driver):
    """TeraFlowSDN device driver for devices speaking gNMI with OpenConfig models.

    All gNMI interactions are delegated to a GnmiSessionHandler instance; a
    lock serializes the public configuration/subscription operations, and the
    telemetry samples produced by the handler's monitoring thread are exposed
    through GetState().
    """

    def __init__(self, address : str, port : int, **settings) -> None:
        super().__init__(DRIVER_NAME, address, port, **settings)
        self.__logger = logging.getLogger('{:s}:[{:s}:{:s}]'.format(str(__name__), str(self.address), str(self.port)))
        self.__lock = threading.Lock()          # serializes config/subscription operations
        self.__started = threading.Event()      # set once Connect() has succeeded
        self.__terminate = threading.Event()    # set on Disconnect() to stop GetState() loops
        self.__handler = GnmiSessionHandler(self.address, self.port, settings, self.__logger)
        self.__out_samples = self.__handler.out_samples

    def Connect(self) -> bool:
        """Open the gNMI session; idempotent (returns True if already connected)."""
        with self.__lock:
            if self.__started.is_set(): return True
            self.__handler.connect()
            self.__started.set()
            return True

    def Disconnect(self) -> bool:
        """Close the gNMI session and signal GetState() loops to terminate."""
        with self.__lock:
            # Trigger termination of loops and processes
            self.__terminate.set()
            # If not started, assume it is already disconnected
            if not self.__started.is_set(): return True
            self.__handler.disconnect()
            return True

    @metered_subclass_method(METRICS_POOL)
    def GetInitialConfig(self) -> List[Tuple[str, Any]]:
        # No initial configuration is retrieved for gNMI devices.
        with self.__lock:
            return []

    @metered_subclass_method(METRICS_POOL)
    def GetConfig(self, resource_keys : Optional[List[str]] = None) -> List[Tuple[str, Union[Any, None, Exception]]]:
        """Retrieve the given resource keys (all known keys when None/empty)."""
        # Default changed from a shared mutable list ([]) to None to avoid the
        # mutable-default-argument pitfall; caller-visible semantics unchanged.
        if resource_keys is None: resource_keys = []
        chk_type('resources', resource_keys, list)
        with self.__lock:
            return self.__handler.get(resource_keys)

    @metered_subclass_method(METRICS_POOL)
    def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
        """Apply the given (resource_key, value) configuration items."""
        chk_type('resources', resources, list)
        if len(resources) == 0: return []
        with self.__lock:
            return self.__handler.set(resources)

    @metered_subclass_method(METRICS_POOL)
    def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
        """Delete the given (resource_key, value) configuration items."""
        chk_type('resources', resources, list)
        if len(resources) == 0: return []
        with self.__lock:
            return self.__handler.delete(resources)

    @metered_subclass_method(METRICS_POOL)
    def SubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
        """Request telemetry subscriptions: (resource_key, duration, interval)."""
        chk_type('subscriptions', subscriptions, list)
        if len(subscriptions) == 0: return []
        with self.__lock:
            return self.__handler.subscribe(subscriptions)

    @metered_subclass_method(METRICS_POOL)
    def UnsubscribeState(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
        """Cancel telemetry subscriptions previously requested."""
        chk_type('subscriptions', subscriptions, list)
        if len(subscriptions) == 0: return []
        with self.__lock:
            return self.__handler.unsubscribe(subscriptions)

    def GetState(self, blocking=False, terminate : Optional[threading.Event] = None) -> Iterator[Tuple[str, Any]]:
        """Yield telemetry samples; blocks when `blocking`, else drains and returns.

        Terminates when Disconnect() was called or the optional `terminate`
        event is set.
        """
        while True:
            if self.__terminate.is_set(): break
            if terminate is not None and terminate.is_set(): break
            try:
                # Short timeout keeps the loop responsive to termination events.
                sample = self.__out_samples.get(block=blocking, timeout=0.1)
            except queue.Empty:
                if blocking: continue
                return
            if sample is None: continue
            yield sample
import copy, grpc, json, logging, queue, threading
from typing import Any, Dict, List, Optional, Tuple, Union
from common.tools.grpc.Tools import grpc_message_to_json_string
from common.type_checkers.Checkers import chk_float, chk_length, chk_string, chk_type
from .gnmi.gnmi_pb2_grpc import gNMIStub
from .gnmi.gnmi_pb2 import Encoding, GetRequest, SetRequest, UpdateResult # pylint: disable=no-name-in-module
from .handlers import ALL_RESOURCE_KEYS, compose, get_path, parse
from .tools.Capabilities import get_supported_encodings
from .tools.Channel import get_grpc_channel
from .tools.Path import path_from_string, path_to_string
from .tools.Subscriptions import Subscriptions
from .tools.Value import decode_value
from .MonitoringThread import MonitoringThread

class GnmiSessionHandler:
    """Holds a gNMI session to a single device.

    Get/Set/Delete are synchronous wrappers around the gNMI Get/Set RPCs.
    Subscriptions are not issued directly: they are forwarded through
    `in_subscriptions` to a MonitoringThread that owns the streaming
    Subscribe RPC and publishes collected samples into `out_samples`.
    """

    def __init__(self, address : str, port : int, settings : Dict, logger : logging.Logger) -> None:
        self._address = address
        self._port = port
        self._settings = copy.deepcopy(settings)
        self._logger = logger
        self._lock = threading.Lock()
        self._connected = threading.Event()
        self._username = settings.get('username')
        self._password = settings.get('password')
        self._use_tls = settings.get('use_tls', False)
        self._channel : Optional[grpc.Channel] = None
        self._stub : Optional[gNMIStub] = None
        self._monit_thread = None
        self._supported_encodings = None
        self._subscriptions = Subscriptions()
        self._in_subscriptions = queue.Queue()   # (operation, resource_key, duration, interval) tuples
        self._out_samples = queue.Queue()        # (timestamp, resource_key, value) tuples

    @property
    def subscriptions(self): return self._subscriptions

    @property
    def in_subscriptions(self): return self._in_subscriptions

    @property
    def out_samples(self): return self._out_samples

    def connect(self):
        """Open the gRPC channel, discover supported encodings, start monitoring."""
        with self._lock:
            self._channel = get_grpc_channel(self._address, self._port, self._use_tls, self._logger)
            self._stub = gNMIStub(self._channel)
            self._supported_encodings = get_supported_encodings(
                self._stub, self._username, self._password, timeout=120)
            self._monit_thread = MonitoringThread(
                self._stub, self._logger, self._settings, self._in_subscriptions, self._out_samples)
            self._monit_thread.start()
            self._connected.set()

    def disconnect(self):
        """Stop the monitoring thread and close the channel; no-op if not connected."""
        if not self._connected.is_set(): return
        with self._lock:
            self._monit_thread.stop()
            self._monit_thread.join()
            self._channel.close()
            self._connected.clear()

    def get(self, resource_keys : List[str]) -> List[Tuple[str, Union[Any, None, Exception]]]:
        """Retrieve resource keys through a single gNMI Get request.

        Returns a list of (resource_key, value-or-Exception) tuples. When
        `resource_keys` is empty, all known resource keys are retrieved.
        """
        if len(resource_keys) == 0: resource_keys = ALL_RESOURCE_KEYS
        chk_type('resources', resource_keys, list)

        parsing_results = []

        get_request = GetRequest()
        get_request.type = GetRequest.DataType.ALL
        get_request.encoding = Encoding.JSON_IETF
        # get_request.use_models is kept empty: return data for all supported models
        for i, resource_key in enumerate(resource_keys):
            str_resource_name = 'resource_key[#{:d}]'.format(i)
            try:
                chk_string(str_resource_name, resource_key, allow_empty=False)
                str_path = get_path(resource_key)
                self._logger.debug('[get] resource_key=%s path=%s', str(resource_key), str(str_path))
                get_request.path.append(path_from_string(str_path))
            except Exception as e: # pylint: disable=broad-except
                self._logger.exception('Exception parsing %s: %s', str_resource_name, str(resource_key))
                parsing_results.append((resource_key, e)) # if validation fails, store the exception

        # Do not issue the RPC when any resource key failed to parse.
        if len(parsing_results) > 0:
            return parsing_results

        metadata = [('username', self._username), ('password', self._password)]
        get_reply = self._stub.Get(get_request, metadata=metadata, timeout=None)

        results = []
        for notification in get_reply.notification:
            for update in notification.update:
                str_path = path_to_string(update.path)
                try:
                    value = decode_value(update.val)
                    results.extend(parse(str_path, value))
                except Exception as e: # pylint: disable=broad-except
                    self._logger.exception(
                        'Exception processing notification %s', grpc_message_to_json_string(notification))
                    results.append((str_path, e)) # if processing fails, store the exception
        return results

    def _build_set_request(self, resources : List[Tuple[str, Any]], delete : bool) -> SetRequest:
        """Compose a SetRequest carrying updates (delete=False) or deletes (delete=True)."""
        set_request = SetRequest()
        for resource_key, resource_value in resources:
            self._logger.debug('[set/delete] resource_key=%s value=%s', str(resource_key), str(resource_value))
            if isinstance(resource_value, str): resource_value = json.loads(resource_value)
            str_path, str_data = compose(resource_key, resource_value, delete=delete)
            if delete:
                set_request_entry = set_request.delete.add()
                set_request_entry.CopyFrom(path_from_string(str_path))
            else:
                set_request_entry = set_request.update.add()
                set_request_entry.path.CopyFrom(path_from_string(str_path))
                set_request_entry.val.json_val = str_data.encode('UTF-8')
        return set_request

    def _issue_set_request(self, set_request : SetRequest):
        """Send a SetRequest and return the device's SetResponse."""
        self._logger.debug('set_request=%s', grpc_message_to_json_string(set_request))
        metadata = [('username', self._username), ('password', self._password)]
        set_reply = self._stub.Set(set_request, metadata=metadata, timeout=None)
        self._logger.debug('set_reply=%s', grpc_message_to_json_string(set_reply))
        return set_reply

    def set(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
        """Apply (resource_key, value) items; returns (resource_key, True-or-Exception) per item."""
        set_request = self._build_set_request(resources, delete=False)
        set_reply = self._issue_set_request(set_request)
        results = []
        # Responses arrive in request order; pair them back with the resources.
        for (resource_key, _), update_result in zip(resources, set_reply.response):
            if update_result.op == UpdateResult.UPDATE:
                results.append((resource_key, True))
            else:
                results.append((resource_key, Exception('Unexpected')))
        return results

    def delete(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]:
        """Delete (resource_key, value) items; returns (resource_key, True-or-Exception) per item."""
        set_request = self._build_set_request(resources, delete=True)
        set_reply = self._issue_set_request(set_request)
        results = []
        for (resource_key, _), update_result in zip(resources, set_reply.response):
            if update_result.op == UpdateResult.DELETE:
                results.append((resource_key, True))
            else:
                results.append((resource_key, Exception('Unexpected')))
        return results

    @staticmethod
    def _parse_subscription(str_subscription_name : str, subscription : Tuple[str, float, float]) -> Tuple[str, float, float]:
        """Validate a (resource_key, sampling_duration, sampling_interval) tuple; raise on error."""
        chk_type(str_subscription_name, subscription, (list, tuple))
        chk_length(str_subscription_name, subscription, min_length=3, max_length=3)
        resource_key, sampling_duration, sampling_interval = subscription
        chk_string(str_subscription_name + '.resource_key', resource_key, allow_empty=False)
        chk_float(str_subscription_name + '.sampling_duration', sampling_duration, min_value=0)
        chk_float(str_subscription_name + '.sampling_interval', sampling_interval, min_value=0)
        return resource_key, sampling_duration, sampling_interval

    def _enqueue_subscriptions(
        self, operation : str, subscriptions : List[Tuple[str, float, float]]
    ) -> List[Union[bool, Exception]]:
        """Validate and forward subscription requests to the monitoring thread.

        Returns True per accepted item, or the validation Exception per
        rejected item, preserving input order.
        """
        results = []
        for i, subscription in enumerate(subscriptions):
            str_subscription_name = 'subscriptions[#{:d}]'.format(i)
            try:
                resource_key, sampling_duration, sampling_interval = \
                    self._parse_subscription(str_subscription_name, subscription)
            except Exception as e: # pylint: disable=broad-except
                # Log the raw subscription tuple: resource_key may be unbound when
                # validation failed before unpacking (previously an UnboundLocalError).
                self._logger.exception('Exception validating %s: %s', str_subscription_name, str(subscription))
                results.append(e) # if validation fails, store the exception
                continue
            self._in_subscriptions.put_nowait((operation, resource_key, sampling_duration, sampling_interval))
            results.append(True)
        return results

    def subscribe(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
        """Request new telemetry subscriptions."""
        return self._enqueue_subscriptions('subscribe', subscriptions)

    def unsubscribe(self, subscriptions : List[Tuple[str, float, float]]) -> List[Union[bool, Exception]]:
        """Request cancellation of telemetry subscriptions."""
        return self._enqueue_subscriptions('unsubscribe', subscriptions)
# Ref: https://github.com/openconfig/reference/blob/master/rpc/gnmi/gnmi-specification.md
# Ref: https://github.com/openconfig/gnmi/blob/master/proto/gnmi/gnmi.proto

from __future__ import annotations
import grpc, logging, queue, threading
from collections.abc import Iterator
from datetime import datetime, timezone
from typing import Dict
from common.tools.grpc.Tools import grpc_message_to_json_string
from .gnmi.gnmi_pb2 import ( # pylint: disable=no-name-in-module
    QOSMarking, SubscribeRequest, Subscription, SubscriptionList, SubscriptionMode
)
from .gnmi.gnmi_pb2_grpc import gNMIStub
from .tools.Path import path_from_string, path_to_string


LOGGER = logging.getLogger(__name__)

# SubscriptionList Mode: Mode of the subscription.
# STREAM = 0: Values streamed by the target. gNMI Specification Section 3.5.1.5.2
# ONCE   = 1: Values sent once-off by the target. gNMI Specification Section 3.5.1.5.1
# POLL   = 2: Values sent in response to a poll request. gNMI Specification Section 3.5.1.5.3
GNMI_SUBSCRIPTION_LIST_MODE = SubscriptionList.Mode.STREAM

# Path Prefix: Prefix used for paths.
GNMI_PATH_PREFIX = None

# QOS Marking: DSCP marking to be used.
GNMI_QOS_MARKING = None

# Allow Aggregation: Whether elements of the schema that are marked as eligible for aggregation
# should be aggregated or not.
GNMI_ALLOW_AGGREGATION = False

# Encoding: The encoding that the target should use within the Notifications generated
# corresponding to the SubscriptionList.
# NOTE(review): passed as the string 'JSON' to a protobuf enum field below; generated
# stubs normally expect an int (e.g. Encoding.JSON) — confirm this is accepted.
GNMI_ENCODING = 'JSON'

# Subscription Mode: The mode of the subscription, specifying how the target must return values
# in a subscription. gNMI Specification Section 3.5.1.3
# TARGET_DEFINED = 0: The target selects the relevant mode for each element.
# ON_CHANGE      = 1: The target sends an update on element value change.
# SAMPLE         = 2: The target samples values according to the interval.
GNMI_SUBSCRIPTION_MODE = SubscriptionMode.SAMPLE

# Suppress Redundant: Indicates whether values that have not changed should be sent in a SAMPLE
# subscription. gNMI Specification Section 3.5.1.3
GNMI_SUPPRESS_REDUNDANT = False

# Heartbeat Interval: Specifies the maximum allowable silent period in nanoseconds when
# suppress_redundant is in use. The target should send a value at least once in the period
# specified. gNMI Specification Section 3.5.1.3
GNMI_HEARTBEAT_INTERVAL = 10 # seconds

GNMI_SUBSCRIPTION_TIMEOUT = None

class MonitoringThread(threading.Thread):
    """Owns the streaming gNMI Subscribe RPC for one device session.

    Consumes (operation, resource_key, duration, interval) tuples from
    `in_subscriptions`, batches them into SubscribeRequests, and pushes
    (timestamp, resource_key, value) samples into `out_samples`.
    """

    def __init__(
        self, stub : gNMIStub, logger : logging.Logger, settings : Dict,
        in_subscriptions : queue.Queue, out_samples : queue.Queue
    ) -> None:
        super().__init__(daemon=True)
        self._terminate = threading.Event()
        self._stub = stub
        self._logger = logger
        self._username = settings.get('username')
        self._password = settings.get('password')
        self._in_subscriptions = in_subscriptions
        self._out_samples = out_samples
        self._response_iterator = None

    def stop(self) -> None:
        """Signal termination and cancel the in-flight Subscribe RPC, if any."""
        self._terminate.set()
        if self._response_iterator is not None:
            self._response_iterator.cancel()

    def generate_requests(self) -> Iterator[SubscribeRequest]:
        """Yield SubscribeRequests built from queued subscription petitions.

        Petitions arriving close together are batched: they accumulate while
        the queue keeps producing, and a request is emitted when the queue
        momentarily drains.
        """
        subscriptions = []
        while not self._terminate.is_set():
            try:
                subscription = self._in_subscriptions.get(block=True, timeout=0.1)
                operation, resource_key, sampling_duration, sampling_interval = subscription # pylint: disable=unused-variable
                # Unsubscribe is not supported by gNMI; it requires cancelling the entire RPC.
                if operation != 'subscribe': continue
                path = path_from_string(resource_key)
                subscription = Subscription(
                    path=path, mode=GNMI_SUBSCRIPTION_MODE, suppress_redundant=GNMI_SUPPRESS_REDUNDANT,
                    sample_interval=int(sampling_interval * 1000000000),
                    heartbeat_interval=int(GNMI_HEARTBEAT_INTERVAL * 1000000000))
                subscriptions.append(subscription)
            except queue.Empty:
                if len(subscriptions) == 0: continue
                prefix = path_from_string(GNMI_PATH_PREFIX) if GNMI_PATH_PREFIX is not None else None
                qos = QOSMarking(marking=GNMI_QOS_MARKING) if GNMI_QOS_MARKING is not None else None
                subscriptions_list = SubscriptionList(
                    prefix=prefix, mode=GNMI_SUBSCRIPTION_LIST_MODE, allow_aggregation=GNMI_ALLOW_AGGREGATION,
                    encoding=GNMI_ENCODING, subscription=subscriptions, qos=qos)
                subscribe_request = SubscribeRequest(subscribe=subscriptions_list)
                yield subscribe_request
                subscriptions = []
            except: # pylint: disable=bare-except
                self._logger.exception('[generate_requests] Unhandled Exception')

    def run(self) -> None:
        """Drive the Subscribe RPC and forward received samples to `out_samples`."""
        # Add a dummy subscription to be used as keep-alive
        # usable only with SRLinux native data models
        #subscription = ('/system/name/host-name', None, 1)
        #self._in_subscriptions.put_nowait(subscription)

        try:
            request_iterator = self.generate_requests()
            metadata = [('username', self._username), ('password', self._password)]
            self._response_iterator = self._stub.Subscribe(request_iterator, metadata=metadata, timeout=None)
            for subscribe_response in self._response_iterator:
                # Use an aware UTC datetime: naive utcnow().timestamp() would be
                # interpreted in local time and yield skewed epoch timestamps.
                timestamp = datetime.now(timezone.utc).timestamp()
                str_subscribe_response = grpc_message_to_json_string(subscribe_response)
                self._logger.warning('[run] subscribe_response=%s', str_subscribe_response)
                for update in subscribe_response.update.update:
                    str_path = path_to_string(update.path)
                    # NOTE(review): this keeps ONLY the host-name keep-alive sample and
                    # drops all other telemetry — looks like leftover debug filtering
                    # (telemetry is work in progress); confirm intent before relying on it.
                    if str_path != '/system/name/host-name': continue
                    value_type = update.val.WhichOneof('value')
                    value = getattr(update.val, value_type)
                    sample = (timestamp, str_path, value)
                    self._logger.warning('[run] sample=%s', str(sample))
                    self._out_samples.put_nowait(sample)
        except grpc.RpcError as e:
            # CANCELLED with this message is the expected outcome of stop(); re-raise anything else.
            if e.code() != grpc.StatusCode.CANCELLED: raise # pylint: disable=no-member
            if e.details() != 'Locally cancelled by application!': raise # pylint: disable=no-member
        except: # pylint: disable=bare-except
            self._logger.exception('Unhandled Exception')
def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp):
    """Compute per-second rates between two absolute counter samples.

    Args:
        previous_sample: dict of counter values from the previous poll (must contain 'name').
        previous_timestamp: epoch seconds of the previous poll.
        current_sample: dict of counter values from the current poll.
        current_timestamp: epoch seconds of the current poll.

    Returns:
        dict mapping 'name' plus each shared numeric counter to its rate
        (delta / elapsed seconds), or None when any input is missing or the
        elapsed time is not positive.
    """
    if previous_sample is None: return None
    if previous_timestamp is None: return None
    if current_sample is None: return None
    if current_timestamp is None: return None
    delay = current_timestamp - previous_timestamp
    # Guard against duplicate or out-of-order timestamps: the original code
    # divided by `delay` unconditionally (ZeroDivisionError / negative rates).
    if delay <= 0: return None
    field_keys = set(previous_sample.keys()).union(current_sample.keys())
    field_keys.discard('name')
    delta_sample = {'name': previous_sample['name']}
    for field_key in field_keys:
        # Use .get(): the union may contain keys present in only one of the two
        # samples; direct indexing raised KeyError in that case.
        previous_sample_value = previous_sample.get(field_key)
        if not isinstance(previous_sample_value, (int, float)): continue
        current_sample_value = current_sample.get(field_key)
        if not isinstance(current_sample_value, (int, float)): continue
        delta_value = current_sample_value - previous_sample_value
        # Negative delta means the counter was reset/rolled over: skip it.
        if delta_value < 0: continue
        delta_sample[field_key] = delta_value / delay
    return delta_sample
def do_sampling(
    samples_cache : SamplesCache, logger : logging.Logger, resource_key : str, out_samples : queue.Queue
) -> None:
    """Look up one counter in the cache and enqueue it as a monitoring sample.

    Extracts the counter name from the tail of `resource_key`, reads the cached
    delta value for it, and pushes (timestamp, resource_key, value) into
    `out_samples`. Failures are logged, never raised.
    """
    try:
        timestamp, samples = samples_cache.get(resource_key)
        # The counter name is the last path element, stripped of any namespace prefix.
        last_element = resource_key.split('/')[-1]
        counter_name = last_element.split(':')[-1]
        value = samples.get(counter_name)
        if value is None:
            logger.warning('[do_sampling] value not found for {:s}'.format(resource_key))
            return
        # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s}
        out_samples.put_nowait((timestamp, resource_key, value))
    except: # pylint: disable=bare-except
        logger.exception('Error retrieving samples')
a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/__init__.py +++ b/src/device/service/drivers/gnmi_openconfig/__init__.py @@ -11,3 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt b/src/device/service/drivers/gnmi_openconfig/gnmi/Acknowledgement.txt similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/Acknowledgement.txt rename to src/device/service/drivers/gnmi_openconfig/gnmi/Acknowledgement.txt diff --git a/src/device/service/drivers/gnmi_openconfig/gnmi/__init__.py b/src/device/service/drivers/gnmi_openconfig/gnmi/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/gnmi/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi.proto b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi.proto similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi.proto rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi.proto diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_ext.proto b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_ext.proto similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_ext.proto rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_ext.proto diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.py similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.py diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py.old b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.py.old similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.py.old rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.py.old diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.pyi b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.pyi similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2.pyi rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2.pyi diff --git a/src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2_grpc.py b/src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2_grpc.py similarity index 100% rename from src/device/service/drivers/gnmi_openconfig/protocols/gnmi/gnmi_pb2_grpc.py rename to src/device/service/drivers/gnmi_openconfig/gnmi/gnmi_pb2_grpc.py diff --git 
class ComponentHandler(_Handler):
    """Translates OpenConfig platform components (PORTs) into endpoint resources."""

    def get_resource_key(self) -> str: return '/endpoints/endpoint'
    def get_path(self) -> str: return '/components/component'

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Build ('/endpoints/endpoint[<uuid>]', endpoint_dict) entries for each PORT component."""
        components : List[Dict] = json_data.get('component', [])
        response = []
        for component in components:
            raw_type = component.get('state', {}).get('type')
            if raw_type is None: continue
            # Strip the full or abbreviated OpenConfig namespace prefix before matching.
            raw_type = raw_type.replace('oc-platform-types:', '')
            raw_type = raw_type.replace('openconfig-platform-types:', '')
            if raw_type not in {'PORT'}: continue

            component_name = component.get('name')
            if component_name is None: continue

            # TODO: improve mapping between interface name and component name
            # By now, computed by time for the sake of saving time for the Hackfest.
            interface_name = component_name.lower().replace('-port', '')

            endpoint = {
                'type': '-',
                'uuid': interface_name,
                'sample_types': {
                    KpiSampleType.KPISAMPLETYPE_BYTES_RECEIVED     : PATH_IF_CTR.format(interface_name, 'in-octets' ),
                    KpiSampleType.KPISAMPLETYPE_BYTES_TRANSMITTED  : PATH_IF_CTR.format(interface_name, 'out-octets'),
                    KpiSampleType.KPISAMPLETYPE_PACKETS_RECEIVED   : PATH_IF_CTR.format(interface_name, 'in-pkts'   ),
                    KpiSampleType.KPISAMPLETYPE_PACKETS_TRANSMITTED: PATH_IF_CTR.format(interface_name, 'out-pkts'  ),
                },
            }
            response.append(('/endpoints/endpoint[{:s}]'.format(endpoint['uuid']), endpoint))
        return response
class InterfaceHandler(_Handler):
    """Composes gNMI Set payloads and parses gNMI Get replies for OpenConfig /interfaces/interface."""

    def get_resource_key(self) -> str: return '/interface'
    def get_path(self) -> str: return '/interfaces/interface'

    def compose(self, resource_key : str, resource_value : Dict, delete : bool = False) -> Tuple[str, str]:
        """Return an (xpath, json_payload) pair configuring (or deleting) one subinterface.

        On delete, targets only the subinterface node; on create, writes the
        parent interface plus one IPv4-addressed subinterface.
        """
        if_name = str(resource_value['name']) # ethernet-1/1
        sif_index = int(resource_value.get('sub_if_index', 0)) # 0

        if delete:
            PATH_TMPL = '/interfaces/interface[name={:s}]/subinterfaces/subinterface[index={:d}]'
            str_path = PATH_TMPL.format(if_name, sif_index)
            str_data = json.dumps({})
            return str_path, str_data

        if_enabled = bool(resource_value.get('enabled', True)) # True/False
        sif_enabled = bool(resource_value.get('sub_if_enabled', True)) # True/False
        sif_ipv4_enabled = bool(resource_value.get('sub_if_ipv4_enabled', True)) # True/False
        sif_ipv4_address = str(resource_value['sub_if_ipv4_address']) # 172.16.0.1
        sif_ipv4_prefix = int(resource_value['sub_if_ipv4_prefix']) # 24

        str_path = '/interfaces/interface[name={:s}]'.format(if_name)
        str_data = json.dumps({
            'name': if_name,
            'config': {'name': if_name, 'enabled': if_enabled},
            'subinterfaces': {
                'subinterface': {
                    'index': sif_index,
                    'config': {'index': sif_index, 'enabled': sif_enabled},
                    'ipv4': {
                        'config': {'enabled': sif_ipv4_enabled},
                        'addresses': {
                            'address': {
                                'ip': sif_ipv4_address,
                                'config': {'ip': sif_ipv4_address, 'prefix_length': sif_ipv4_prefix},
                            }
                        }
                    }
                }
            }
        })
        return str_path, str_data

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Parse an OpenConfig interface list into flat (resource_key, dict) entries.

        Emits one '/interface[<name>]' entry per interface and one
        '/interface[<name>]/subinterface[<index>]' entry per subinterface.
        Fields may appear unprefixed or under full/abbreviated namespace
        prefixes; dict_get_first tries each alias in order.
        """
        json_interface_list : List[Dict] = json_data.get('interface', [])

        response = []
        for json_interface in json_interface_list:
            interface = {}

            interface_name = json_interface.get('name')
            if interface_name is None:
                LOGGER.info('DISCARDED json_interface = {:s}'.format(json.dumps(json_interface)))
                continue
            interface['name'] = interface_name

            CONFIG_FIELDS = ('config', 'openconfig-interface:config', 'oci:config')
            json_config : Dict = dict_get_first(json_interface, CONFIG_FIELDS, default={})

            STATE_FIELDS = ('state', 'openconfig-interface:state', 'oci:state')
            json_state : Dict = dict_get_first(json_interface, STATE_FIELDS, default={})

            # Each scalar is taken from 'config' first, falling back to 'state'.
            interface_type = json_config.get('type')
            if interface_type is None: interface_type = json_state.get('type')
            if interface_type is None:
                LOGGER.info('DISCARDED json_interface = {:s}'.format(json.dumps(json_interface)))
                continue
            # Strip IANA interface-type namespace prefixes.
            interface_type = interface_type.replace('ianaift:', '')
            interface_type = interface_type.replace('iana-if-type:', '')
            interface['type'] = interface_type

            interface_mtu = json_config.get('mtu')
            if interface_mtu is None: interface_mtu = json_state.get('mtu')
            if interface_mtu is not None: interface['mtu'] = int(interface_mtu)

            interface_enabled = json_config.get('enabled')
            if interface_enabled is None: interface_enabled = json_state.get('enabled')
            interface['enabled'] = False if interface_enabled is None else bool(interface_enabled)

            interface_management = json_config.get('management')
            if interface_management is None: interface_management = json_state.get('management')
            interface['management'] = False if interface_management is None else bool(interface_management)

            interface_descr = json_interface.get('config', {}).get('description')
            if interface_descr is not None: interface['description'] = interface_descr

            json_subinterfaces = json_interface.get('subinterfaces', {})
            json_subinterface_list : List[Dict] = json_subinterfaces.get('subinterface', [])

            for json_subinterface in json_subinterface_list:
                subinterface = {}

                subinterface_index = json_subinterface.get('state', {}).get('index')
                if subinterface_index is None: continue
                subinterface['index'] = int(subinterface_index)

                subinterface_name = json_subinterface.get('state', {}).get('name')
                if subinterface_name is None: continue
                subinterface['name'] = subinterface_name

                subinterface_enabled = json_subinterface.get('state', {}).get('enabled', False)
                subinterface['enabled'] = bool(subinterface_enabled)

                # Drill down vlan -> match -> single-tagged -> config, tolerating namespace prefixes.
                VLAN_FIELDS = ('vlan', 'openconfig-vlan:vlan', 'ocv:vlan')
                json_vlan = dict_get_first(json_subinterface, VLAN_FIELDS, default={})

                MATCH_FIELDS = ('match', 'openconfig-vlan:match', 'ocv:match')
                json_vlan = dict_get_first(json_vlan, MATCH_FIELDS, default={})

                SIN_TAG_FIELDS = ('single-tagged', 'openconfig-vlan:single-tagged', 'ocv:single-tagged')
                json_vlan = dict_get_first(json_vlan, SIN_TAG_FIELDS, default={})

                CONFIG_FIELDS = ('config', 'openconfig-vlan:config', 'ocv:config')
                json_vlan = dict_get_first(json_vlan, CONFIG_FIELDS, default={})

                VLAN_ID_FIELDS = ('vlan-id', 'openconfig-vlan:vlan-id', 'ocv:vlan-id')
                subinterface_vlan_id = dict_get_first(json_vlan, VLAN_ID_FIELDS)
                if subinterface_vlan_id is not None: subinterface['vlan_id'] = subinterface_vlan_id

                # TODO: implement support for multiple IP addresses per subinterface
                # (currently the last address in the list wins).
                IPV4_FIELDS = ('ipv4', 'openconfig-if-ip:ipv4', 'ociip:ipv4')
                json_ipv4 = dict_get_first(json_subinterface, IPV4_FIELDS, default={})

                IPV4_ADDRESSES_FIELDS = ('addresses', 'openconfig-if-ip:addresses', 'ociip:addresses')
                json_ipv4_addresses = dict_get_first(json_ipv4, IPV4_ADDRESSES_FIELDS, default={})

                IPV4_ADDRESS_FIELDS = ('address', 'openconfig-if-ip:address', 'ociip:address')
                json_ipv4_address_list : List[Dict] = dict_get_first(json_ipv4_addresses, IPV4_ADDRESS_FIELDS, default=[])

                for json_ipv4_address in json_ipv4_address_list:
                    STATE_FIELDS = ('state', 'openconfig-if-ip:state', 'ociip:state')
                    json_ipv4_address_state = dict_get_first(json_ipv4_address, STATE_FIELDS, default={})

                    IP_FIELDS = ('ip', 'openconfig-if-ip:ip', 'ociip:ip')
                    ipv4_address_ip = dict_get_first(json_ipv4_address_state, IP_FIELDS)
                    if ipv4_address_ip is not None: subinterface['address_ip'] = ipv4_address_ip

                    PREFIX_FIELDS = ('prefix-length', 'openconfig-if-ip:prefix-length', 'ociip:prefix-length')
                    ipv4_address_prefix = dict_get_first(json_ipv4_address_state, PREFIX_FIELDS)
                    if ipv4_address_prefix is not None: subinterface['address_prefix'] = int(ipv4_address_prefix)

                if len(subinterface) == 0: continue
                resource_key = '/interface[{:s}]/subinterface[{:s}]'.format(interface['name'], str(subinterface['index']))
                response.append((resource_key, subinterface))

            if len(interface) == 0: continue
            response.append(('/interface[{:s}]'.format(interface['name']), interface))

        return response

    def parse_counters(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Parse interface state counters into ('/interface[<name>]', counters_dict) entries.

        NOTE(review): duplicates the extraction logic in InterfaceCounterHandler.parse;
        consider consolidating.
        """
        LOGGER.info('[parse_counters] json_data = {:s}'.format(json.dumps(json_data)))
        json_interface_list : List[Dict] = json_data.get('interface', [])

        response = []
        for json_interface in json_interface_list:
            LOGGER.info('[parse_counters] json_interface = {:s}'.format(json.dumps(json_interface)))

            interface = {}

            NAME_FIELDS = ('name', 'openconfig-interface:name', 'oci:name')
            interface_name = dict_get_first(json_interface, NAME_FIELDS)
            if interface_name is None: continue
            interface['name'] = interface_name

            STATE_FIELDS = ('state', 'openconfig-interface:state', 'oci:state')
            json_state = dict_get_first(json_interface, STATE_FIELDS, default={})

            COUNTERS_FIELDS = ('counters', 'openconfig-interface:counters', 'oci:counters')
            json_counters = dict_get_first(json_state, COUNTERS_FIELDS, default={})

            IN_PKTS_FIELDS = ('in-pkts', 'openconfig-interface:in-pkts', 'oci:in-pkts')
            interface_in_pkts = dict_get_first(json_counters, IN_PKTS_FIELDS)
            if interface_in_pkts is not None: interface['in-pkts'] = int(interface_in_pkts)

            IN_OCTETS_FIELDS = ('in-octets', 'openconfig-interface:in-octets', 'oci:in-octets')
            interface_in_octets = dict_get_first(json_counters, IN_OCTETS_FIELDS)
            if interface_in_octets is not None: interface['in-octets'] = int(interface_in_octets)

            IN_ERRORS_FIELDS = ('in-errors', 'openconfig-interface:in-errors', 'oci:in-errors')
            interface_in_errors = dict_get_first(json_counters, IN_ERRORS_FIELDS)
            if interface_in_errors is not None: interface['in-errors'] = int(interface_in_errors)

            OUT_OCTETS_FIELDS = ('out-octets', 'openconfig-interface:out-octets', 'oci:out-octets')
            interface_out_octets = dict_get_first(json_counters, OUT_OCTETS_FIELDS)
            if interface_out_octets is not None: interface['out-octets'] = int(interface_out_octets)

            OUT_PKTS_FIELDS = ('out-pkts', 'openconfig-interface:out-pkts', 'oci:out-pkts')
            interface_out_pkts = dict_get_first(json_counters, OUT_PKTS_FIELDS)
            if interface_out_pkts is not None: interface['out-pkts'] = int(interface_out_pkts)

            OUT_ERRORS_FIELDS = ('out-errors', 'openconfig-interface:out-errors', 'oci:out-errors')
            interface_out_errors = dict_get_first(json_counters, OUT_ERRORS_FIELDS)
            if interface_out_errors is not None: interface['out-errors'] = int(interface_out_errors)

            OUT_DISCARDS_FIELDS = ('out-discards', 'openconfig-interface:out-discards', 'oci:out-discards')
            interface_out_discards = dict_get_first(json_counters, OUT_DISCARDS_FIELDS)
            if interface_out_discards is not None: interface['out-discards'] = int(interface_out_discards)

            if len(interface) == 0: continue
            response.append(('/interface[{:s}]'.format(interface['name']), interface))

        return response
class InterfaceCounterHandler(_Handler):
    """Parses OpenConfig interface state counters into ('/interface[<name>]', counters) entries."""

    # Counter leaves read from /interfaces/interface/state/counters. Each may appear
    # unprefixed or under the full/abbreviated OpenConfig namespace prefix.
    COUNTER_FIELDS = (
        'in-pkts', 'in-octets', 'in-errors', 'out-octets', 'out-pkts', 'out-errors', 'out-discards',
    )

    def get_resource_key(self) -> str: return '/interface/counters'
    def get_path(self) -> str: return '/interfaces/interface/state/counters'

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Extract the interface name and all known counters from a gNMI Get reply.

        Returns one ('/interface[<name>]', {<counter>: int, ...}) entry per
        interface that carries a name; interfaces without a name are skipped.
        """
        LOGGER.info('[parse] json_data = {:s}'.format(json.dumps(json_data)))
        json_interface_list : List[Dict] = json_data.get('interface', [])

        response = []
        for json_interface in json_interface_list:
            LOGGER.info('[parse] json_interface = {:s}'.format(json.dumps(json_interface)))

            interface = {}

            NAME_FIELDS = ('name', 'openconfig-interface:name', 'oci:name')
            interface_name = dict_get_first(json_interface, NAME_FIELDS)
            if interface_name is None: continue
            interface['name'] = interface_name

            STATE_FIELDS = ('state', 'openconfig-interface:state', 'oci:state')
            json_state = dict_get_first(json_interface, STATE_FIELDS, default={})

            COUNTERS_FIELDS = ('counters', 'openconfig-interface:counters', 'oci:counters')
            json_counters = dict_get_first(json_state, COUNTERS_FIELDS, default={})

            # Table-driven extraction replaces the previous seven copy-pasted stanzas;
            # behavior is identical (same aliases tried in the same order, int-cast).
            for counter_name in self.COUNTER_FIELDS:
                field_names = (
                    counter_name,
                    'openconfig-interface:{:s}'.format(counter_name),
                    'oci:{:s}'.format(counter_name),
                )
                counter_value = dict_get_first(json_counters, field_names)
                if counter_value is None: continue
                interface[counter_name] = int(counter_value)

            if len(interface) == 0: continue
            response.append(('/interface[{:s}]'.format(interface['name']), interface))

        return response
class NetworkInstanceHandler(_Handler):
    """Composes gNMI Set requests for OpenConfig network instances (e.g. L3VRFs)."""

    def get_resource_key(self) -> str: return '/network_instance'
    def get_path(self) -> str: return '/network-instances/network-instance'

    def compose(self, resource_key : str, resource_value : Dict, delete : bool = False) -> Tuple[str, str]:
        """Return (xpath, json_payload) creating or deleting the network instance in resource_value."""
        ni_name = str(resource_value['name']) # test-svc
        ni_path = '/network-instances/network-instance[name={:s}]'.format(ni_name)

        if delete:
            return ni_path, json.dumps({})

        ni_type = str(resource_value['type']) # L3VRF / L2VSI / ...

        # Does not work: [FailedPrecondition] unsupported identifier 'DIRECTLY_CONNECTED'
        #protocols = [self._compose_directly_connected()]

        # Map short type names to their fully-qualified OpenConfig identities;
        # unknown types are passed through unchanged.
        MAP_OC_NI_TYPE = {
            'L3VRF': 'openconfig-network-instance-types:L3VRF',
        }
        ni_type = MAP_OC_NI_TYPE.get(ni_type, ni_type)

        payload = {
            'name': ni_name,
            'config': {'name': ni_name, 'type': ni_type},
            #'protocols': {'protocol': protocols},
        }
        return ni_path, json.dumps(payload)

    def _compose_directly_connected(self, name=None, enabled=True) -> Dict:
        """Build a DIRECTLY_CONNECTED protocol entry (currently unused; rejected by some targets)."""
        identifier = 'DIRECTLY_CONNECTED'
        if name is None: name = 'DIRECTLY_CONNECTED'
        config = {'identifier': identifier, 'name': name, 'enabled': enabled}
        return {'identifier': identifier, 'name': name, 'config': config}

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Parsing of network-instance state is not implemented yet; always returns an empty list."""
        response = []
        return response
class NetworkInstanceInterfaceHandler(_Handler):
    """Composes gNMI Set requests binding subinterfaces to a network instance."""

    def get_resource_key(self) -> str: return '/network_instance/interface'
    def get_path(self) -> str: return '/network-instances/network-instance/interfaces'

    def compose(self, resource_key : str, resource_value : Dict, delete : bool = False) -> Tuple[str, str]:
        """Return (xpath, json_payload) attaching (or detaching) subinterface <if_name>.<sif_index>."""
        ni_name = str(resource_value['name' ]) # test-svc
        if_name = str(resource_value['if_name' ]) # ethernet-1/1
        sif_index = int(resource_value['sif_index']) # 0
        # OpenConfig identifies the binding as '<interface>.<subinterface-index>'.
        if_id = '{:s}.{:d}'.format(if_name, sif_index)

        str_path = '/network-instances/network-instance[name={:s}]/interfaces/interface[id={:s}]'.format(ni_name, if_id)
        if delete:
            return str_path, json.dumps({})

        payload = {
            'id': if_id,
            'config': {'id': if_id, 'interface': if_name, 'subinterface': sif_index},
        }
        return str_path, json.dumps(payload)

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Parsing of interface bindings is not implemented yet; always returns an empty list."""
        response = []
        return response
class NetworkInstanceStaticRouteHandler(_Handler):
    """Composes gNMI Set payloads for static routes under a network instance's STATIC protocol.

    NOTE(review): the payload mixes underscore keys ('static_routes', 'next_hops',
    'next_hop') with the hyphenated 'next-hop' list key; OpenConfig YANG uses
    hyphens throughout — confirm against the target's accepted encoding.
    """

    def get_resource_key(self) -> str: return '/network_instance/static_route'
    def get_path(self) -> str: return '/network-instances/network-instance/static_route'

    def compose(self, resource_key : str, resource_value : Dict, delete : bool = False) -> Tuple[str, str]:
        """Return (xpath, json_payload) adding or removing one static route.

        On delete, targets the specific static[prefix] entry; on create, writes
        the whole STATIC protocol container with a single route and next-hop.
        """
        ni_name = str(resource_value['name' ]) # test-svc
        prefix = str(resource_value['prefix' ]) # '172.0.1.0/24'

        # Static routes always live under the well-known STATIC/static protocol instance.
        identifier = 'STATIC'
        name = 'static'
        if delete:
            PATH_TMPL = '/network-instances/network-instance[name={:s}]/protocols'
            PATH_TMPL += '/protocol[identifier={:s}][name={:s}]/static-routes/static[prefix={:s}]'
            str_path = PATH_TMPL.format(ni_name, identifier, name, prefix)
            str_data = json.dumps({})
            return str_path, str_data

        next_hop = str(resource_value['next_hop' ]) # '172.0.0.1'
        next_hop_index = int(resource_value.get('next_hop_index', 0)) # 0

        PATH_TMPL = '/network-instances/network-instance[name={:s}]/protocols/protocol[identifier={:s}][name={:s}]'
        str_path = PATH_TMPL.format(ni_name, identifier, name)
        str_data = json.dumps({
            'identifier': identifier, 'name': name,
            'config': {'identifier': identifier, 'name': name, 'enabled': True},
            'static_routes': {'static': [{
                'prefix': prefix,
                'config': {'prefix': prefix},
                'next_hops': {
                    'next-hop': [{
                        'index': next_hop_index,
                        'config': {'index': next_hop_index, 'next_hop': next_hop}
                    }]
                }
            }]}
        })
        return str_path, str_data

    def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]:
        """Parsing of static routes is not implemented yet; always returns an empty list."""
        response = []
        return response
+ +import re +from typing import Any, Dict, Iterable + +RE_REMOVE_FILTERS = re.compile(r'\[[^\]]+\]') +RE_REMOVE_NAMESPACES = re.compile(r'\/[a-zA-Z0-9\_\-]+:') + +def get_schema(resource_key : str): + resource_key = RE_REMOVE_FILTERS.sub('', resource_key) + resource_key = RE_REMOVE_NAMESPACES.sub('/', resource_key) + return resource_key + +def dict_get_first(d : Dict, field_names : Iterable[str], default=None) -> Any: + for field_name in field_names: + if field_name not in d: continue + return d[field_name] + return default diff --git a/src/device/service/drivers/gnmi_openconfig/handlers/_Handler.py b/src/device/service/drivers/gnmi_openconfig/handlers/_Handler.py new file mode 100644 index 000000000..d20c77b11 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/handlers/_Handler.py @@ -0,0 +1,32 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any, Dict, List, Tuple + +class _Handler: + def get_resource_key(self) -> str: + # Retrieve the TeraFlowSDN resource_key path schema used to point this handler + raise NotImplementedError() + + def get_path(self) -> str: + # Retrieve the OpenConfig path schema used to interrogate the device + raise NotImplementedError() + + def compose(self, resource_key : str, resource_value : Dict, delete : bool = False) -> Tuple[str, str]: + # Compose a Set/Delete message based on the resource_key/resource_value fields, and the delete flag + raise NotImplementedError() + + def parse(self, json_data : Dict) -> List[Tuple[str, Dict[str, Any]]]: + # Parse a Reply from the device and return a list of resource_key/resource_value pairs + raise NotImplementedError() diff --git a/src/device/service/drivers/gnmi_openconfig/handlers/__init__.py b/src/device/service/drivers/gnmi_openconfig/handlers/__init__.py new file mode 100644 index 000000000..39cd7c66a --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/handlers/__init__.py @@ -0,0 +1,103 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +from typing import Dict, List, Optional, Tuple, Union +from device.service.driver_api._Driver import RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES +from ._Handler import _Handler +from .Component import ComponentHandler +from .Interface import InterfaceHandler +from .InterfaceCounter import InterfaceCounterHandler +from .NetworkInstance import NetworkInstanceHandler +from .NetworkInstanceInterface import NetworkInstanceInterfaceHandler +from .NetworkInstanceStaticRoute import NetworkInstanceStaticRouteHandler +from .Tools import get_schema + +LOGGER = logging.getLogger(__name__) + +comph = ComponentHandler() +ifaceh = InterfaceHandler() +ifctrh = InterfaceCounterHandler() +nih = NetworkInstanceHandler() +niifh = NetworkInstanceInterfaceHandler() +nisrh = NetworkInstanceStaticRouteHandler() + +ALL_RESOURCE_KEYS = [ + RESOURCE_ENDPOINTS, + RESOURCE_INTERFACES, + RESOURCE_NETWORK_INSTANCES, +] + +RESOURCE_KEY_MAPPER = { + RESOURCE_ENDPOINTS : comph.get_resource_key(), + RESOURCE_INTERFACES : ifaceh.get_resource_key(), + RESOURCE_NETWORK_INSTANCES : nih.get_resource_key(), +} + +PATH_MAPPER = { + '/components' : comph.get_path(), + '/interfaces' : ifaceh.get_path(), + '/network-instances' : nih.get_path(), +} + +RESOURCE_KEY_TO_HANDLER = { + comph.get_resource_key() : comph, + ifaceh.get_resource_key() : ifaceh, + ifctrh.get_resource_key() : ifctrh, + nih.get_resource_key() : nih, + niifh.get_resource_key() : niifh, + nisrh.get_resource_key() : nisrh, +} + +PATH_TO_HANDLER = { + comph.get_path() : comph, + ifaceh.get_path() : ifaceh, + ifctrh.get_path() : ifctrh, + nih.get_path() : nih, + niifh.get_path() : niifh, + nisrh.get_path() : nisrh, +} + +def get_handler( + resource_key : Optional[str] = None, path : Optional[str] = None, raise_if_not_found=True +) -> Optional[_Handler]: + if (resource_key is None) == (path is None): + MSG = 'Exactly one of resource_key({:s}) or path({:s}) must be specified' + raise 
 Exception(MSG.format(str(resource_key), str(path))) # pylint: disable=broad-exception-raised
+    if resource_key is not None:
+        resource_key_schema = get_schema(resource_key)
+        resource_key_schema = RESOURCE_KEY_MAPPER.get(resource_key_schema, resource_key_schema)
+        handler = RESOURCE_KEY_TO_HANDLER.get(resource_key_schema)
+        if handler is None and raise_if_not_found:
+            MSG = 'Handler not found: resource_key={:s} resource_key_schema={:s}'
+            # pylint: disable=broad-exception-raised
+            raise Exception(MSG.format(str(resource_key), str(resource_key_schema)))
+    elif path is not None:
+        path_schema = get_schema(path)
+        path_schema = PATH_MAPPER.get(path_schema, path_schema)
+        handler = PATH_TO_HANDLER.get(path_schema)
+        if handler is None and raise_if_not_found:
+            MSG = 'Handler not found: path={:s} path_schema={:s}'
+            # pylint: disable=broad-exception-raised
+            raise Exception(MSG.format(str(path), str(path_schema)))
+    return handler
+
+def get_path(resource_key : str) -> str:
+    return get_handler(resource_key=resource_key).get_path()
+
+def parse(str_path : str, value : Union[Dict, List]):
+    return get_handler(path=str_path).parse(value)
+
+def compose(resource_key : str, resource_value : Union[Dict, List], delete : bool = False) -> Tuple[str, str]:
+    return get_handler(resource_key=resource_key).compose(resource_key, resource_value, delete=delete)
diff --git a/src/device/service/drivers/gnmi_openconfig/handlers/old_bgp_handler.txt b/src/device/service/drivers/gnmi_openconfig/handlers/old_bgp_handler.txt
new file mode 100644
index 000000000..595a19788
--- /dev/null
+++ b/src/device/service/drivers/gnmi_openconfig/handlers/old_bgp_handler.txt
@@ -0,0 +1,138 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# WARNING: this handler is work in progress. Use with care! + +import logging, json +from typing import Any, Dict, List, Tuple + +LOGGER = logging.getLogger(__name__) + +class NetworkInstanceHandler: + def get_resource_key(self) -> str: return '/network_instance' + def get_path(self) -> str: return '/network-instances/network-instance' + + def compose_set(self, resource_key : str, resource_value : Dict) -> Tuple[str, str]: + ni_name = str(resource_value['name']) # test-svc + ni_type = str(resource_value['type']) # L3VRF / + + if_name = str (resource_value['name' ]) # ethernet-1/1 + if_enabled = bool(resource_value.get('enabled' , True)) # True/False + sif_index = int (resource_value.get('sub_if_index' , 0 )) # 0 + sif_enabled = bool(resource_value.get('sub_if_enabled' , True)) # True/False + sif_ipv4_enabled = bool(resource_value.get('sub_if_ipv4_enabled', True)) # True/False + sif_ipv4_address = str (resource_value['sub_if_ipv4_address' ]) # 172.16.0.1 + sif_ipv4_prefix = int (resource_value['sub_if_ipv4_prefix' ]) # 24 + + str_path = '/interfaces/interface[name={:s}]'.format(if_name) + str_data = json.dumps({ + "name": if_name, + "config": {"name": if_name, "enabled": if_enabled}, + "subinterfaces": { + "subinterface": { + "index": sif_index, + "config": {"index": sif_index, "enabled": sif_enabled}, + "ipv4": { + "config": {"enabled": sif_ipv4_enabled}, + "addresses": { + "address": { + "ip": sif_ipv4_address, + "config": {"ip": sif_ipv4_address, "prefix_length": sif_ipv4_prefix}, + } + } + } + } + } + }) + return str_path, str_data + + + #oc_ni = 
openconfig_network_instance() + #ni = oc_ni.network_instances.network_instance.add(name=ni_name) + #ni.config.name = ni_name + + #ni_desc = resource_value.get('description') + #if ni_desc is not None: ni.config.description = ni_desc + + #if ni_type == 'L3VRF': + # ni.config.type = 'L3VRF' + # #ni_router_id = resource_value.get('router_id') + # #if ni_router_id is not None: ni.config.router_id = ni_router_id + + # proto_bgp = ni.protocols.protocol.add(identifier='BGP', name=ni_name) + # proto_bgp.config.identifier = 'BGP' + # proto_bgp.config.name = ni_name + # proto_bgp.config.enabled = True + # proto_bgp.bgp.global_.config.as_ = 65000 + # proto_bgp.bgp.global_.config.router_id = '172.0.0.1' + + # #ni.config.route_distinguisher = resource_value['route_distinguisher'] + #elif ni_type == 'L3VRF': + # pass + #else: + # raise NotImplementedError() + + #str_path = '/network-instances/network-instance[name={:s}]'.format(ni_name) + #str_data = pybindJSON.dumps(ni, mode='default') + + #str_path = '/network-instances/network-instance[name={:s}]/protocols/protocol[identifier=BGP][name=BGP]'.format(ni_name) + #str_data = json.dumps({ + # "identifier": "BGP", + # "name": "BGP", + # "config": {"identifier": "BGP", "name": "BGP", "enabled": True}, + # "bgp": {"global": {"config": {"as": 65000, "router-id": "5.5.5.5"}}} + #}) + + str_path = '/network-instances/network-instance[name=test-svc]' + str_data = json.dumps({ + "name": "test-svc", + "config": { + "name": "test-svc", + "type": "openconfig-network-instance-types:L3VRF" + }, + "protocols": { + "protocol": [ + { + "identifier": "DIRECTLY_CONNECTED", + "name": "DIRECTLY-CONNECTED", + "config": {"identifier": "DIRECTLY_CONNECTED", "name": "DIRECTLY-CONNECTED", "enabled": True}, + }, + { + "identifier": "STATIC", + "name": "static", + "config": {"identifier": "STATIC", "name": "static", "enabled": True}, + "static_routes": { + "static": [ + { + "prefix": "172.0.1.0/24", + "config": {"prefix": "172.0.1.0/24"}, + "next_hops": { + 
"next-hop": [{"index": 0, "config": {"index": 0, "next_hop": "172.0.0.1"}}] + } + } + ] + } + } + ] + }, + }) + + + #str_path = '/network-instances/network-instance[name={:s}]/protocols/protocol[identifier=DIRECTLY_CONNECTED][name=DIR]'.format(ni_name) + #str_data = json.dumps({ + # "identifier": "DIRECTLY_CONNECTED", + # "name": "DIR", + # "config": {"identifier": "DIRECTLY_CONNECTED", "name": "DIR", "enabled": True}, + #}) diff --git a/src/device/service/drivers/gnmi_openconfig/tools/Capabilities.py b/src/device/service/drivers/gnmi_openconfig/tools/Capabilities.py new file mode 100644 index 000000000..b90bf3db8 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/Capabilities.py @@ -0,0 +1,36 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional, Set, Union +from common.tools.grpc.Tools import grpc_message_to_json +from ..gnmi.gnmi_pb2 import CapabilityRequest # pylint: disable=no-name-in-module +from ..gnmi.gnmi_pb2_grpc import gNMIStub + +def get_supported_encodings( + stub : gNMIStub, username : str, password : str, timeout : Optional[int] = None +) -> Set[Union[str, int]]: + metadata = [('username', username), ('password', password)] + req = CapabilityRequest() + reply = stub.Capabilities(req, metadata=metadata, timeout=timeout) + + data = grpc_message_to_json(reply) + supported_encodings = { + supported_encoding + for supported_encoding in data.get('supported_encodings', []) + if isinstance(supported_encoding, str) + } + if len(supported_encodings) == 0: + # pylint: disable=broad-exception-raised + raise Exception('No supported encodings found') + return supported_encodings diff --git a/src/device/service/drivers/gnmi_openconfig/tools/Channel.py b/src/device/service/drivers/gnmi_openconfig/tools/Channel.py new file mode 100644 index 000000000..264dd0321 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/Channel.py @@ -0,0 +1,34 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc, logging, ssl + +def get_grpc_channel(address : str, port : int, use_tls : bool, logger : logging.Logger) -> grpc.Channel: + endpoint = str(address) + ':' + str(port) + logger.info('Connecting gNMI {:s}...'.format(endpoint)) + if use_tls: + logger.debug('Getting server certificate...') + str_server_certificate = ssl.get_server_certificate((str(address), int(port))) + bytes_server_certificate = str_server_certificate.encode('UTF-8') + logger.debug('Using secure SSL channel...') + credentials = grpc.ssl_channel_credentials( + root_certificates=bytes_server_certificate, private_key=None, certificate_chain=None) + options = [ + #('grpc.ssl_target_name_override', options.altName,) + ] + channel = grpc.secure_channel(endpoint, credentials, options) + else: + logger.debug('Using insecure channel...') + channel = grpc.insecure_channel(endpoint) + return channel diff --git a/src/device/service/drivers/gnmi_openconfig/tools/Path.py b/src/device/service/drivers/gnmi_openconfig/tools/Path.py new file mode 100644 index 000000000..40ab28dc6 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/Path.py @@ -0,0 +1,98 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re +from typing import List +from ..gnmi.gnmi_pb2 import Path, PathElem + +RE_PATH_SPLIT = re.compile(r'/(?=(?:[^\[\]]|\[[^\[\]]+\])*$)') +RE_PATH_KEYS = re.compile(r'\[(.*?)\]') + +def path_from_string(path='/'): + if not path: return Path(elem=[]) + + if path[0] == '/': + if path[-1] == '/': + path_list = RE_PATH_SPLIT.split(path)[1:-1] + else: + path_list = RE_PATH_SPLIT.split(path)[1:] + else: + if path[-1] == '/': + path_list = RE_PATH_SPLIT.split(path)[:-1] + else: + path_list = RE_PATH_SPLIT.split(path) + + path = [] + for elem in path_list: + elem_name = elem.split('[', 1)[0] + elem_keys = RE_PATH_KEYS.findall(elem) + dict_keys = dict(x.split('=', 1) for x in elem_keys) + path.append(PathElem(name=elem_name, key=dict_keys)) + + return Path(elem=path) + +def path_to_string(path : Path) -> str: + path_parts = list() + for elem in path.elem: + kv_list = list() + for key in elem.key: + value = elem.key[key] + kv = '{:s}={:s}'.format(key, value) + kv_list.append(kv) + + path_part_name = elem.name + if len(kv_list) == 0: + path_parts.append(path_part_name) + else: + str_kv = ', '.join(kv_list) + path_part = '{:s}[{:s}]'.format(path_part_name, str_kv) + path_parts.append(path_part) + + str_path = '/{:s}'.format('/'.join(path_parts)) + return str_path + +def parse_xpath(xpath : str) -> str: + xpath = xpath.replace('//', '/') + xpath = xpath.replace('oci:interface[', 'interface[') + xpath = xpath.replace('/oci', '/openconfig-interfaces') + xpath = re.sub(r"\[oci:name='(.*?)'\]", r"[name=\1]", xpath) + # Eliminar el contador del final + xpath = '/'.join(xpath.split('/')[:-1]) + '/' + return xpath + +def split_resource_key(path): + pattern = r'/state/counters/(.*)' + match = re.search(pattern, path) + if match is None: return None + return match.group(1) + +def dict_to_xpath(d: dict) -> str: + xpath = '/' + for item in d['elem']: + name = item.get('name') + if name == 'interface': + key = item.get('key') + interface_name = key.get('name') + xpath += 
f"/oci:interface[oci:name='{interface_name}']" + else: + xpath += f"/{name}" + xpath = xpath.replace('openconfig-interfaces', 'oci') + return xpath + +def compose_path(base_path : str, path_filters : List[str] = []): + new_path = '' if base_path is None else str(base_path) + for path_filter in path_filters: + if path_filter == '': continue + new_path = '{:s}[{:s}]'.format(new_path, path_filter) + return new_path diff --git a/src/device/service/drivers/gnmi_openconfig/tools/Subscriptions.py b/src/device/service/drivers/gnmi_openconfig/tools/Subscriptions.py new file mode 100644 index 000000000..18b6445ae --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/Subscriptions.py @@ -0,0 +1,47 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Collection of samples through NetConf is very slow and each request collects all the data. +# Populate a cache periodically (when first interface is interrogated). 
+# Evict data after some seconds, when data is considered as outdated + +import anytree +from typing import Any, List +from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_subnode_value + +class Subscriptions: + def __init__(self) -> None: + self.__resolver = anytree.Resolver(pathattr='name') + self.__subscriptions = TreeNode('.') + + def add( + self, resource_path : List[str], sampling_duration : float, sampling_interval : float, value : Any + ) -> None: + subscription_path = resource_path + ['{:.3f}:{:.3f}'.format(sampling_duration, sampling_interval)] + set_subnode_value(self.__resolver, self.__subscriptions, subscription_path, value) + + def get( + self, resource_path : List[str], sampling_duration : float, sampling_interval : float + ) -> TreeNode: + subscription_path = resource_path + ['{:.3f}:{:.3f}'.format(sampling_duration, sampling_interval)] + value = get_subnode(self.__resolver, self.__subscriptions, subscription_path) + return value + + def delete( + self, reference : TreeNode + ) -> None: + parent : TreeNode = reference.parent + children = list(parent.children) + children.remove(reference) + parent.children = tuple(children) diff --git a/src/device/service/drivers/gnmi_openconfig/tools/Value.py b/src/device/service/drivers/gnmi_openconfig/tools/Value.py new file mode 100644 index 000000000..4797930a1 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/Value.py @@ -0,0 +1,52 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import base64, json +from typing import Any +from ..gnmi.gnmi_pb2 import TypedValue + +def decode_value(value : TypedValue) -> Any: + encoding = value.WhichOneof('value') + if encoding == 'json_val': + value = value.json_val + #mdl, cls = self._classes[className] + #obj = json.loads(strObj) + #if isinstance(obj, (list,)): + # obj = map(lambda n: pybindJSON.loads(n, mdl, cls.__name__), obj) + # data = map(lambda n: json.loads(pybindJSON.dumps(n, mode='default')), obj) + #else: + # obj = pybindJSON.loads(obj, mdl, cls.__name__) + # data = json.loads(pybindJSON.dumps(obj, mode='default')) + raise NotImplementedError() + #return value + elif encoding == 'json_ietf_val': + value : str = value.json_ietf_val + try: + return json.loads(value) + except json.decoder.JSONDecodeError: + # Assume is Base64-encoded + b_b64_value = value.encode('UTF-8') + b_value = base64.b64decode(b_b64_value, validate=True) + value = b_value.decode('UTF-8') + return json.loads(value) + else: + MSG = 'Unsupported Encoding({:s}) in Value({:s})' + # pylint: disable=broad-exception-raised + raise Exception(MSG.format(str(encoding), str(value))) + +def value_exists(value) -> bool: + if value is None: return False + if isinstance(value, Exception): return False + if issubclass(type(value), Exception): return False + return True diff --git a/src/device/service/drivers/gnmi_openconfig/tools/__init__.py b/src/device/service/drivers/gnmi_openconfig/tools/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/tools/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From fb887f79671ec37656199ebf4946488bd3a76416 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:02:53 +0000 Subject: [PATCH 31/62] Device component: - Added minor tests for gNMI and NetConf --- src/device/tests/test_gnmi.py | 115 +++++++++++++++++++++++++++++++ src/device/tests/test_netconf.py | 31 +++++++++ 2 files changed, 146 insertions(+) create mode 100644 src/device/tests/test_gnmi.py create mode 100644 src/device/tests/test_netconf.py diff --git a/src/device/tests/test_gnmi.py b/src/device/tests/test_gnmi.py new file mode 100644 index 000000000..50c915582 --- /dev/null +++ b/src/device/tests/test_gnmi.py @@ -0,0 +1,115 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging, os, sys, time +from typing import Dict, Tuple +os.environ['DEVICE_EMULATED_ONLY'] = 'YES' +from device.service.drivers.gnmi_openconfig.GnmiOpenConfigDriver import GnmiOpenConfigDriver # pylint: disable=wrong-import-position +#from device.service.driver_api._Driver import ( +# RESOURCE_ENDPOINTS, RESOURCE_INTERFACES, RESOURCE_NETWORK_INSTANCES, RESOURCE_ROUTING_POLICIES, RESOURCE_SERVICES +#) + +logging.basicConfig(level=logging.DEBUG) +LOGGER = logging.getLogger(__name__) +LOGGER.setLevel(logging.DEBUG) + +# +---+---------------------------+--------------+---------------------------------+-------+---------+--------------------+--------------+ +# | # | Name | Container ID | Image | Kind | State | IPv4 Address | IPv6 Address | +# +---+---------------------------+--------------+---------------------------------+-------+---------+--------------------+--------------+ +# | 1 | clab-tfs-scenario-client1 | a8d48ec3265a | ghcr.io/hellt/network-multitool | linux | running | 172.100.100.201/24 | N/A | +# | 2 | clab-tfs-scenario-client2 | fc88436d2b32 | ghcr.io/hellt/network-multitool | linux | running | 172.100.100.202/24 | N/A | +# | 3 | clab-tfs-scenario-srl1 | b995b9bdadda | ghcr.io/nokia/srlinux | srl | running | 172.100.100.101/24 | N/A | +# | 4 | clab-tfs-scenario-srl2 | aacfc38cc376 | ghcr.io/nokia/srlinux | srl | running | 172.100.100.102/24 | N/A | +# +---+---------------------------+--------------+---------------------------------+-------+---------+--------------------+--------------+ + +def interface(if_name, sif_index, ipv4_address, ipv4_prefix, enabled) -> Tuple[str, Dict]: + str_path = '/interface[{:s}]'.format(if_name) + str_data = {'name': if_name, 'enabled': enabled, 'sub_if_index': sif_index, 'sub_if_enabled': enabled, + 'sub_if_ipv4_enabled': enabled, 'sub_if_ipv4_address': ipv4_address, 'sub_if_ipv4_prefix': ipv4_prefix} + return str_path, str_data + +def network_instance(ni_name, ni_type) -> Tuple[str, Dict]: + str_path = 
'/network_instance[{:s}]'.format(ni_name) + str_data = {'name': ni_name, 'type': ni_type} + return str_path, str_data + +def network_instance_static_route(ni_name, prefix, next_hop, next_hop_index=0) -> Tuple[str, Dict]: + str_path = '/network_instance[{:s}]/static_route[{:s}]'.format(ni_name, prefix) + str_data = {'name': ni_name, 'prefix': prefix, 'next_hop': next_hop, 'next_hop_index': next_hop_index} + return str_path, str_data + +def network_instance_interface(ni_name, if_name, sif_index) -> Tuple[str, Dict]: + str_path = '/network_instance[{:s}]/interface[{:s}.{:d}]'.format(ni_name, if_name, sif_index) + str_data = {'name': ni_name, 'if_name': if_name, 'sif_index': sif_index} + return str_path, str_data + +def main(): + driver_settings = { + 'protocol': 'gnmi', + 'username': 'admin', + 'password': 'NokiaSrl1!', + 'use_tls' : True, + } + driver = GnmiOpenConfigDriver('172.100.100.102', 57400, **driver_settings) + driver.Connect() + + #resources_to_get = [] + #resources_to_get = [RESOURCE_ENDPOINTS] + #resources_to_get = [RESOURCE_INTERFACES] + #resources_to_get = [RESOURCE_NETWORK_INSTANCES] + #resources_to_get = [RESOURCE_ROUTING_POLICIES] + #resources_to_get = [RESOURCE_SERVICES] + #LOGGER.info('resources_to_get = {:s}'.format(str(resources_to_get))) + #results_getconfig = driver.GetConfig(resources_to_get) + #LOGGER.info('results_getconfig = {:s}'.format(str(results_getconfig))) + + #resources_to_set = [ + # network_instance('test-svc', 'L3VRF'), + # + # interface('ethernet-1/1', 0, '172.16.0.1', 24, True), + # network_instance_interface('test-svc', 'ethernet-1/1', 0), + # + # interface('ethernet-1/2', 0, '172.0.0.1', 24, True), + # network_instance_interface('test-svc', 'ethernet-1/2', 0), + # + # network_instance_static_route('test-svc', '172.0.0.0/24', '172.16.0.2'), + # network_instance_static_route('test-svc', '172.2.0.0/24', '172.16.0.3'), + #] + #LOGGER.info('resources_to_set = {:s}'.format(str(resources_to_set))) + #results_setconfig = 
driver.SetConfig(resources_to_set) + #LOGGER.info('results_setconfig = {:s}'.format(str(results_setconfig))) + + resources_to_delete = [ + #network_instance_static_route('d35fc1d9', '172.0.0.0/24', '172.16.0.2'), + #network_instance_static_route('d35fc1d9', '172.2.0.0/24', '172.16.0.3'), + + #network_instance_interface('d35fc1d9', 'ethernet-1/1', 0), + #network_instance_interface('d35fc1d9', 'ethernet-1/2', 0), + + interface('ethernet-1/1', 0, '172.16.1.1', 24, True), + interface('ethernet-1/2', 0, '172.0.0.2', 24, True), + + network_instance('20f66fb5', 'L3VRF'), + ] + LOGGER.info('resources_to_delete = {:s}'.format(str(resources_to_delete))) + results_deleteconfig = driver.DeleteConfig(resources_to_delete) + LOGGER.info('results_deleteconfig = {:s}'.format(str(results_deleteconfig))) + + time.sleep(1) + + driver.Disconnect() + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/device/tests/test_netconf.py b/src/device/tests/test_netconf.py new file mode 100644 index 000000000..70551eed7 --- /dev/null +++ b/src/device/tests/test_netconf.py @@ -0,0 +1,31 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ncclient.manager import Manager, connect_ssh + +str_filter = ''' + + + +''' + +_manager : Manager = connect_ssh( + host='10.5.32.3', port=830, username='admin', password='admin', + device_params={'name': 'huaweiyang'}, manager_params={'timeout': 120}, + key_filename=None, hostkey_verify=False, allow_agent=False, + look_for_keys=False) +c = _manager.get(filter=str_filter, with_defaults=None).data_xml +with open('data.xml', 'w') as f: + f.write(c) +_manager.close_session() -- GitLab From 24688dad4e1585106afcb1edbb3209428a284616 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:04:17 +0000 Subject: [PATCH 32/62] Service component - L3NM gNMI OpenConfig Service Handler: - Added first version of L3 gNMI OpenConfig Service Handler - Extended SettingsHandler to retrieve decide-specific settings --- .../service_handler_api/FilterFields.py | 1 + .../service_handler_api/SettingsHandler.py | 10 ++ .../service/service_handlers/__init__.py | 7 + .../ConfigRuleComposer.py | 119 +++++++++++++ .../L3NMGnmiOpenConfigServiceHandler.py | 161 ++++++++++++++++++ .../l3nm_gnmi_openconfig/__init__.py | 14 ++ 6 files changed, 312 insertions(+) create mode 100644 src/service/service/service_handlers/l3nm_gnmi_openconfig/ConfigRuleComposer.py create mode 100644 src/service/service/service_handlers/l3nm_gnmi_openconfig/L3NMGnmiOpenConfigServiceHandler.py create mode 100644 src/service/service/service_handlers/l3nm_gnmi_openconfig/__init__.py diff --git a/src/service/service/service_handler_api/FilterFields.py b/src/service/service/service_handler_api/FilterFields.py index 3ec71dc64..f86412a8c 100644 --- a/src/service/service/service_handler_api/FilterFields.py +++ b/src/service/service/service_handler_api/FilterFields.py @@ -35,6 +35,7 @@ DEVICE_DRIVER_VALUES = { DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352, DeviceDriverEnum.DEVICEDRIVER_XR, DeviceDriverEnum.DEVICEDRIVER_IETF_L2VPN, + DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG, } # Map allowed filter fields to 
allowed values per Filter field. If no restriction (free text) None is specified diff --git a/src/service/service/service_handler_api/SettingsHandler.py b/src/service/service/service_handler_api/SettingsHandler.py index 255e60b06..943112217 100644 --- a/src/service/service/service_handler_api/SettingsHandler.py +++ b/src/service/service/service_handler_api/SettingsHandler.py @@ -56,6 +56,16 @@ class SettingsHandler: def get(self, key_or_path : Union[str, List[str]], default : Optional[Any] = None) -> Optional[TreeNode]: return get_subnode(self.__resolver, self.__config, key_or_path, default=default) + def get_device_settings(self, device : Device) -> Optional[TreeNode]: + device_keys = device.device_id.device_uuid.uuid, device.name + + for device_key in device_keys: + endpoint_settings_uri = '/device[{:s}]/settings'.format(device_key) + endpoint_settings = self.get(endpoint_settings_uri) + if endpoint_settings is not None: return endpoint_settings + + return None + def get_endpoint_settings(self, device : Device, endpoint : EndPoint) -> Optional[TreeNode]: device_keys = device.device_id.device_uuid.uuid, device.name endpoint_keys = endpoint.endpoint_id.endpoint_uuid.uuid, endpoint.name diff --git a/src/service/service/service_handlers/__init__.py b/src/service/service/service_handlers/__init__.py index 257bc138f..7ea0d4f62 100644 --- a/src/service/service/service_handlers/__init__.py +++ b/src/service/service/service_handlers/__init__.py @@ -19,6 +19,7 @@ from .l2nm_ietfl2vpn.L2NM_IETFL2VPN_ServiceHandler import L2NM_IETFL2VPN_Service from .l2nm_openconfig.L2NMOpenConfigServiceHandler import L2NMOpenConfigServiceHandler from .l3nm_emulated.L3NMEmulatedServiceHandler import L3NMEmulatedServiceHandler from .l3nm_openconfig.L3NMOpenConfigServiceHandler import L3NMOpenConfigServiceHandler +from .l3nm_gnmi_openconfig.L3NMGnmiOpenConfigServiceHandler import L3NMGnmiOpenConfigServiceHandler from .microwave.MicrowaveServiceHandler import MicrowaveServiceHandler from 
.p4.p4_service_handler import P4ServiceHandler from .tapi_tapi.TapiServiceHandler import TapiServiceHandler @@ -49,6 +50,12 @@ SERVICE_HANDLERS = [ FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, } ]), + (L3NMGnmiOpenConfigServiceHandler, [ + { + FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_L3NM, + FilterFieldEnum.DEVICE_DRIVER : DeviceDriverEnum.DEVICEDRIVER_GNMI_OPENCONFIG, + } + ]), (TapiServiceHandler, [ { FilterFieldEnum.SERVICE_TYPE : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, diff --git a/src/service/service/service_handlers/l3nm_gnmi_openconfig/ConfigRuleComposer.py b/src/service/service/service_handlers/l3nm_gnmi_openconfig/ConfigRuleComposer.py new file mode 100644 index 000000000..7f3593df3 --- /dev/null +++ b/src/service/service/service_handlers/l3nm_gnmi_openconfig/ConfigRuleComposer.py @@ -0,0 +1,119 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Dict, List, Optional, Tuple +from common.proto.context_pb2 import Device, EndPoint +from common.tools.object_factory.ConfigRule import json_config_rule_delete, json_config_rule_set + +from service.service.service_handler_api.AnyTreeTools import TreeNode + +def _interface(if_name, sif_index, ipv4_address, ipv4_prefix, enabled) -> Tuple[str, Dict]: + str_path = '/interface[{:s}]'.format(if_name) + str_data = {'name': if_name, 'enabled': enabled, 'sub_if_index': sif_index, + 'sub_if_enabled': enabled, 'sub_if_ipv4_enabled': enabled, + 'sub_if_ipv4_address': ipv4_address, 'sub_if_ipv4_prefix': ipv4_prefix} + return str_path, str_data + +def _network_instance(ni_name, ni_type) -> Tuple[str, Dict]: + str_path = '/network_instance[{:s}]'.format(ni_name) + str_data = {'name': ni_name, 'type': ni_type} + return str_path, str_data + +def _network_instance_static_route(ni_name, prefix, next_hop, next_hop_index=0) -> Tuple[str, Dict]: + str_path = '/network_instance[{:s}]/static_route[{:s}]'.format(ni_name, prefix) + str_data = {'name': ni_name, 'prefix': prefix, 'next_hop': next_hop, 'next_hop_index': next_hop_index} + return str_path, str_data + +def _network_instance_interface(ni_name, if_name, sif_index) -> Tuple[str, Dict]: + str_path = '/network_instance[{:s}]/interface[{:s}.{:d}]'.format(ni_name, if_name, sif_index) + str_data = {'name': ni_name, 'if_name': if_name, 'sif_index': sif_index} + return str_path, str_data + +class EndpointComposer: + def __init__(self, endpoint_uuid : str) -> None: + self.uuid = endpoint_uuid + self.objekt : Optional[EndPoint] = None + self.sub_interface_index = 0 + self.ipv4_address = None + self.ipv4_prefix = None + + def configure(self, endpoint_obj : EndPoint, settings : Optional[TreeNode]) -> None: + self.objekt = endpoint_obj + if settings is None: return + json_settings : Dict = settings.value + self.ipv4_address = json_settings['ipv4_address'] + self.ipv4_prefix = json_settings['ipv4_prefix'] + 
self.sub_interface_index = json_settings['sub_interface_index'] + + def get_config_rules(self, network_instance_name : str, delete : bool = False) -> List[Dict]: + json_config_rule = json_config_rule_delete if delete else json_config_rule_set + return [ + json_config_rule(*_interface( + self.objekt.name, self.sub_interface_index, self.ipv4_address, self.ipv4_prefix, True + )), + json_config_rule(*_network_instance_interface( + network_instance_name, self.objekt.name, self.sub_interface_index + )), + ] + +class DeviceComposer: + def __init__(self, device_uuid : str) -> None: + self.uuid = device_uuid + self.objekt : Optional[Device] = None + self.endpoints : Dict[str, EndpointComposer] = dict() + self.static_routes : Dict[str, str] = dict() + + def get_endpoint(self, endpoint_uuid : str) -> EndpointComposer: + if endpoint_uuid not in self.endpoints: + self.endpoints[endpoint_uuid] = EndpointComposer(endpoint_uuid) + return self.endpoints[endpoint_uuid] + + def configure(self, device_obj : Device, settings : Optional[TreeNode]) -> None: + self.objekt = device_obj + if settings is None: return + json_settings : Dict = settings.value + static_routes = json_settings.get('static_routes', []) + for static_route in static_routes: + prefix = static_route['prefix'] + next_hop = static_route['next_hop'] + self.static_routes[prefix] = next_hop + + def get_config_rules(self, network_instance_name : str, delete : bool = False) -> List[Dict]: + json_config_rule = json_config_rule_delete if delete else json_config_rule_set + config_rules = [ + json_config_rule(*_network_instance(network_instance_name, 'L3VRF')) + ] + for endpoint in self.endpoints.values(): + config_rules.extend(endpoint.get_config_rules(network_instance_name, delete=delete)) + for prefix, next_hop in self.static_routes.items(): + config_rules.append( + json_config_rule(*_network_instance_static_route(network_instance_name, prefix, next_hop)) + ) + if delete: config_rules = list(reversed(config_rules)) + return 
config_rules + +class ConfigRuleComposer: + def __init__(self) -> None: + self.devices : Dict[str, DeviceComposer] = dict() + + def get_device(self, device_uuid : str) -> DeviceComposer: + if device_uuid not in self.devices: + self.devices[device_uuid] = DeviceComposer(device_uuid) + return self.devices[device_uuid] + + def get_config_rules(self, network_instance_name : str, delete : bool = False) -> Dict[str, List[Dict]]: + return { + device_uuid : device.get_config_rules(network_instance_name, delete=delete) + for device_uuid, device in self.devices.items() + } diff --git a/src/service/service/service_handlers/l3nm_gnmi_openconfig/L3NMGnmiOpenConfigServiceHandler.py b/src/service/service/service_handlers/l3nm_gnmi_openconfig/L3NMGnmiOpenConfigServiceHandler.py new file mode 100644 index 000000000..a9cec3f42 --- /dev/null +++ b/src/service/service/service_handlers/l3nm_gnmi_openconfig/L3NMGnmiOpenConfigServiceHandler.py @@ -0,0 +1,161 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json, logging +from typing import Any, Dict, List, Optional, Tuple, Union +from common.method_wrappers.Decorator import MetricsPool, metered_subclass_method +from common.proto.context_pb2 import ConfigRule, DeviceId, Service +from common.tools.object_factory.Device import json_device_id +from common.type_checkers.Checkers import chk_type +from service.service.service_handler_api.Tools import get_device_endpoint_uuids, get_endpoint_matching +from service.service.service_handler_api._ServiceHandler import _ServiceHandler +from service.service.service_handler_api.SettingsHandler import SettingsHandler +from service.service.task_scheduler.TaskExecutor import TaskExecutor +from .ConfigRuleComposer import ConfigRuleComposer + +LOGGER = logging.getLogger(__name__) + +METRICS_POOL = MetricsPool('Service', 'Handler', labels={'handler': 'l3nm_gnmi_openconfig'}) + +class L3NMGnmiOpenConfigServiceHandler(_ServiceHandler): + def __init__( # pylint: disable=super-init-not-called + self, service : Service, task_executor : TaskExecutor, **settings + ) -> None: + self.__service = service + self.__task_executor = task_executor + self.__settings_handler = SettingsHandler(service.service_config, **settings) + self.__composer = ConfigRuleComposer() + self.__endpoint_map : Dict[Tuple[str, str], str] = dict() + + def _compose_config_rules(self, endpoints : List[Tuple[str, str, Optional[str]]]) -> None: + for endpoint in endpoints: + device_uuid, endpoint_uuid = get_device_endpoint_uuids(endpoint) + + device_obj = self.__task_executor.get_device(DeviceId(**json_device_id(device_uuid))) + device_settings = self.__settings_handler.get_device_settings(device_obj) + _device = self.__composer.get_device(device_obj.name) + _device.configure(device_obj, device_settings) + + endpoint_obj = get_endpoint_matching(device_obj, endpoint_uuid) + endpoint_settings = self.__settings_handler.get_endpoint_settings(device_obj, endpoint_obj) + _endpoint = _device.get_endpoint(endpoint_obj.name) + 
_endpoint.configure(endpoint_obj, endpoint_settings) + + self.__endpoint_map[(device_uuid, endpoint_uuid)] = device_obj.name + + def _do_configurations( + self, config_rules_per_device : Dict[str, List[Dict]], endpoints : List[Tuple[str, str, Optional[str]]], + delete : bool = False + ) -> List[Union[bool, Exception]]: + # Configuration is done atomically on each device, all OK / all KO per device + results_per_device = dict() + for device_name,json_config_rules in config_rules_per_device.items(): + try: + device_obj = self.__composer.get_device(device_name).objekt + if len(json_config_rules) == 0: continue + del device_obj.device_config.config_rules[:] + for json_config_rule in json_config_rules: + device_obj.device_config.config_rules.append(ConfigRule(**json_config_rule)) + self.__task_executor.configure_device(device_obj) + results_per_device[device_name] = True + except Exception as e: # pylint: disable=broad-exception-caught + verb = 'deconfigure' if delete else 'configure' + MSG = 'Unable to {:s} Device({:s}) : ConfigRules({:s})' + LOGGER.exception(MSG.format(verb, str(device_name), str(json_config_rules))) + results_per_device[device_name] = e + + results = [] + for endpoint in endpoints: + device_uuid, endpoint_uuid = get_device_endpoint_uuids(endpoint) + device_name = self.__endpoint_map[(device_uuid, endpoint_uuid)] + results.append(results_per_device[device_name]) + return results + + @metered_subclass_method(METRICS_POOL) + def SetEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + chk_type('endpoints', endpoints, list) + if len(endpoints) == 0: return [] + service_uuid = self.__service.service_id.service_uuid.uuid + #settings = self.__settings_handler.get('/settings') + self._compose_config_rules(endpoints) + network_instance_name = service_uuid.split('-')[0] + config_rules_per_device = self.__composer.get_config_rules(network_instance_name, delete=False) + 
results = self._do_configurations(config_rules_per_device, endpoints) + return results + + @metered_subclass_method(METRICS_POOL) + def DeleteEndpoint( + self, endpoints : List[Tuple[str, str, Optional[str]]], connection_uuid : Optional[str] = None + ) -> List[Union[bool, Exception]]: + chk_type('endpoints', endpoints, list) + if len(endpoints) == 0: return [] + service_uuid = self.__service.service_id.service_uuid.uuid + #settings = self.__settings_handler.get('/settings') + self._compose_config_rules(endpoints) + network_instance_name = service_uuid.split('-')[0] + config_rules_per_device = self.__composer.get_config_rules(network_instance_name, delete=True) + results = self._do_configurations(config_rules_per_device, endpoints, delete=True) + return results + + @metered_subclass_method(METRICS_POOL) + def SetConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('constraints', constraints, list) + if len(constraints) == 0: return [] + + msg = '[SetConstraint] Method not implemented. Constraints({:s}) are being ignored.' + LOGGER.warning(msg.format(str(constraints))) + return [True for _ in range(len(constraints))] + + @metered_subclass_method(METRICS_POOL) + def DeleteConstraint(self, constraints : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('constraints', constraints, list) + if len(constraints) == 0: return [] + + msg = '[DeleteConstraint] Method not implemented. Constraints({:s}) are being ignored.' 
+ LOGGER.warning(msg.format(str(constraints))) + return [True for _ in range(len(constraints))] + + @metered_subclass_method(METRICS_POOL) + def SetConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('resources', resources, list) + if len(resources) == 0: return [] + + results = [] + for resource in resources: + try: + resource_value = json.loads(resource[1]) + self.__settings_handler.set(resource[0], resource_value) + results.append(True) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to SetConfig({:s})'.format(str(resource))) + results.append(e) + + return results + + @metered_subclass_method(METRICS_POOL) + def DeleteConfig(self, resources : List[Tuple[str, Any]]) -> List[Union[bool, Exception]]: + chk_type('resources', resources, list) + if len(resources) == 0: return [] + + results = [] + for resource in resources: + try: + self.__settings_handler.delete(resource[0]) + except Exception as e: # pylint: disable=broad-except + LOGGER.exception('Unable to DeleteConfig({:s})'.format(str(resource))) + results.append(e) + + return results diff --git a/src/service/service/service_handlers/l3nm_gnmi_openconfig/__init__.py b/src/service/service/service_handlers/l3nm_gnmi_openconfig/__init__.py new file mode 100644 index 000000000..1549d9811 --- /dev/null +++ b/src/service/service/service_handlers/l3nm_gnmi_openconfig/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + -- GitLab From 8775d5a8d07df1ab0e99b28f67bdc4646ac6116d Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 08:06:23 +0000 Subject: [PATCH 33/62] Deploy Specs script and Manifests: - Added deploy-specs script - Reduced request resources in components to fit in VM - Activated DEBUG logs - Deactivated horizontal pod autoscalers --- manifests/contextservice.yaml | 50 +++++++++++++++--------------- manifests/deviceservice.yaml | 8 ++--- manifests/monitoringservice.yaml | 2 +- manifests/pathcompservice.yaml | 48 ++++++++++++++--------------- manifests/serviceservice.yaml | 52 ++++++++++++++++---------------- manifests/sliceservice.yaml | 50 +++++++++++++++--------------- my_deploy.sh | 4 +-- 7 files changed, 107 insertions(+), 107 deletions(-) diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index 96735bf5f..288f0677a 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -54,11 +54,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:1010"] resources: requests: - cpu: 250m + cpu: 150m memory: 128Mi limits: - cpu: 1000m - memory: 1024Mi + cpu: 500m + memory: 512Mi --- apiVersion: v1 kind: Service @@ -79,25 +79,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 ---- -apiVersion: autoscaling/v2 -kind: HorizontalPodAutoscaler -metadata: - name: contextservice-hpa -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: contextservice - minReplicas: 1 - maxReplicas: 20 - metrics: - - type: Resource - resource: - name: cpu - target: - type: Utilization - averageUtilization: 80 - #behavior: - # scaleDown: - # stabilizationWindowSeconds: 30 +#--- +#apiVersion: autoscaling/v2 +#kind: HorizontalPodAutoscaler +#metadata: +# name: contextservice-hpa +#spec: +# scaleTargetRef: +# apiVersion: apps/v1 +# kind: Deployment +# name: contextservice +# minReplicas: 1 +# maxReplicas: 20 +# 
metrics: +# - type: Resource +# resource: +# name: cpu +# target: +# type: Utilization +# averageUtilization: 80 +# #behavior: +# # scaleDown: +# # stabilizationWindowSeconds: 30 diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 22c0f5f9d..2984869bc 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -39,7 +39,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:2020"] @@ -48,11 +48,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:2020"] resources: requests: - cpu: 250m + cpu: 150m memory: 128Mi limits: - cpu: 1000m - memory: 1024Mi + cpu: 500m + memory: 512Mi --- apiVersion: v1 kind: Service diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml index 4447a1427..06ac823a1 100644 --- a/manifests/monitoringservice.yaml +++ b/manifests/monitoringservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" envFrom: - secretRef: name: qdb-data diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 3ba12750b..7b47c338b 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:10020"] @@ -72,7 +72,7 @@ spec: cpu: 100m memory: 256Mi limits: - cpu: 700m + cpu: 500m memory: 1024Mi --- apiVersion: v1 @@ -98,25 +98,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 ---- -apiVersion: autoscaling/v2 -kind: HorizontalPodAutoscaler -metadata: - name: pathcompservice-hpa -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: pathcompservice - minReplicas: 1 - maxReplicas: 20 - metrics: - - type: Resource - resource: - name: cpu - target: - type: Utilization - averageUtilization: 80 - #behavior: - # 
scaleDown: - # stabilizationWindowSeconds: 30 +#--- +#apiVersion: autoscaling/v2 +#kind: HorizontalPodAutoscaler +#metadata: +# name: pathcompservice-hpa +#spec: +# scaleTargetRef: +# apiVersion: apps/v1 +# kind: Deployment +# name: pathcompservice +# minReplicas: 1 +# maxReplicas: 20 +# metrics: +# - type: Resource +# resource: +# name: cpu +# target: +# type: Utilization +# averageUtilization: 80 +# #behavior: +# # scaleDown: +# # stabilizationWindowSeconds: 30 diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 7d7bdaa4e..d71273aff 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "INFO" + value: "DEBUG" readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:3030"] @@ -45,11 +45,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:3030"] resources: requests: - cpu: 250m + cpu: 150m memory: 128Mi limits: - cpu: 1000m - memory: 1024Mi + cpu: 500m + memory: 512Mi --- apiVersion: v1 kind: Service @@ -70,25 +70,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 ---- -apiVersion: autoscaling/v2 -kind: HorizontalPodAutoscaler -metadata: - name: serviceservice-hpa -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: serviceservice - minReplicas: 1 - maxReplicas: 20 - metrics: - - type: Resource - resource: - name: cpu - target: - type: Utilization - averageUtilization: 80 - #behavior: - # scaleDown: - # stabilizationWindowSeconds: 30 +#--- +#apiVersion: autoscaling/v2 +#kind: HorizontalPodAutoscaler +#metadata: +# name: serviceservice-hpa +#spec: +# scaleTargetRef: +# apiVersion: apps/v1 +# kind: Deployment +# name: serviceservice +# minReplicas: 1 +# maxReplicas: 20 +# metrics: +# - type: Resource +# resource: +# name: cpu +# target: +# type: Utilization +# averageUtilization: 80 +# #behavior: +# # scaleDown: +# # stabilizationWindowSeconds: 30 diff --git a/manifests/sliceservice.yaml 
b/manifests/sliceservice.yaml index e7e5c1604..5ea63ad0c 100644 --- a/manifests/sliceservice.yaml +++ b/manifests/sliceservice.yaml @@ -50,11 +50,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:4040"] resources: requests: - cpu: 250m + cpu: 150m memory: 128Mi limits: - cpu: 1000m - memory: 1024Mi + cpu: 500m + memory: 512Mi --- apiVersion: v1 kind: Service @@ -75,25 +75,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 ---- -apiVersion: autoscaling/v2 -kind: HorizontalPodAutoscaler -metadata: - name: sliceservice-hpa -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: sliceservice - minReplicas: 1 - maxReplicas: 20 - metrics: - - type: Resource - resource: - name: cpu - target: - type: Utilization - averageUtilization: 80 - #behavior: - # scaleDown: - # stabilizationWindowSeconds: 30 +#--- +#apiVersion: autoscaling/v2 +#kind: HorizontalPodAutoscaler +#metadata: +# name: sliceservice-hpa +#spec: +# scaleTargetRef: +# apiVersion: apps/v1 +# kind: Deployment +# name: sliceservice +# minReplicas: 1 +# maxReplicas: 20 +# metrics: +# - type: Resource +# resource: +# name: cpu +# target: +# type: Utilization +# averageUtilization: 80 +# #behavior: +# # scaleDown: +# # stabilizationWindowSeconds: 30 diff --git a/my_deploy.sh b/my_deploy.sh index 0b8b6d9e8..48ef64134 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -21,10 +21,10 @@ export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. 
#export TFS_COMPONENTS="context device pathcomp service slice compute webui load_generator" -export TFS_COMPONENTS="context device pathcomp service slice compute webui" +export TFS_COMPONENTS="context device pathcomp service slice webui" # Uncoment to activate Monitoring -#export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" +export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" # Uncoment to activate Automation and Policy Manager #export TFS_COMPONENTS="${TFS_COMPONENTS} automation policy" -- GitLab From 629d5f2e6f646a4fe85ff7a555122e5cd2794ad5 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 17:15:37 +0000 Subject: [PATCH 34/62] Hackfest - ContainerLab: - updated install command --- hackfest/containerlab/commands.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt index 4c1c3a951..dfd3d062e 100644 --- a/hackfest/containerlab/commands.txt +++ b/hackfest/containerlab/commands.txt @@ -15,7 +15,7 @@ https://gnmic.kmrd.dev/cmd/get/ IMPORTANT: for Nokia SR Linux, use kind "srl" and type "ixr6" ## Download and install the latest release -$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" +$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" -- -v 0.41.2 ## Deploy proposed two SR node scenario $ cd ~/tfs-ctrl/hackfest/containerlab -- GitLab From ec4d1fa30b933d6a1979b8a0f1dbb40b7d7d65f6 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 17:15:56 +0000 Subject: [PATCH 35/62] Manifests: - disabled debug log in pathcomp frontend --- manifests/pathcompservice.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 7b47c338b..8808bbb3b 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" readinessProbe: exec: command: 
["/bin/grpc_health_probe", "-addr=:10020"] -- GitLab From 215d6026fb7c236261fea26d93952dada1366631 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 17:17:12 +0000 Subject: [PATCH 36/62] Monitoring component: - Only monitor enabled endpoints --- src/monitoring/service/EventTools.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/monitoring/service/EventTools.py b/src/monitoring/service/EventTools.py index 0d351eee9..be3fe9b92 100644 --- a/src/monitoring/service/EventTools.py +++ b/src/monitoring/service/EventTools.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import grpc, json, logging, queue, threading from typing import Dict -import grpc, logging, queue, threading from common.method_wrappers.ServiceExceptions import ServiceException from common.proto import monitoring_pb2 -from common.proto.context_pb2 import DeviceOperationalStatusEnum, Empty, EventTypeEnum +from common.proto.context_pb2 import ConfigActionEnum, DeviceOperationalStatusEnum, Empty, EventTypeEnum from common.proto.kpi_sample_types_pb2 import KpiSampleType from context.client.ContextClient import ContextClient from monitoring.client.MonitoringClient import MonitoringClient @@ -108,7 +108,21 @@ class EventsDeviceCollector: # device is not ready for monitoring continue + enabled_endpoint_names = set() + for config_rule in device.device_config.config_rules: + if config_rule.action != ConfigActionEnum.CONFIGACTION_SET: continue + if config_rule.WhichOneof('config_rule') != 'custom': continue + str_resource_key = str(config_rule.custom.resource_key) + if not str_resource_key.startswith('/interface['): continue + json_resource_value = json.loads(config_rule.custom.resource_value) + if 'name' not in json_resource_value: continue + if 'enabled' not in json_resource_value: continue + if not json_resource_value['enabled']: continue + 
enabled_endpoint_names.add(json_resource_value['name']) + for endpoint in device.device_endpoints: + if endpoint.name not in enabled_endpoint_names: continue + endpoint_uuid = endpoint.endpoint_id.endpoint_uuid.uuid self._name_mapping.set_endpoint_name(endpoint_uuid, endpoint.name) -- GitLab From d20e72b829fb448e9b12300bfba2ee003e839fd0 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 19:21:03 +0000 Subject: [PATCH 37/62] Hackfest - ContainerLab: - Updated commands --- hackfest/containerlab/commands.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt index dfd3d062e..54c24eedb 100644 --- a/hackfest/containerlab/commands.txt +++ b/hackfest/containerlab/commands.txt @@ -90,4 +90,4 @@ docker exec -it clab-tfs-scenario-client2 bash ip address add 172.16.2.10/24 dev eth1 ip route add 172.16.1.0/24 via 172.16.2.1 - ping 172.16.2.1 or 172.16.2.10 + ping 172.16.1.1 or 172.16.1.10 -- GitLab From be5606294b9341261ef03fc7997c568d8b3962ba Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 19:22:59 +0000 Subject: [PATCH 38/62] Device component - gNMI OpenConfig Driver: - Added Delta Sample calculation - Improved timestamp reporting --- .../gnmi_openconfig/DeltaSampleCache.py | 35 ++++++ .../gnmi_openconfig/MonitoringThread.py | 39 +++++-- .../drivers/gnmi_openconfig/SamplesCache.py | 101 ------------------ 3 files changed, 65 insertions(+), 110 deletions(-) create mode 100644 src/device/service/drivers/gnmi_openconfig/DeltaSampleCache.py delete mode 100644 src/device/service/drivers/gnmi_openconfig/SamplesCache.py diff --git a/src/device/service/drivers/gnmi_openconfig/DeltaSampleCache.py b/src/device/service/drivers/gnmi_openconfig/DeltaSampleCache.py new file mode 100644 index 000000000..5083082fe --- /dev/null +++ b/src/device/service/drivers/gnmi_openconfig/DeltaSampleCache.py @@ -0,0 +1,35 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG 
(https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +from typing import Any, Dict, Tuple, Union + +class DeltaSampleCache: + def __init__(self) -> None: + self._previous_samples : Dict[str, Tuple[float, Union[int, float]]] = dict() + + def get_delta(self, path : str, current_timestamp : float, current_value : Any) -> None: + previous_sample = copy.deepcopy(self._previous_samples.get(path)) + self._previous_samples[path] = current_timestamp, current_value + + if not isinstance(current_value, (int, float)): return None + if previous_sample is None: return current_timestamp, 0 + previous_timestamp, previous_value = previous_sample + if not isinstance(previous_value, (int, float)): return None + + delta_value = max(0, current_value - previous_value) + delay = current_timestamp - previous_timestamp + delta_sample = current_timestamp, delta_value / delay + + return delta_sample diff --git a/src/device/service/drivers/gnmi_openconfig/MonitoringThread.py b/src/device/service/drivers/gnmi_openconfig/MonitoringThread.py index 5c40b13b9..7cbd0da87 100644 --- a/src/device/service/drivers/gnmi_openconfig/MonitoringThread.py +++ b/src/device/service/drivers/gnmi_openconfig/MonitoringThread.py @@ -16,7 +16,7 @@ # Ref: https://github.com/openconfig/gnmi/blob/master/proto/gnmi/gnmi.proto from __future__ import annotations -import grpc, logging, queue, threading +import grpc, logging, queue, re, threading from collections.abc import Iterator from 
datetime import datetime from typing import Dict @@ -26,6 +26,7 @@ from .gnmi.gnmi_pb2 import ( # pylint: disable=no-name-in-module ) from .gnmi.gnmi_pb2_grpc import gNMIStub from .tools.Path import path_from_string, path_to_string +from .DeltaSampleCache import DeltaSampleCache LOGGER = logging.getLogger(__name__) @@ -82,6 +83,7 @@ class MonitoringThread(threading.Thread): self._in_subscriptions = in_subscriptions self._out_samples = out_samples self._response_iterator = None + self._delta_sample_cache = DeltaSampleCache() def stop(self) -> None: self._terminate.set() @@ -131,16 +133,35 @@ class MonitoringThread(threading.Thread): timeout = None # GNMI_SUBSCRIPTION_TIMEOUT = int(sampling_duration) self._response_iterator = self._stub.Subscribe(request_iterator, metadata=metadata, timeout=timeout) for subscribe_response in self._response_iterator: - timestamp = datetime.timestamp(datetime.utcnow()) str_subscribe_response = grpc_message_to_json_string(subscribe_response) self._logger.warning('[run] subscribe_response={:s}'.format(str_subscribe_response)) - for update in subscribe_response.update.update: - str_path = path_to_string(update.path) - if str_path != '/system/name/host-name': continue - #counter_name = update.path[-1].name - value_type = update.val.WhichOneof('value') - value = getattr(update.val, value_type) - sample = (timestamp, str_path, value) + update = subscribe_response.update + timestamp_device = float(update.timestamp) / 1.e9 + timestamp_local = datetime.timestamp(datetime.utcnow()) + # if difference between timestamp from device and local is lower than 1 second + if abs(timestamp_device - timestamp_local) <= 1: + # assume clocks are synchronized, use timestamp from device + timestamp = timestamp_device + else: + # might be clocks are not synchronized, use local timestamp + timestamp = timestamp_local + for update_entry in update.update: + str_path = path_to_string(update_entry.path) + #if str_path != '/system/name/host-name': continue + 
#counter_name = update_entry.path[-1].name + value_type = update_entry.val.WhichOneof('value') + value = getattr(update_entry.val, value_type) + if re.match(r'^[0-9]+$', value) is not None: + value = int(value) + elif re.match(r'^[0-9]*\.[0-9]*$', value) is not None: + value = float(value) + else: + value = str(value) + delta_sample = self._delta_sample_cache.get_delta(str_path, timestamp, value) + if delta_sample is None: + sample = (timestamp, str_path, value) + else: + sample = (delta_sample[0], str_path, delta_sample[1]) self._logger.warning('[run] sample={:s}'.format(str(sample))) self._out_samples.put_nowait(sample) except grpc.RpcError as e: diff --git a/src/device/service/drivers/gnmi_openconfig/SamplesCache.py b/src/device/service/drivers/gnmi_openconfig/SamplesCache.py deleted file mode 100644 index 28be2d661..000000000 --- a/src/device/service/drivers/gnmi_openconfig/SamplesCache.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Collection of samples through NetConf is very slow and each request collects all the data. -# Populate a cache periodically (when first interface is interrogated). 
-# Evict data after some seconds, when data is considered as outdated - -import copy, queue, logging, re, threading -from datetime import datetime -from typing import Dict, Tuple -from .templates_old import get_filter, parse -from .GnmiSessionHandler import GnmiSessionHandler - -RE_GET_ENDPOINT_FROM_INTERFACE_KEY = re.compile(r'.*interface\[([^\]]+)\].*') -RE_GET_ENDPOINT_FROM_INTERFACE_XPATH = re.compile(r".*interface\[oci\:name\='([^\]]+)'\].*") - -SAMPLE_EVICTION_SECONDS = 30.0 # seconds -SAMPLE_RESOURCE_KEY = 'interfaces/interface/state/counters' - -def compute_delta_sample(previous_sample, previous_timestamp, current_sample, current_timestamp): - if previous_sample is None: return None - if previous_timestamp is None: return None - if current_sample is None: return None - if current_timestamp is None: return None - delay = current_timestamp - previous_timestamp - field_keys = set(previous_sample.keys()).union(current_sample.keys()) - field_keys.discard('name') - delta_sample = {'name': previous_sample['name']} - for field_key in field_keys: - previous_sample_value = previous_sample[field_key] - if not isinstance(previous_sample_value, (int, float)): continue - current_sample_value = current_sample[field_key] - if not isinstance(current_sample_value, (int, float)): continue - delta_value = current_sample_value - previous_sample_value - if delta_value < 0: continue - delta_sample[field_key] = delta_value / delay - return delta_sample - -class SamplesCache: - def __init__(self, handler : GnmiSessionHandler, logger : logging.Logger) -> None: - self.__handler = handler - self.__logger = logger - self.__lock = threading.Lock() - self.__timestamp = None - self.__absolute_samples = {} - self.__delta_samples = {} - - def _refresh_samples(self) -> None: - with self.__lock: - try: - now = datetime.timestamp(datetime.utcnow()) - if self.__timestamp is not None and (now - self.__timestamp) < SAMPLE_EVICTION_SECONDS: return - str_filter = get_filter(SAMPLE_RESOURCE_KEY) - 
xml_data = self.__handler.get(filter=str_filter).data_ele - interface_samples = parse(SAMPLE_RESOURCE_KEY, xml_data) - for interface,samples in interface_samples: - match = RE_GET_ENDPOINT_FROM_INTERFACE_KEY.match(interface) - if match is None: continue - interface = match.group(1) - delta_sample = compute_delta_sample( - self.__absolute_samples.get(interface), self.__timestamp, samples, now) - if delta_sample is not None: self.__delta_samples[interface] = delta_sample - self.__absolute_samples[interface] = samples - self.__timestamp = now - except: # pylint: disable=bare-except - self.__logger.exception('Error collecting samples') - - def get(self, resource_key : str) -> Tuple[float, Dict]: - self._refresh_samples() - match = RE_GET_ENDPOINT_FROM_INTERFACE_XPATH.match(resource_key) - with self.__lock: - if match is None: return self.__timestamp, {} - interface = match.group(1) - return self.__timestamp, copy.deepcopy(self.__delta_samples.get(interface, {})) - -def do_sampling( - samples_cache : SamplesCache, logger : logging.Logger, resource_key : str, out_samples : queue.Queue -) -> None: - try: - timestamp, samples = samples_cache.get(resource_key) - counter_name = resource_key.split('/')[-1].split(':')[-1] - value = samples.get(counter_name) - if value is None: - logger.warning('[do_sampling] value not found for {:s}'.format(resource_key)) - return - # resource_key template: //oci:interfaces/oci:interface[oci:name='{:s}']/state/counters/{:s} - sample = (timestamp, resource_key, value) - out_samples.put_nowait(sample) - except: # pylint: disable=bare-except - logger.exception('Error retrieving samples') -- GitLab From 3f93f7e33b8f1ab3d099dbaa8fc8f49044e91acb Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 19:23:17 +0000 Subject: [PATCH 39/62] Monitoring component: - Defined monitoring period to 10 seconds --- src/monitoring/service/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/monitoring/service/__main__.py 
b/src/monitoring/service/__main__.py index d0a132c70..d242f0fc0 100644 --- a/src/monitoring/service/__main__.py +++ b/src/monitoring/service/__main__.py @@ -49,7 +49,7 @@ def start_monitoring(name_mapping : NameMapping): monitor_kpi_request = monitoring_pb2.MonitorKpiRequest() monitor_kpi_request.kpi_id.CopyFrom(kpi_id) monitor_kpi_request.monitoring_window_s = 86400 - monitor_kpi_request.sampling_rate_s = 30 + monitor_kpi_request.sampling_rate_s = 10 events_collector._monitoring_client.MonitorKpi(monitor_kpi_request) time.sleep(0.5) # let other tasks run; do not overload CPU -- GitLab From 82dcd5d132b95a02d8d8ee2a68f14328c9cf7828 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sat, 17 Jun 2023 19:23:57 +0000 Subject: [PATCH 40/62] WebUI component - Grafana: - Removed wrong GROUP BY - Removed summary column "first" --- src/webui/grafana_db_mon_kpis_psql.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/webui/grafana_db_mon_kpis_psql.json b/src/webui/grafana_db_mon_kpis_psql.json index 750e5254e..df72f9ab0 100644 --- a/src/webui/grafana_db_mon_kpis_psql.json +++ b/src/webui/grafana_db_mon_kpis_psql.json @@ -144,7 +144,6 @@ "options": { "legend": { "calcs": [ - "first", "min", "mean", "max", @@ -169,7 +168,7 @@ "hide": false, "metricColumn": "kpi_value", "rawQuery": true, - "rawSql": "SELECT\r\n $__time(timestamp), kpi_value AS metric, device_name, endpoint_name, kpi_sample_type\r\nFROM\r\n tfs_monitoring_kpis\r\nWHERE\r\n $__timeFilter(timestamp) AND device_name IN (${device_name}) AND endpoint_name IN (${endpoint_name}) AND kpi_sample_type IN (${kpi_sample_type})\r\nGROUP BY\r\n device_name, endpoint_name, kpi_sample_type\r\nORDER BY\r\n timestamp", + "rawSql": "SELECT\r\n $__time(timestamp), kpi_value AS metric, device_name, endpoint_name, kpi_sample_type\r\nFROM\r\n tfs_monitoring_kpis\r\nWHERE\r\n $__timeFilter(timestamp) AND device_name IN (${device_name}) AND endpoint_name IN (${endpoint_name}) AND kpi_sample_type IN 
(${kpi_sample_type})\r\nORDER BY\r\n timestamp", "refId": "A", "select": [ [ -- GitLab From 69a6a4aff38accfde6fcdf8eba5ec5dfafc167c2 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 21:49:35 +0000 Subject: [PATCH 41/62] Update EroPathToHops.py to adapt it to the TAPI server of the Hackfest --- .../service/algorithms/tools/EroPathToHops.py | 48 ++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py b/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py index 670757d76..3dd47176f 100644 --- a/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py +++ b/src/pathcomp/frontend/service/algorithms/tools/EroPathToHops.py @@ -48,9 +48,52 @@ from common.proto.context_pb2 import Link LOGGER = logging.getLogger(__name__) +MAP_TAPI_UUIDS = { + "c3dbaa44-9cda-5d54-8f99-0f282362be65": "5b835e46-53f7-52e8-9c8a-077322679e36", # node-1-port-13-input => node-1-port-13-output + "1fb9ac86-b7ad-5d6d-87b1-a09d995f1ddd": "c9df6ece-1650-5078-876a-1e488a453625", # node-1-port-14-input => node-1-port-14-output + "aa109937-8291-5a09-853a-97bff463e569": "b245480f-027c-53a0-9320-fca5b9d7a1e1", # node-1-port-15-input => node-1-port-15-output + "6653ae16-42a3-56b5-adf3-71adda024a61": "ac356900-ce2f-5c15-b038-1b05e6f50bf7", # node-1-port-17-input => node-1-port-17-output + "d782ef85-a473-50b4-93b5-2af86024a42a": "dcfeedd3-2d47-5bc8-b31c-ed9f973d8b76", # node-2-port-13-input => node-2-port-13-output + "bbbd83ef-6053-55dc-ab08-06fb0c2bd081": "57bcf45b-eb47-5a9c-86d1-d9cff0c910fd", # node-2-port-14-input => node-2-port-14-output + "27cdf70d-4e48-53ff-bc4f-20addf6524c0": "fd31eff5-392e-5fb5-a6f4-6dfca583344d", # node-2-port-15-input => node-2-port-15-output + "55ac2364-fad8-5a05-ac2b-5003997ff89e": "d12a2591-7f4a-575d-8fda-0bc3d6b7ca32", # node-2-port-17-input => node-2-port-17-output + "59f44a3c-32a5-5abf-af58-45e6fa7ca657": "1977ef5c-4383-5195-9221-0cdf8ee26cb7", # 
node-3-port-13-input => node-3-port-13-output + "1be3f905-d553-5291-9906-47c0772d45aa": "9def067b-9a75-54df-8867-853f35a42e87", # node-3-port-14-input => node-3-port-14-output + "fb4ece7a-2dd1-593a-b6ca-a787b3b59fc5": "1f294257-132a-54ad-b653-ef8b7517c9d8", # node-3-port-15-input => node-3-port-15-output + "a571d2fe-c7f8-5ac8-b2af-8e5b92a558b0": "5b60a688-deac-567a-8e36-0d52e56fd4fc", # node-3-port-16-input => node-3-port-16-output + "9ea9dc53-2d6a-5f28-b81a-e930f7cbedf9": "2aec14c1-3a84-5cba-8f22-783bd0273cd0", # node-3-port-17-input => node-3-port-17-output + "9ec8e0f3-3378-55e0-bed1-be1fe120a1a9": "ece2ed55-ce16-59d3-8137-3f4cf17e67ab", # node-3-port-18-input => node-3-port-18-output + "a7e114aa-a3b6-52ae-b7b7-0e5fe4dd4d1c": "0a05e43d-a13c-5276-9839-613600f3ff28", # node-4-port-13-input => node-4-port-13-output + "4ca8357a-3468-51e6-bba8-65137486666f": "18926fdf-de5c-5a52-be88-cccc065e5e03", # node-4-port-14-input => node-4-port-14-output + "a7e9f06f-6fd2-594e-8a0c-25bfe8c652d7": "1adb9e17-e499-58dc-8aa2-881ed5ce9670", # node-4-port-15-input => node-4-port-15-output + "9f6a23b2-c71c-5559-8fb3-f76421bea1d9": "049bb1f1-cc04-5b72-8c0f-43891d9637bf", # node-4-port-16-input => node-4-port-16-output + "f1d74c96-41f5-5eb9-a160-a38463184934": "2206440b-ef66-5d3e-8da5-40608fb00a10", # node-4-port-17-input => node-4-port-17-output + + "5b835e46-53f7-52e8-9c8a-077322679e36": "c3dbaa44-9cda-5d54-8f99-0f282362be65", # node-1-port-13-output => node-1-port-13-input + "c9df6ece-1650-5078-876a-1e488a453625": "1fb9ac86-b7ad-5d6d-87b1-a09d995f1ddd", # node-1-port-14-output => node-1-port-14-input + "b245480f-027c-53a0-9320-fca5b9d7a1e1": "aa109937-8291-5a09-853a-97bff463e569", # node-1-port-15-output => node-1-port-15-input + "ac356900-ce2f-5c15-b038-1b05e6f50bf7": "6653ae16-42a3-56b5-adf3-71adda024a61", # node-1-port-17-output => node-1-port-17-input + "dcfeedd3-2d47-5bc8-b31c-ed9f973d8b76": "d782ef85-a473-50b4-93b5-2af86024a42a", # node-2-port-13-output => node-2-port-13-input + 
"57bcf45b-eb47-5a9c-86d1-d9cff0c910fd": "bbbd83ef-6053-55dc-ab08-06fb0c2bd081", # node-2-port-14-output => node-2-port-14-input + "fd31eff5-392e-5fb5-a6f4-6dfca583344d": "27cdf70d-4e48-53ff-bc4f-20addf6524c0", # node-2-port-15-output => node-2-port-15-input + "d12a2591-7f4a-575d-8fda-0bc3d6b7ca32": "55ac2364-fad8-5a05-ac2b-5003997ff89e", # node-2-port-17-output => node-2-port-17-input + "1977ef5c-4383-5195-9221-0cdf8ee26cb7": "59f44a3c-32a5-5abf-af58-45e6fa7ca657", # node-3-port-13-output => node-3-port-13-input + "9def067b-9a75-54df-8867-853f35a42e87": "1be3f905-d553-5291-9906-47c0772d45aa", # node-3-port-14-output => node-3-port-14-input + "1f294257-132a-54ad-b653-ef8b7517c9d8": "fb4ece7a-2dd1-593a-b6ca-a787b3b59fc5", # node-3-port-15-output => node-3-port-15-input + "5b60a688-deac-567a-8e36-0d52e56fd4fc": "a571d2fe-c7f8-5ac8-b2af-8e5b92a558b0", # node-3-port-16-output => node-3-port-16-input + "2aec14c1-3a84-5cba-8f22-783bd0273cd0": "9ea9dc53-2d6a-5f28-b81a-e930f7cbedf9", # node-3-port-17-output => node-3-port-17-input + "ece2ed55-ce16-59d3-8137-3f4cf17e67ab": "9ec8e0f3-3378-55e0-bed1-be1fe120a1a9", # node-3-port-18-output => node-3-port-18-input + "0a05e43d-a13c-5276-9839-613600f3ff28": "a7e114aa-a3b6-52ae-b7b7-0e5fe4dd4d1c", # node-4-port-13-output => node-4-port-13-input + "18926fdf-de5c-5a52-be88-cccc065e5e03": "4ca8357a-3468-51e6-bba8-65137486666f", # node-4-port-14-output => node-4-port-14-input + "1adb9e17-e499-58dc-8aa2-881ed5ce9670": "a7e9f06f-6fd2-594e-8a0c-25bfe8c652d7", # node-4-port-15-output => node-4-port-15-input + "049bb1f1-cc04-5b72-8c0f-43891d9637bf": "9f6a23b2-c71c-5559-8fb3-f76421bea1d9", # node-4-port-16-output => node-4-port-16-input + "2206440b-ef66-5d3e-8da5-40608fb00a10": "f1d74c96-41f5-5eb9-a160-a38463184934", # node-4-port-17-output => node-4-port-17-input +} + def eropath_to_hops( ero_path : List[Dict], endpoint_to_link_dict : Dict[Tuple[str, str, str], Tuple[Dict, Link]] ) -> List[Dict]: + LOGGER.debug('ero_path = 
{:s}'.format(str(ero_path))) try: path_hops = [] num_ero_hops = len(ero_path) @@ -71,10 +114,13 @@ def eropath_to_hops( link_tuple = endpoint_to_link_dict[(device_uuid, endpoint_uuid, 'src')] if link_tuple is None: raise Exception('Malformed path') ingress = link_tuple[0]['link_endpoint_ids'][-1] + ingress_ep = ingress['endpoint_id']['endpoint_uuid'] + ingress_ep = MAP_TAPI_UUIDS.get(ingress_ep, ingress_ep) path_hops.append({ 'device': ingress['endpoint_id']['device_id'], - 'ingress_ep': ingress['endpoint_id']['endpoint_uuid'] + 'ingress_ep': ingress_ep }) + LOGGER.debug('path_hops = {:s}'.format(str(path_hops))) return path_hops except: LOGGER.exception('Unhandled exception: ero_path={:s} endpoint_to_link_dict={:s}'.format( -- GitLab From dd13f70b056d06b5ca9a6e073fea7983b5eec495 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:41:33 +0000 Subject: [PATCH 42/62] Device component: - Moved pre-load of drivers after start of gRPC server. Otherwise, K8s kills the component when retrieval of devices takes long time. 
--- src/device/service/__main__.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/device/service/__main__.py b/src/device/service/__main__.py index a07a2ab90..401711b4e 100644 --- a/src/device/service/__main__.py +++ b/src/device/service/__main__.py @@ -58,13 +58,14 @@ def main(): driver_factory = DriverFactory(DRIVERS) driver_instance_cache = DriverInstanceCache(driver_factory) - # Initialize drivers with existing devices in context - preload_drivers(driver_instance_cache) - # Starting device service grpc_service = DeviceService(driver_instance_cache) grpc_service.start() + # Initialize drivers with existing devices in context + LOGGER.info('Pre-loading drivers...') + preload_drivers(driver_instance_cache) + # Wait for Ctrl+C or termination signal while not terminate.wait(timeout=1.0): pass -- GitLab From d66ba423ab0dd833a217307c5887430afe16db1b Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:42:05 +0000 Subject: [PATCH 43/62] Update contextservice.yaml --- manifests/contextservice.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index 288f0677a..659ff7b8d 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -23,8 +23,9 @@ spec: #replicas: 1 template: metadata: - annotations: - config.linkerd.io/skip-outbound-ports: "4222" + # Deactivated linkerd for the Hackfest + #annotations: + # config.linkerd.io/skip-outbound-ports: "4222" labels: app: contextservice spec: -- GitLab From a587d4743b02d4ee6def46548ff4e1a551f735e7 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:42:26 +0000 Subject: [PATCH 44/62] Update deviceservice.yaml --- manifests/deviceservice.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 2984869bc..9dbd69553 100644 --- a/manifests/deviceservice.yaml +++ 
b/manifests/deviceservice.yaml @@ -23,9 +23,10 @@ spec: replicas: 1 template: metadata: - annotations: - # Required for IETF L2VPN SBI when both parent and child run in same K8s cluster with Linkerd - config.linkerd.io/skip-outbound-ports: "8002" + # Deactivated linkerd for the Hackfest + #annotations: + # # Required for IETF L2VPN SBI when both parent and child run in same K8s cluster with Linkerd + # config.linkerd.io/skip-outbound-ports: "8002" labels: app: deviceservice spec: -- GitLab From 33f3a2cc95d2393edcf6e0b7af181bec2e465331 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:43:18 +0000 Subject: [PATCH 45/62] Device component: - Update requirements.in --- src/device/requirements.in | 1 + 1 file changed, 1 insertion(+) diff --git a/src/device/requirements.in b/src/device/requirements.in index 24707e932..c2cb5478a 100644 --- a/src/device/requirements.in +++ b/src/device/requirements.in @@ -15,6 +15,7 @@ anytree==2.8.0 APScheduler==3.8.1 +cryptography==36.0.2 #fastcache==1.1.0 Jinja2==3.0.3 ncclient==0.6.13 -- GitLab From e3c07a4b7dd529c0b138c8589f6ccc970c783b30 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:44:53 +0000 Subject: [PATCH 46/62] Hackfest - ContainerLab: - Add directory tfs-descriptors --- hackfest/containerlab/tfs-descriptors/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 hackfest/containerlab/tfs-descriptors/.gitkeep diff --git a/hackfest/containerlab/tfs-descriptors/.gitkeep b/hackfest/containerlab/tfs-descriptors/.gitkeep new file mode 100644 index 000000000..e69de29bb -- GitLab From c2c0f3c1daa06c326b09d58263e0a22fe922170f Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:45:46 +0000 Subject: [PATCH 47/62] Hackfest - ContainerLab: - Add topology.json descriptor --- .../tfs-descriptors/topology.json | 96 +++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 hackfest/containerlab/tfs-descriptors/topology.json diff 
--git a/hackfest/containerlab/tfs-descriptors/topology.json b/hackfest/containerlab/tfs-descriptors/topology.json new file mode 100644 index 000000000..e4a49981f --- /dev/null +++ b/hackfest/containerlab/tfs-descriptors/topology.json @@ -0,0 +1,96 @@ +{ + "contexts": [ + {"context_id": {"context_uuid": {"uuid": "admin"}}} + ], + "topologies": [ + {"topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}}} + ], + "devices": [ + { + "device_id": {"device_uuid": {"uuid": "DC1"}}, "device_type": "emu-datacenter", "device_drivers": [0], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "eth1", "type": "copper"}, {"uuid": "eth2", "type": "copper"}, {"uuid": "int", "type": "copper"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "DC2"}}, "device_type": "emu-datacenter", "device_drivers": [0], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"uuid": "eth1", "type": "copper"}, {"uuid": "eth2", "type": "copper"}, {"uuid": "int", "type": "copper"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "SRL1"}}, "device_type": "packet-router", "device_drivers": [8], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "172.100.100.101"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "57400"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": { + 
"username": "admin", "password": "NokiaSrl1!", "use_tls": true + }}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "SRL2"}}, "device_type": "packet-router", "device_drivers": [8], + "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "172.100.100.102"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "57400"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": { + "username": "admin", "password": "NokiaSrl1!", "use_tls": true + }}} + ]} + } + ], + "links": [ + { + "link_id": {"link_uuid": {"uuid": "DC1/eth1==SRL1/ethernet-1/2"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "DC1"}}, "endpoint_uuid": {"uuid": "eth1"}}, + {"device_id": {"device_uuid": {"uuid": "SRL1"}}, "endpoint_uuid": {"uuid": "ethernet-1/2"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "SRL1/ethernet-1/2==DC1/eth1"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "SRL1"}}, "endpoint_uuid": {"uuid": "ethernet-1/2"}}, + {"device_id": {"device_uuid": {"uuid": "DC1"}}, "endpoint_uuid": {"uuid": "eth1"}} + ] + }, + + { + "link_id": {"link_uuid": {"uuid": "SRL1/ethernet-1/1==SRL2/ethernet-1/1"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "SRL1"}}, "endpoint_uuid": {"uuid": "ethernet-1/1"}}, + {"device_id": {"device_uuid": {"uuid": "SRL2"}}, "endpoint_uuid": {"uuid": "ethernet-1/1"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "SRL2/ethernet-1/1==SRL1/ethernet-1/1"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "SRL2"}}, "endpoint_uuid": {"uuid": "ethernet-1/1"}}, + {"device_id": {"device_uuid": {"uuid": "SRL1"}}, "endpoint_uuid": {"uuid": "ethernet-1/1"}} + ] + }, + + { + "link_id": {"link_uuid": {"uuid": "DC2/eth1==SRL2/ethernet-1/2"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "DC2"}}, "endpoint_uuid": {"uuid": "eth1"}}, + {"device_id": 
{"device_uuid": {"uuid": "SRL2"}}, "endpoint_uuid": {"uuid": "ethernet-1/2"}} + ] + }, + { + "link_id": {"link_uuid": {"uuid": "SRL2/ethernet-1/2==DC2/eth1"}}, + "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "SRL2"}}, "endpoint_uuid": {"uuid": "ethernet-1/2"}}, + {"device_id": {"device_uuid": {"uuid": "DC2"}}, "endpoint_uuid": {"uuid": "eth1"}} + ] + } + ] +} -- GitLab From 2051a61b3059b180e80604e7ba3d6718742023df Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:46:11 +0000 Subject: [PATCH 48/62] Hackfest - ContainerLab: - Add dc-2-dc-l3-service.json descriptor --- .../tfs-descriptors/dc-2-dc-l3-service.json | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 hackfest/containerlab/tfs-descriptors/dc-2-dc-l3-service.json diff --git a/hackfest/containerlab/tfs-descriptors/dc-2-dc-l3-service.json b/hackfest/containerlab/tfs-descriptors/dc-2-dc-l3-service.json new file mode 100644 index 000000000..cb9ef972e --- /dev/null +++ b/hackfest/containerlab/tfs-descriptors/dc-2-dc-l3-service.json @@ -0,0 +1,37 @@ +{ + "services": [ + { + "service_id": { + "context_id": {"context_uuid": {"uuid": "admin"}}, "service_uuid": {"uuid": "dc-2-dc-l3-svc"} + }, + "service_type": 1, + "service_status": {"service_status": 1}, + "service_endpoint_ids": [ + {"device_id":{"device_uuid":{"uuid":"DC1"}},"endpoint_uuid":{"uuid":"int"}}, + {"device_id":{"device_uuid":{"uuid":"DC2"}},"endpoint_uuid":{"uuid":"int"}} + ], + "service_constraints": [], + "service_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "/device[SRL1]/settings", "resource_value": { + "static_routes": [{"prefix": "172.16.2.0/24", "next_hop": "172.0.0.2"}] + }}}, + {"action": 1, "custom": {"resource_key": "/device[SRL1]/endpoint[ethernet-1/1]/settings", "resource_value": { + "ipv4_address": "172.0.0.1", "ipv4_prefix": 30, "sub_interface_index": 0 + }}}, + {"action": 1, "custom": {"resource_key": 
"/device[SRL1]/endpoint[ethernet-1/2]/settings", "resource_value": { + "ipv4_address": "172.16.1.1", "ipv4_prefix": 24, "sub_interface_index": 0 + }}}, + + {"action": 1, "custom": {"resource_key": "/device[SRL2]/settings", "resource_value": { + "static_routes": [{"prefix": "172.16.1.0/24", "next_hop": "172.0.0.1"}] + }}}, + {"action": 1, "custom": {"resource_key": "/device[SRL2]/endpoint[ethernet-1/1]/settings", "resource_value": { + "ipv4_address": "172.0.0.2", "ipv4_prefix": 30, "sub_interface_index": 0 + }}}, + {"action": 1, "custom": {"resource_key": "/device[SRL2]/endpoint[ethernet-1/2]/settings", "resource_value": { + "ipv4_address": "172.16.2.1", "ipv4_prefix": 24, "sub_interface_index": 0 + }}} + ]} + } + ] +} -- GitLab From a8219cc59d7e9c49170c3df231de992ae41a343b Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sat, 17 Jun 2023 23:47:32 +0000 Subject: [PATCH 49/62] Update deploy/tfs.sh script --- deploy/tfs.sh | 287 ++++++++++++++++++++++++++------------------------ 1 file changed, 150 insertions(+), 137 deletions(-) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index e6a0c0c10..1ecb039e3 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -176,13 +176,14 @@ echo "# Environment variables for TeraFlowSDN deployment" > $ENV_VARS_SCRIPT PYTHONPATH=$(pwd)/src echo "export PYTHONPATH=${PYTHONPATH}" >> $ENV_VARS_SCRIPT -echo "Create Redis secret..." -# first try to delete an old one if exists -kubectl delete secret redis-secrets --namespace=$TFS_K8S_NAMESPACE --ignore-not-found -REDIS_PASSWORD=`uuidgen` -kubectl create secret generic redis-secrets --namespace=$TFS_K8S_NAMESPACE \ - --from-literal=REDIS_PASSWORD=$REDIS_PASSWORD -echo "export REDIS_PASSWORD=${REDIS_PASSWORD}" >> $ENV_VARS_SCRIPT +# Not needed for the Hackfest +#echo "Create Redis secret..." 
+## first try to delete an old one if exists +#kubectl delete secret redis-secrets --namespace=$TFS_K8S_NAMESPACE --ignore-not-found +#REDIS_PASSWORD=`uuidgen` +#kubectl create secret generic redis-secrets --namespace=$TFS_K8S_NAMESPACE \ +# --from-literal=REDIS_PASSWORD=$REDIS_PASSWORD +#echo "export REDIS_PASSWORD=${REDIS_PASSWORD}" >> $ENV_VARS_SCRIPT for COMPONENT in $TFS_COMPONENTS; do echo "Processing '$COMPONENT' component..." @@ -259,8 +260,9 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" - # cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" - cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" + # Deactivated linkerd for the Hackfest + cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" + #cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g') @@ -375,10 +377,11 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then GRAFANA_URL_UPDATED="http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_URL}" echo "export GRAFANA_URL_UPDATED=${GRAFANA_URL_UPDATED}" >> $ENV_VARS_SCRIPT - echo ">> Installing Scatter Plot plugin..." - curl -X POST -H "Content-Type: application/json" -H "Content-Length: 0" \ - ${GRAFANA_URL_UPDATED}/api/plugins/michaeldmoore-scatter-panel/install - echo + # Not needed for the Hackfest + #echo ">> Installing Scatter Plot plugin..." 
+ #curl -X POST -H "Content-Type: application/json" -H "Content-Length: 0" \ + # ${GRAFANA_URL_UPDATED}/api/plugins/michaeldmoore-scatter-panel/install + #echo # Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/ QDB_HOST_PORT="${METRICSDB_HOSTNAME}:${QDB_SQL_PORT}" @@ -408,68 +411,71 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then }' ${GRAFANA_URL_UPDATED}/api/datasources echo - curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ - "access" : "proxy", - "type" : "postgres", - "name" : "questdb-slc-grp", - "url" : "'${QDB_HOST_PORT}'", - "database" : "'${QDB_TABLE_SLICE_GROUPS}'", - "user" : "'${QDB_USERNAME}'", - "basicAuth": false, - "isDefault": false, - "jsonData" : { - "sslmode" : "disable", - "postgresVersion" : 1100, - "maxOpenConns" : 0, - "maxIdleConns" : 2, - "connMaxLifetime" : 14400, - "tlsAuth" : false, - "tlsAuthWithCACert" : false, - "timescaledb" : false, - "tlsConfigurationMethod": "file-path", - "tlsSkipVerify" : true - }, - "secureJsonData": {"password": "'${QDB_PASSWORD}'"} - }' ${GRAFANA_URL_UPDATED}/api/datasources - echo - - curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ - "access" : "proxy", - "type" : "postgres", - "name" : "cockroachdb", - "url" : "'cockroachdb-public.${CRDB_NAMESPACE}.svc.cluster.local:${CRDB_SQL_PORT}'", - "database" : "'${CRDB_DATABASE}'", - "user" : "'${CRDB_USERNAME}'", - "basicAuth": false, - "isDefault": false, - "jsonData" : { - "sslmode" : "require", - "postgresVersion" : 1100, - "maxOpenConns" : 0, - "maxIdleConns" : 2, - "connMaxLifetime" : 14400, - "tlsAuth" : false, - "tlsAuthWithCACert" : false, - "timescaledb" : false, - "tlsConfigurationMethod": "file-path", - "tlsSkipVerify" : true - }, - "secureJsonData": {"password": "'${CRDB_PASSWORD}'"} - }' ${GRAFANA_URL_UPDATED}/api/datasources - echo - - # adding the datasource of the metrics collection framework - curl -X POST -H "Content-Type: application/json" -H "Accept: 
application/json" -d '{ - "access" : "proxy", - "type" : "prometheus", - "name" : "prometheus", - "url" : "http://prometheus-k8s.monitoring.svc:9090", - "basicAuth": false, - "isDefault": false, - "jsonData" : { - "httpMethod" : "POST" - } - }' ${GRAFANA_URL_UPDATED}/api/datasources + # Not needed for the Hackfest + #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + # "access" : "proxy", + # "type" : "postgres", + # "name" : "questdb-slc-grp", + # "url" : "'${QDB_HOST_PORT}'", + # "database" : "'${QDB_TABLE_SLICE_GROUPS}'", + # "user" : "'${QDB_USERNAME}'", + # "basicAuth": false, + # "isDefault": false, + # "jsonData" : { + # "sslmode" : "disable", + # "postgresVersion" : 1100, + # "maxOpenConns" : 0, + # "maxIdleConns" : 2, + # "connMaxLifetime" : 14400, + # "tlsAuth" : false, + # "tlsAuthWithCACert" : false, + # "timescaledb" : false, + # "tlsConfigurationMethod": "file-path", + # "tlsSkipVerify" : true + # }, + # "secureJsonData": {"password": "'${QDB_PASSWORD}'"} + #}' ${GRAFANA_URL_UPDATED}/api/datasources + #echo + + # Not needed for the Hackfest + #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + # "access" : "proxy", + # "type" : "postgres", + # "name" : "cockroachdb", + # "url" : "'cockroachdb-public.${CRDB_NAMESPACE}.svc.cluster.local:${CRDB_SQL_PORT}'", + # "database" : "'${CRDB_DATABASE}'", + # "user" : "'${CRDB_USERNAME}'", + # "basicAuth": false, + # "isDefault": false, + # "jsonData" : { + # "sslmode" : "require", + # "postgresVersion" : 1100, + # "maxOpenConns" : 0, + # "maxIdleConns" : 2, + # "connMaxLifetime" : 14400, + # "tlsAuth" : false, + # "tlsAuthWithCACert" : false, + # "timescaledb" : false, + # "tlsConfigurationMethod": "file-path", + # "tlsSkipVerify" : true + # }, + # "secureJsonData": {"password": "'${CRDB_PASSWORD}'"} + #}' ${GRAFANA_URL_UPDATED}/api/datasources + #echo + + # Not needed for the Hackfest + ## adding the datasource of the metrics collection 
framework + #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + # "access" : "proxy", + # "type" : "prometheus", + # "name" : "prometheus", + # "url" : "http://prometheus-k8s.monitoring.svc:9090", + # "basicAuth": false, + # "isDefault": false, + # "jsonData" : { + # "httpMethod" : "POST" + # } + #}' ${GRAFANA_URL_UPDATED}/api/datasources printf "\n\n" echo ">> Creating and staring dashboards..." @@ -484,68 +490,75 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} echo - # Dashboard: Slice Grouping - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_db_slc_grps_psql.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-slice-grps" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Component RPCs - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_component_rpc.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-comp-rpc" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Device Drivers - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_driver.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-drv" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Service Handlers - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_service_handler.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - 
DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-svc-hdlr" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Device Execution Details - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_exec_details.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-exec" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Load Generator Status - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_load_generator.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-loadgen-stats" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo - - # Dashboard: Load Generator Status - curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_tfs_num_pods.json' \ - ${GRAFANA_URL_UPDATED}/api/dashboards/db - echo - DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-num-pods" - DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - echo + # Not needed for the Hackfest + ## Dashboard: Slice Grouping + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_db_slc_grps_psql.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-slice-grps" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Component RPCs + 
#curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_component_rpc.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-comp-rpc" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Device Drivers + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_driver.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-drv" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Service Handlers + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_service_handler.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-svc-hdlr" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Device Execution Details + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_exec_details.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-exec" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Load Generator Status + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_load_generator.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + 
#DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-loadgen-stats" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo + + # Not needed for the Hackfest + ## Dashboard: Load Generator Status + #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_tfs_num_pods.json' \ + # ${GRAFANA_URL_UPDATED}/api/dashboards/db + #echo + #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-num-pods" + #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + #echo printf "\n\n" fi -- GitLab From e624f8e9338ca36bc4893048114d647b3b7bf9a1 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sun, 18 Jun 2023 18:07:11 +0000 Subject: [PATCH 50/62] Update Hackfest - ContainerLab: - update commands.txt --- hackfest/containerlab/commands.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt index 54c24eedb..c6af3341d 100644 --- a/hackfest/containerlab/commands.txt +++ b/hackfest/containerlab/commands.txt @@ -15,7 +15,7 @@ https://gnmic.kmrd.dev/cmd/get/ IMPORTANT: for Nokia SR Linux, use kind "srl" and type "ixr6" ## Download and install the latest release -$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" -- -v 0.41.2 +$ sudo bash -c "$(curl -sL https://get.containerlab.dev)" -- -v 0.42.0 ## Deploy proposed two SR node scenario $ cd ~/tfs-ctrl/hackfest/containerlab -- GitLab From f6f9280d62ceb03847037b79ed9dcb0e4673eb25 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sun, 18 Jun 2023 18:08:20 +0000 Subject: [PATCH 51/62] Hackfest - ContainerLab: - Update tfs-scenario.clab.yml --- hackfest/containerlab/tfs-scenario.clab.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hackfest/containerlab/tfs-scenario.clab.yml 
b/hackfest/containerlab/tfs-scenario.clab.yml index c8d7dfde8..908a8a606 100644 --- a/hackfest/containerlab/tfs-scenario.clab.yml +++ b/hackfest/containerlab/tfs-scenario.clab.yml @@ -32,14 +32,14 @@ topology: kind: srl type: ixr6 cpu: 0.5 - memory: 500MB + memory: 1GB mgmt-ipv4: 172.100.100.101 #startup-config: srl1.cli srl2: kind: srl type: ixr6 cpu: 0.5 - memory: 500MB + memory: 1GB mgmt-ipv4: 172.100.100.102 #startup-config: srl2.cli client1: -- GitLab From 7fa4fa212a333239202cecb1ffd4bd61c4454a56 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sun, 18 Jun 2023 19:32:27 +0000 Subject: [PATCH 52/62] Device component manifest: - Update limits and startup probe in deviceservice.yaml --- manifests/deviceservice.yaml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 9dbd69553..21efae309 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -41,6 +41,11 @@ spec: env: - name: LOG_LEVEL value: "DEBUG" + startupProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:2020"] + failureThreshold: 30 + periodSeconds: 10 readinessProbe: exec: command: ["/bin/grpc_health_probe", "-addr=:2020"] @@ -49,11 +54,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:2020"] resources: requests: - cpu: 150m - memory: 128Mi - limits: cpu: 500m memory: 512Mi + limits: + cpu: 1000m + memory: 1024Mi --- apiVersion: v1 kind: Service -- GitLab From caffd19301481b501ea73df44cee003380d5b294 Mon Sep 17 00:00:00 2001 From: Lluis Gifre Renom Date: Sun, 18 Jun 2023 20:52:42 +0000 Subject: [PATCH 53/62] Update tfs-scenario.clab.yml --- hackfest/containerlab/tfs-scenario.clab.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hackfest/containerlab/tfs-scenario.clab.yml b/hackfest/containerlab/tfs-scenario.clab.yml index 908a8a606..df197ebea 100644 --- a/hackfest/containerlab/tfs-scenario.clab.yml +++ b/hackfest/containerlab/tfs-scenario.clab.yml @@ 
-24,7 +24,7 @@ mgmt: topology: kinds: srl: - image: ghcr.io/nokia/srlinux + image: ghcr.io/nokia/srlinux:23.3.1 linux: image: ghcr.io/hellt/network-multitool nodes: -- GitLab From 59aa12ab87303dd230e71337df22e096eade54d8 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 19 Jun 2023 17:16:14 +0000 Subject: [PATCH 54/62] Hackfest: - Reorganized Hackfest#1 descriptors to new sub-folder - Added descriptors for Hackfest#2 - Minor corrections in --- hackfest/containerlab/commands.txt | 50 +++-- .../{ => hackfest1}/context-topology.json | 0 .../{ => hackfest1}/device-all.json | 0 .../device-netconf-openconfig.json | 0 .../{ => hackfest1}/device-tapi-ols.json | 0 .../{ => hackfest1}/links.json | 0 .../{ => hackfest1}/service-l3vpn.json | 0 .../hackfest2/emulated-topology.json | 210 ++++++++++++++++++ .../hackfest2/emulated-topology.png | Bin 0 -> 41576 bytes .../tfs-descriptors/hackfest2/l3-service.json | 44 ++++ 10 files changed, 282 insertions(+), 22 deletions(-) rename hackfest/tfs-descriptors/{ => hackfest1}/context-topology.json (100%) rename hackfest/tfs-descriptors/{ => hackfest1}/device-all.json (100%) rename hackfest/tfs-descriptors/{ => hackfest1}/device-netconf-openconfig.json (100%) rename hackfest/tfs-descriptors/{ => hackfest1}/device-tapi-ols.json (100%) rename hackfest/tfs-descriptors/{ => hackfest1}/links.json (100%) rename hackfest/tfs-descriptors/{ => hackfest1}/service-l3vpn.json (100%) create mode 100644 hackfest/tfs-descriptors/hackfest2/emulated-topology.json create mode 100644 hackfest/tfs-descriptors/hackfest2/emulated-topology.png create mode 100644 hackfest/tfs-descriptors/hackfest2/l3-service.json diff --git a/hackfest/containerlab/commands.txt b/hackfest/containerlab/commands.txt index c6af3341d..18c629c0a 100644 --- a/hackfest/containerlab/commands.txt +++ b/hackfest/containerlab/commands.txt @@ -24,12 +24,38 @@ $ sudo containerlab deploy --topo tfs-scenario.clab.yml ## Access SR Bash $ docker exec -it clab-tfs-scenario-srl1 bash -## Acess 
SR CLI +## Access SR CLI $ docker exec -it clab-tfs-scenario-srl1 sr_cli ## Destroy scenario $ sudo containerlab destroy --topo tfs-scenario.clab.yml + +## Enable OpenConfig data models and set as default: +$ docker exec -it clab-tfs-scenario-srl1 sr_cli +# enter candidate +# system management openconfig admin-state enable +# system gnmi-server network-instance mgmt yang-models openconfig +# commit stay +# quit + + +# Configure containerlab clients +docker exec -it clab-tfs-scenario-client1 bash + ip address add 172.16.1.10/24 dev eth1 + ip route add 172.16.2.0/24 via 172.16.1.1 + + ping 172.16.2.1 or 172.16.2.10 + +docker exec -it clab-tfs-scenario-client2 bash + ip address add 172.16.2.10/24 dev eth1 + ip route add 172.16.1.0/24 via 172.16.2.1 + + ping 172.16.1.1 or 172.16.1.10 + + + + ## Install gNMIc $ sudo bash -c "$(curl -sL https://get-gnmic.kmrd.dev)" @@ -52,13 +78,6 @@ $ gnmic -a clab-srlinux-srl1 -u admin -p NokiaSrl1! --skip-verify -e json_ietf s $ssh admin@clab-srlinux-srl1 -## Enable OpenConfig data models and set as default: -$ docker exec -it clab-tfs-scenario-srl1 sr_cli -# enter candidate -# system management openconfig admin-state enable -# system gnmi-server network-instance mgmt yang-models openconfig -# commit stay -# quit # Check configurations done: @@ -76,18 +95,5 @@ gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set - gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! --skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/1]/subinterfaces/subinterface[index=0]' gnmic -a 172.100.100.102 -u admin -p NokiaSrl1! 
--skip-verify -e json_ietf set --delete '/interfaces/interface[name=ethernet-1/2]/subinterfaces/subinterface[index=0]' -# Run driver in standalone mode +# Run gNMI Driver in standalone mode (advanced) PYTHONPATH=./src python -m src.device.tests.test_gnmi - -# Configure clients -docker exec -it clab-tfs-scenario-client1 bash - ip address add 172.16.1.10/24 dev eth1 - ip route add 172.16.2.0/24 via 172.16.1.1 - - ping 172.16.2.1 or 172.16.2.10 - -docker exec -it clab-tfs-scenario-client2 bash - ip address add 172.16.2.10/24 dev eth1 - ip route add 172.16.1.0/24 via 172.16.2.1 - - ping 172.16.1.1 or 172.16.1.10 diff --git a/hackfest/tfs-descriptors/context-topology.json b/hackfest/tfs-descriptors/hackfest1/context-topology.json similarity index 100% rename from hackfest/tfs-descriptors/context-topology.json rename to hackfest/tfs-descriptors/hackfest1/context-topology.json diff --git a/hackfest/tfs-descriptors/device-all.json b/hackfest/tfs-descriptors/hackfest1/device-all.json similarity index 100% rename from hackfest/tfs-descriptors/device-all.json rename to hackfest/tfs-descriptors/hackfest1/device-all.json diff --git a/hackfest/tfs-descriptors/device-netconf-openconfig.json b/hackfest/tfs-descriptors/hackfest1/device-netconf-openconfig.json similarity index 100% rename from hackfest/tfs-descriptors/device-netconf-openconfig.json rename to hackfest/tfs-descriptors/hackfest1/device-netconf-openconfig.json diff --git a/hackfest/tfs-descriptors/device-tapi-ols.json b/hackfest/tfs-descriptors/hackfest1/device-tapi-ols.json similarity index 100% rename from hackfest/tfs-descriptors/device-tapi-ols.json rename to hackfest/tfs-descriptors/hackfest1/device-tapi-ols.json diff --git a/hackfest/tfs-descriptors/links.json b/hackfest/tfs-descriptors/hackfest1/links.json similarity index 100% rename from hackfest/tfs-descriptors/links.json rename to hackfest/tfs-descriptors/hackfest1/links.json diff --git a/hackfest/tfs-descriptors/service-l3vpn.json 
b/hackfest/tfs-descriptors/hackfest1/service-l3vpn.json similarity index 100% rename from hackfest/tfs-descriptors/service-l3vpn.json rename to hackfest/tfs-descriptors/hackfest1/service-l3vpn.json diff --git a/hackfest/tfs-descriptors/hackfest2/emulated-topology.json b/hackfest/tfs-descriptors/hackfest2/emulated-topology.json new file mode 100644 index 000000000..6885c7d90 --- /dev/null +++ b/hackfest/tfs-descriptors/hackfest2/emulated-topology.json @@ -0,0 +1,210 @@ +{ + "contexts": [ + {"context_id": {"context_uuid": {"uuid": "admin"}}} + ], + "topologies": [ + {"topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}}} + ], + "devices": [ + { + "device_id": {"device_uuid": {"uuid": "R1"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R2"}}, "device_type": "emu-packet-router", "device_drivers": [0], + 
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R3"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + 
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R4"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R5"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 
102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R6"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/6"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"} + ]}}} + ]} + }, + { + "device_id": {"device_uuid": {"uuid": "R7"}}, "device_type": "emu-packet-router", "device_drivers": [0], + "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ + {"action": 1, "custom": 
{"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, + {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, + {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [ + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/1"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/2"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/3"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/4"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/5"}, + {"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "2/6"} + ]}}} + ]} + } + ], + "links": [ + {"link_id": {"link_uuid": {"uuid": "R1==R2"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R2"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R1==R6"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R6"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R1==R7"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/3"}}, + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R2==R1"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R2"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R2==R3"}}, 
"link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R2"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R3==R2"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R2"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R3==R4"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R4"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R3==R7"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/3"}}, + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/3"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R4==R3"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R4"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R4==R5"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R4"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R5==R4"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R4"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R5==R6"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R6"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R5==R7"}}, "link_endpoint_ids": [ + {"device_id": 
{"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/3"}}, + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/5"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R6==R1"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R6"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/2"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R6==R5"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R6"}}, "endpoint_uuid": {"uuid": "2/2"}}, + {"device_id": {"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/1"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R7==R1"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/1"}}, + {"device_id": {"device_uuid": {"uuid": "R1"}}, "endpoint_uuid": {"uuid": "2/3"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R7==R3"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/3"}}, + {"device_id": {"device_uuid": {"uuid": "R3"}}, "endpoint_uuid": {"uuid": "2/3"}} + ]}, + {"link_id": {"link_uuid": {"uuid": "R7==R5"}}, "link_endpoint_ids": [ + {"device_id": {"device_uuid": {"uuid": "R7"}}, "endpoint_uuid": {"uuid": "2/5"}}, + {"device_id": {"device_uuid": {"uuid": "R5"}}, "endpoint_uuid": {"uuid": "2/3"}} + ]} + ] +} diff --git a/hackfest/tfs-descriptors/hackfest2/emulated-topology.png b/hackfest/tfs-descriptors/hackfest2/emulated-topology.png new file mode 100644 index 0000000000000000000000000000000000000000..e8684e825cd8a92d58c58c785b45203f7b0abee2 GIT binary patch literal 41576 zcmeEuX*iVs-}WHUl%144B$O7BkX?vm9V2T+WvA?9&01MXREWlI#=c}7N{b}Bu`d-e z)cv6vGuQbwd=2h8KmR z&ZXZEKbdG0kb*B%?uMGGsG@eB8Te-3T@_sw6sk1t;NP3H@cjWdt?TY66ypcvKdNTe zJX;hBw{t~Z#n{JcVU*5?gVgtV**P`5wjhSIes_TA|Mpd^?657ie}V zK|JtoRM@04MdIK8gmzNFR{`TNM&w(Zpa=4Wo&!e%UzGFcVNWQO>SZ$Wg;`vc4!#8I zv&bRehCTZKzxw}=>Hozf82>cxE%VW!R&IlL&B_=*K6SB;?f5u 
z?@y08y*2rKvxrQnU>^-KD%hga=ocPC7+INW9sQbka&(qN7#*r~9F1V$9!ZigKlR{G zh|P5SYnP41Q7=(9a%cL6T4WHVtMgO7!vls^dJUkv&Qn$4>SNA!FyH3t?)>Qvk7z%X@aW~1Ox#GrJO5@&G48PJ? zTZ&9aXk?O}n4Mj|@ym(|yvoDvyx|S^1Uu>;(@GJ_&F04uyoRyv12=`dGy;#=~bL3nT*Sil^ zavWJVaKT+!SJ=EM#En{)(c0SsFP^4!{^*(Rnl7@aDXW{wNwEN1y6#1b5bp_=ED88qArGYyH-)x<4Ga(l0nCOUF10VqWScH1oJW@h9UVNAh#il5=Wtk6MM| zF5O}y7}NUxdUe;y!Nf|`Db?p^^!jG+14fi`1q3@`-tZ?>H2Vkci?6BL`Swi2DY-BV ztITG1nvM*X-yJ^Z*mp|aLzjakh{CX^?};-hG%GU5)NuLvG1}$RBaX4VL+ME>mCnO` z3V*&oT>LW;w^1Z#-nX_`N2wilZ5%B$uWGML4%!-%n$fnVgt1*KDQ1HAGQ)d0VycOY zwR0o#lzJO~%MrH^42wzTE-j>Pq41>5-_yhQPhiwmy;`0ff7|?7X>U`HApUpGdhfm~ zaV<$=5Z&F@=qGbcc8cA5Z)c+p?0?vEE$;97urCNv3)FlTi_dw3*pzom$p#=gi+hVI$uHvJPf^dR}ZrK&HqXrlCmAj1ICIw7I?lPJ+WQ)f&TtK02 z3VEQkxEuxoe@34uki8^t=8|1np1hmI`+m(tb8}_NnR1Ah6{F?RR(x3Wklkiqvb1Bt zeVjvNgO!!v`M+&@9?wr(4yks4w-#2Zx7Mxb1f1HU;B@J!Q1sl0162QAX&nvP+qSN9 zpT0)XUgS7w@(oj9+I3;6XC;3kR=%C9l_b^RF=JTe_M@p+ktsYbHTL@&K>{BvI!|`| zb5qlmLp6et*MXZ;{A1p<&G!0&Z(2ci4E^2?B`DYa79C396mm*x43k_5gp{?v`W#uh zDHn<`1d~3|BBJ792{YUGLdEIkKcb!6gTcV zM24zpkeD#Cy;?=T)>EZQ<{ZVN$o>N*H`QiwjVVc9Go1~5a;wsn@89U%{PpN$k(>9Z z=L_j_8Wi&d#G7&4ON8Qvp@8L2oH)v*BBC6-o1FKeQ)>kk8pBHJ3PK+u7|P?YNAj|xCtcByL%fFN}= z_7lnFN1Vd?%M>|J{DM$MoR~pISU9tEg5%v4zqQ%xcAdL?4z^g+mPd+_Id=9GCW(&j z?m~!h-HMT3YW?OYR1@Tvcqs_@HakZ+E!;w!<7MuAJ3>)-ihKGN&nN081;L;#;-Q0N z<1C&qQhjeia$&nOz1Sc#LL$L1QRvTV!EvmE2(~zqF{YMeYE8=#PREND+&Yq;WM*$m z$xRP*h^rjYN#t8llMMXpGu3jk<4b2QdTLwi8I|{{^R4$3HWVe9i#;J1dRg|5>v1IY z=qZg7ch-k5NSbaxw+o8Bn;wt^wPVzydCe@8a;QP23d1P+*sOPFuc)+(;~^I>1y&U< zT>=9YX|ixi7w@-^rujtrgIhanL(M{5#T2q5nUeF_Q=yN3ESl|v*;=MIWz1yr&(6kp z<_Zyli#ps;g~!99h=o{Q`C5p?gAE+`t9jFEDJ%I(s+fm2;_17~j!R(C=&?WEbA4@! 
zln+cwPV%oJFYoY~jb$q`tv$O0k=muN;QHZ=)AdWL-eARb5O4kY8BofrrS)j#?xh#l ze7UDA5|_IhxTarwr!An10_mj#JJJ+}H9GKA7B`zHt)k{{3l=yr}!H zZ>37PHW066hf2=mvWNESWi%YpZ8lVP=*kF>nI^FocV%i0i$o2ToIQ{KR#J~fF-ssZ zK&YwVqlW?ss==Mb$at&6{>BN{lR zqqnKl5(rWn*o3@Zql~d9La`T{RNJe@1Lr=+@;Z*2dqE%wwu5qCi%XV)cLt(Ln~3U6 zXHL-Uc4r#5objMOhdPmUXl=OIhuqn3fKiVW=as9FO=k>0z#BMG6_gi>`;e}cecZ_5 zT0idVqa=HK?ZhNc-E_s#ot4%zhkYuJ-Ee^vhU!9YLu!RfuDj>H2rUcm&^Zg8WOhdO z(^BWOiF{@eQ$f7phZOp78#5CXCn6bX4-rYv#CT`hI;DwgGKYK9c!%Cw_y>?p8y?gQ zSKrGdo+YOAev@`#+*$Eb&qR3GHceEV;62qPxwFcX& zL-N@=Kh=MS=7cP%%jk9y2m9D4jU`52x!xTl{q|dWzh1B{A}XO!QU6LCT-%P+=fmx! zg6lmOL`E6{>byS<-iZ{_jP;1Sv=TKpT36-2HIwTc=^jI&MF}V!d38|0zQCfU+Y^ja z!n8!dGyREemUwe!oT9_A`2;=1(!JAz_MLA+9EU3Md53sdP{AVtaFOD!^~(`RN#yi` zVw*K}8wpQ!8)N>?GM-(G(?+@1JbQ9<-JwRTbiAH#oNwrW>I&pXp==`|wB(b;^om!M z)_RRxCO*fEktX%Cv{<|$bDDj(>&}YymM(;f)Lbtz$gXO&z>aSHPFwQebdPybF%?Qu z6>ivLwPItIP+Z&0A2eb$rMJramFd!wG8S(v=KWPHw;HOa$!fV$*YO4^s!1@y-(v}~ z;Y^Ka!-faG6mgWmAW~%&BE|2{_V}I`Bz z>uSpPEbO0&(PNQ3KWF<2=gg{H7TT5e9`rW^_@|g~h~fQ2AN;|@&j%(#+Lx=O&Uo1M z2^DXFJnUK1XLDELl-I)0?c&dzXRJn$;@qmGU$;U;?sk6%^WzH6XmM@{^S2dOEP3U< zJR(`-)rz$pKXlxOfk+M)N0%TMzPyPJ_oCLH^8->nb?hSs2> z@tcy#*k-Flp`yc)^l%lt56?Zbk$v%Jl#tW;dgwz2YSIqxK^rn>uFwBaT)@mvI%xMn z-Qf}aVgXU%L@}e!Bs`{dy`$5h&da&ZxBud>*6GZz#Sjg$O;lFL&&oR}Zk!Z&r8_{4 z`pA!X&6|?t!Mo|ccY}IXG8F^11|oc48KABc{VJtX=Jc0L%WR`{8@f+!;=`OqYo(uS ztkN)Ed`BH2eh!}K39oY3t zO5;Q^r^C^+nq4Ds*WiL#E_s|U+>u~cyx=r&BQ9uf$GfhG+mUzHf1u90Sfa-;vlh=H zXIv?2QC+fTDf)-}*=#&TV`S^5T7N~DwkxR|m-!~?+T~}v<8Z^$j(tAZ^SO^=n<7=C znhv7`0+C9)0D6X=fZfgMvB7bqbO;e5%xbuZT@^LW+qBpYB9RQh{FdqqPi~=(i49 zE6UPeQ@%OzuK{3Bg-BnAQ;Qt3QDAV)Y=Yiv$hXWW`-^pGwoqLgx3up!_`j-XeOl^Z zbv3Y>v#TCC*~VRXm&s;moafVgejcEFtD)n6(3$-HX%b#J@Q*&TLAa=VAgy}jmK6#` z8wVC~%&xu=&z+pao;4f6^{MiT)hPqEtzase`t#F8(CJ-Tdm+rN%l4Zf;P9&>R^z}n zhFN!;8pVDHZn`?j(o-!&XufTEMYU}zV0(S7J+`0b$00$z+<2cug%tZ zF=XQ$=Dod81&>MhdIr`x(|r7{I`_#f4D&)bH7e>9@(5N6Ddr8-4BfiPQZ0OrO0iH_ z4e<$;NqzcO94h+6Hdi$qosWr!F5DkWh1yghqnJ4>aA{2%8l;%hmbIqeI~n_yg{$MD 
zlD3h;l27Q;kfq|qSZ{zR#{4C79=&P=gRQ(Ggc8^QGreVHLSm49ROLYWTE}Y5{$|W) zf&nfETJ|n;=G)1BzpyI3FVkKF`jBIyV*HvKHF*dch;Q9|3}|laz|Cy$Dc5DQQTkiO zu>%>qT#e|m8}F$p4!`im`OmnNk??0pi?R)a`xKe^r59qeHcJ{yWWW9Od1(ntF)2o1 zeKyK+D6A9gye!;xg zWu@=IG=?Vy(L%A0we0$~95``?UEw)a!Q~P&ct2s1~Ti-@Lnx86^)@5->84*u{QYUO6>SVj1s! zu(%PDhko`NoLNV4SZN3^l4B!v6IC}N==kuHGg(%gavY_bF<&c z1+d_-qA_j0StTB_0xJ!?ruVr2zb8NY00@K;K|pqtW7Ikn0;~|EppR$3Zt^w(gE{5& zV2y+wOKi-KHVY9N)QEuxuMF0DI?l45B97L$=We7ufnM(vY$G(Pol0N~;)i-Tp4&{j z!fU~dqK(GLR6L)po{0TjB+R)!8*EM0^|Z&bYe!rQzyKlv#XJlqqk7p>$Z?(&^5(cs zQkIDdBs8*RTdJSZUvVv>-&#F<${+dEdEXV=gNQc`ozf-6K+ETYea7wYDIoNp`&P@$ zE;lT*ZRtI3>PMI{MKPa*UlKfBHyc4sC!-!@Q_N_qsbqwN2B;CsVcqsv52Xj>PDu@v zs1`UYOZI0wICsaM*2I%{b@KCE=HrNR@#aGuqs5`8%-?>iORj;UL#`5?!P`wGFoZn( z7GP$FFEs3KIa0okU%U8YAHl_rAN}w5__-7ouqruv>5H%xJl}iqD7;{_woZ42$1-#c zRrxm&luwQA!)j`_n*aWp2@m-S*Kx&d3hwn!CcyZ%eT_R_#R^`2RE^|4UI3N6o{M9? z7EygU3Rm+Wv-YVH;w=6$A#Z_Gd#7H{7wpeIwr)V&X>&9@l=pG{zRh&SO$+ zoWcp`fs*&Ck-O%A^ff-^mqYS+(6yT%l-Mu+{lLU)#%NYyXTXguP{U<< z%yb|AUXlAq{CVO@tXHBqrfe@FnrAY&9_r*T=keT6-&^t0w|;wd9;}C8Qn*rd=NelH zTp4__o=|$F>J0kumOR9^{-o3yN?trp(7eOG(I3^l=DP^6Aoll z@h#MVMfWB?*V8}DLMoAyX0edZ+hoRlu)AJR>kZu(4=nwKymsiV)8Ml^JjH=UJis`G;nKUiHcLR}3XZ6q zak=9z`Uu`-#Ctpdf$};^WG~6!KIC>}qA~;HxLxbJUVD%E&YOp20XiIi93vyuExkiX z$GeC+Nx6=V)P?@NQOx*JaA$;$ATT&8NlyIx{o+{tsY+w(&w!WAE;ektnD6f^yRCzO zqvl_wY(5{7XV(HCIZpqD$;@^T0GCdp5uc~GAazai`A)JNWPn)Y(V?)Bx|TSZ#IIN} zGMnbVc(2ldv@%8`jC4AO3`V0)gTEB?u#1B87pf|ocw{a{GVwYTwa01un*DV4-EQ!~zz@X+mOZDz9cSf|^Y>Pgd-xk#om95RWfzr%? 
z(eKr+J3DLceytK!w%gtv4B>Hbe1rW)B;g}&!RpW$l<*h6Hut-J~hvps++%AN?apJYdGmMN6$7vp9NjW=`##CLnM`~VDZC8jo zyN0JQEQ|HOuWe9*_J+sCGc}n2;Of8bsa_XIXjF~lBkZYU+PiE2P)%gnxjy>MvENJK z#mx4h3t1;e@*ep^3pjZ26yArtdCfoQ5?tRR(za<|eaY@)ysUqDzm^m@K*X%Z`&-3~ z!syYLbT4k+_RcK2Vrpj9OTZRObtu*Tz>+(yIFkjk)#57Ws-RBi7xbCTIl2wycV&EL z19SRree%z>)ZBc-p?emIao|}ky00XU_vr4&95YbvS}+bw8RaPGJ(Drx=_DS7)R50o zH4Md_o^VNjty`&r^$b$ekz$rgv2?77;%K-Jj;E{bveP{NT@#-;tt+N8=kspi_~L*^ z8L~6qE5x5b3k`9q`P++L5@n8RdiSd>#d+xYmh7F5G-spymF^VW**Dp7a_rYCj=pJn zHa?PO4fs+6M((^>=>CuARq5hCu0FPkd1_Y7lV#m!7WgKcDT)uQGQ;R{2Zd(Y`|>}$ z?BJRD@6fdMJiwrh69Iu}tvp>fFX>N!O={`uP9Hpcs-y&G#T3pj{c1W)oDY2gEsZ{( zY?|%9ryo8!>OJnsAN03$;bxXwW;WiJNOb5gdX2eN@%NCHa2TV&0dNFexNF-C@rO;3 zR+F89JZh1QZ5{`VuH~a~x1)!!YE)L+cSW5FOiP>a(Z0A2Zzu$YS&d@KO>TcEenUR| zbx#g0DQB$Pg+sAkV%NA|mVfibY0HADYyg?YI0Viy%LNN(uC@HDfBn|3kr;?_QK67C zv`msmaujp^p4RK`J}FVz{X4~F<$T>IgYXQ9$m{1E*s+(@-z)ovW#xu2CYxoqU}f!Z zU`zCwyEfk3#_r;7TLzx(>i^w%r)2u_O^rR?-L>MN>$hqDZR(0Gc2xiSmp1hy>UCODI-EhpTS!MRMWwG9f^J8U#-8!mt~_73R~;7FzZtNV?W`(o#Aux1+4)7 zA&wCy_8GA}yOy4TQk%jcHJ!@88utUnhhEo1%r)WY3KX8DX<59?6V9|yzx(xMaW&*M zbEU7q>o~GiREXh3JgK6g78X9cbF25vZ|I`@vAa3gZsXCVev!=YV9n1GMNIRnCg#gK z-s=8^DuNCHXSKt;Wcs_0(?;F6t!k9Zt`6bz#f*=2K)q@>USI}7#hxw)vvipwn<_7o z`q$?AtJFvb!&xPESa+@m@Hs300lyn8JBiWl)jyMPLeEQP)Waxwv@=8<;yf!mDdx`y zzJ zweB_7R~45Gt+6o+eguOC0DUTdxvizvuXgKPP&Ex|M5qKy->JmD3EbDn1`2}OuAio2 zJ%FRpI%_QRlM}ghF{iv@6%5V7=R(W(9E?&V?e_vzRpPZZ{SEv>7Wzt9{3SavW4D$B zjS=yvyi)mj^J5Crvy{w7(YvwuJi7x)O%XX5;_r6(#fMCaCDB$}V;tJ_yjoHoG1Ldt z!HTtCD!S#>;+6@uxZ`dgPZP!QuM-2_kG=}AeS_VV*1(cX7lxNd+&gLj+8Ivbn!59( zNPv6NMO#bT`D}A^&N+fIbT^F?TI3-w&vG*Q(PG0w>~`{$jve4$I-NC$&ej>@m=yM(e3Mr{go`9{q^JZ zyRWC9d2vj5U4}kZ=e3|6aC+6+u4|eU5nQ|3|2tz#qgRIGmz`OFb5MR_+CzT-`ExRW z&>uALW6|OccCg1@lM&zd^Jps}_~HhT3{U|PwGU@q5%sH#FJZABAkegYv9?MaY^zEV zoUuXhqPe)27w=`n^pmm(Kt&8Oen)fdrekSUdh&YT6wOAD8xqk{qj@@f@8qlAENSMjKUz}V2(5%?QF}}klCMK zi!14>zGp*0|6~A5Iyxogu`eJ0#-O|{S-PCtQ~JhSl12cR10xpI#d=cu?%ZWAjJ!wQ 
z3T_FfX07LnSy*BUKM>AM#Qd(}n=1@`q8gF>oBAk_9a{t#FH1k}96(I#ke$^ps=^azY?2q|`InrfLONCKmmola@AnRAEOxNyo z_4nAU`H~CUcf^e3*rnJNJw)w9J?sXbD=n$uG%|}SYtoah0{rIGm|awJnXH@vnf{)t zZrr~xY*tq=28CsG2sAcN1X<)HGdpqjYJd)K$gA2NJL;FN8bSS-G2JD`Lo_Pyt~xaM z^7EVCtw|ClME&E(pV%Ay*J3ADTXk=$&4u*H14b(0CUCC=B8|aIoDj5s@p%B*emo0B zB}HTF9d7nBulI<`cP-_45*12I9jto4y~Ylb+V(T@RFB`s_gs^2Q-H^u%GJK{?=w&h z2z>rSih+9C4{MlLdk-ju+Q(-a2I@l95$%mI7hIW8D2XA)=g5xBGK|4jpej>a#w0_k4lNrOnBNqMIKg#yErzI$t>ak@cjoobyt1jMpPzWNW&! zb^Pxt|AeSdYYUv#VmgGkKoWzq3v{tHv?qC0;n3#>%X1fgy`DFovV*Ma>_Go-(YJ?e zwMj9+*R@}Gg_eWN?kwYg=WdZ0=s5-lE4RQ`q;hm^m zj_G?&6jqH5opH*m?aI~T4{Q-o;}Se;-!1I_08usk!;omAT_|J3MIi1ggMK5JI`;)9 zw`d=NAvBTLVk-rx{-*~iq4%f0`og2xa5VJMs^HqHntpdv??u!tH60|LzY*lxu$i;{ z_jkv_GJ;1(96EX{4q~Z!0)Ug%d%GKri*WgF2vYyZT5zO{9Rx3=BFkakpufu|KkSCz z7OuIYqGLO$!oF`)8mr#nZevssaFyu4ky9Ju6acmFB1j)L+Eclu?BeAC46Z*fAZwSN z9Db0~4Ap<|BBBa`GV|@+vPMsh@t_^BPx4>@mSFvbLsY^C;UrazWthc~L>H&DTkZX8 z1QLT?aV&L<9bnTgz+kyR_J2F6{ljLg-q&QO^L~M8b_jFwx9l{ch;DKYWRBzKhL+A~kr)d5ouQWSru{O8NOS%-66x2UqsvV>q-8aP3wj?)YeLmkMgUZ4 zZ~|m5H2|sm@MF&Q{w+GjjKIej>DBsT&V#Ea{;nlY&@E3osV3P-3}H8cKraG7{x(%8ZE#DS5&s0McuNBUGh?kkU;V!PUj4<*X3mv$d(p?5~^xO zt>+FOCDO0LRScWcvhw1eh(2%{s%YVx+d%-28^vG0ERNPWM<*q@&A5wR%@f3xyt%DE zB%|33{f5rdY-F5xSKF zNB!ms*YSmR3V#-=#$Dp5CR3B!y#dhpq3Y^c6pXV0F-(4>OC)q*Q(lWB_CeQFK8&AH zKc;RpiRKW0?k8$t=gNYTg=XM>^z8nOaH3o=xqE++gMNRZK@6Al2w(y8L^|cbQX$!R zHSty^o{ODCFxhR|SdjEoH}m;Fc^49eUes;~-0`EZ1efG(;i654;`K31sHez7l}Rxv zF10N?pvnS>653>c3i0iGHfDPV)iwJoQK4som+U?kc2IJunU6xuJNSN9W6N4O&@(e| z2Wk$1vd~1vu9GFGCbmPoGbe{oD?m-z{;Z=kw3gK3JR3Gfz4_s$!SK~zh!|}4S`>+4 zQGyRUsj?=pGFA_kKOG`)14EbRytNPE%hjt#^zi- zN?}Q46mXh(fi;*x!Focno?|>GcDE_HupDR*lvnjGjymd(jIcM$ z_YQP8nkXJsI@UA@^AHn8*B6F~1hG69fj%fX$mn&?&K@dtMjXuniSpUcAkSIg^Z8DN z=Z=CN9S(148aYGcpT+8CKLheIKiiu>VM%oh07z69TY(4g)Bhgdo@bEhgk4@G#Xznw z7*)1_rdGg@3>&)lZf_@Uv;hWV5bapP?hB*0fi=4i!R&4Ru`PE$ zf>71!PW@V+`65K3ecRSJ+p&DIl5B{_AE5)@I9DRguZrb5X{IT^T%98<0@9Dh$ zjDcxh+POv@e;7w|_!RpQfCUkknK3D{m_aa1gg*q9bLv+|It76fe+V5y3y}!es+d1q 
z7o5n+QuaT&q#bM>2RTd;pc_#s9u$6M0Hg^bWc+9StBa#4*~U?h{|C05-p{}_iD(Kb zhm?i0_k6jbm?hrWFAzk}dqrDCjpNaH(B5d__43aMZ-ID`%gSWSVL9VM`o~QVL&LNO z%;b>(WuDvMG+5SKOxS>g@vYKnup3TzEJtVNiZ#_`3`=^l1IGoCYpG<*&0*Q;R2w>G z%BuS&U`YOV+Yt>~5$tvBXs{NSQeeG54)N zx+7sx-ANr88manRj}!w;p_@@e7%sgZh+v<)?Uq41YefJhB8x}{Z5{*naz1CggKT_I zSgbHE?civZV@ofnMDDacMYNPe86Lb{-XmzT8RRRR@E})Nd}$MV4;YYTMe$D%vr_HJ z^xe-0h^P`m?eKuPM6O|aw9D7kyQDf5Pnu{AwgH4ael6?$o|d!ve8;68c^c*`k2%kH zRgHS3Kz4()KbtJ$y7-#v%SlzuP@~&6Amub6{Q+Uz2za|)e}+oH5i~viS}Foyes#JN za1>M%iUBSdNp{6vx(xKA1{26M#5fddYxWDPsp|0+1IF0oowyI zf)3T=WT54~L%V-#u0!5?F;(uqKKDkz-{t1T#=V{5046hsHDF4Ha8K)eaSX1~5|xIok)gqRCfk;IgCk7TyQQ~V56?C0PpPvNA?V(Y*Y>)-T%raE zf2#1dUPf_l2Vwaknnx$;RJ7e~RZI%7Bc>Ql4qkz$P?|FMy3`>?!k$^Li1tS z;J>nDE22eA*qbZYK-T7t;IrYh5}0n|6T0|rJ|46C*~g8|^_84G~WWahu;@#Nd9bo zO!e3YCT=bTf->$bEUB-$xP2&yxnA2 zQxyLpcJ@^hnO(n7665#TasA47LDHB#0nFRbG*-2wH6DyFTU+=IS;11u1Xn>1v=k7``vD_xl2Q;Az+oW^%C$)x8>8sfO{jx8CYm#CeDJ3BG2v(mAjM=ymY-hFE{)1gK$ zAQBpvHMkdApW`Vd+?P*2Om*Z?xJQPJP72Cf-`c7)ZvZiWFwbg%K`$j#y{Jkw?F zW3x60D>8T47xXK{j{P5%)-KX&dMiSvUoWQ;D1&C~4vp~xKr9m+LG5VnY__*gx z_a}Lpc+RWE+RAA9$%HM>DhIV1q_^Vd$7`SV%=d5 zGVc+G@617miY_>H_mj?qpKrhT0f9ojCZs`hyUq7%_MtjQs@3r)ZM01+|M|KS*ZK5K z#m6f3M=s&%wMbE8(bYdt^80d}{i_CrG<$$cV&$5z`$ItJvu{6xFj))b*Bh0G z1(Nj8W@13<3#p@C$U`{3J|cFNU3C?WPbPbiOT}5y$7*Z3aAGEAD_McewDi)|Timoc zxf&j{^cb;K6Qg2w$qFf*M0SbMZb`A2RniZob|&bt{#9N$WhIARQU8K*&mm5<@)(O8 zt5%k(=ADXC>pPm)L?1-$wqz@T1T{qc6SRCDcJuY0%O6|$^uR)F{5bAk{h|C`MS*uH z;X|<&NLsEj2GlbG`M?-Zbe5Vf{tMt=QH&A?)@L^Q0HmxxkMae0BKTqx@~AwzbD}5I zuA(3#QK9ZdfXyz^ejq{}6t4u%SdhA&j9UJjz9qtSoIzj(dPiGWT9lF}T1={lQ%gPh z$aH2mRoKHR%zysDDzS@?TkDlnhJr@0I(vbK-NlEW^=P6BRuiGIgeS)CGlZ1qJzv^< zr`B3Svdc;kCO?+igFiOyJ1OwMST6X>&qb+_=O?l4uRXf2Txm3Tne+;>LMy4zyuXfG z_%L**iaV#p%qsG{HT3A>t>J+gfw9n?)&UGq1L7QiY(mH8bUFor_RMe5{)6^-p`V2w z8akx|IUrtL%y@kIzRZZ*Ljp+ay`^PEZ7uG7;#h>1+8fjuD@D}Nk%;Hg@1{b>4LHKn z;EHc}XhRpFDZdBE{k2EYCbM^aApwj6WB;R(JauKP|QdVw# zdgM!qaYb{(5QZlVF{L5+f&~y279d?EZcsK-wBaP;a$Ndd?#7YM&Y%)So}J#(S27_BMi`qZ$$P 
z5IzvjwB{x1l(%uSE8{XEUMSK_SM(}GrrRd?PU@7b2G}*S1azQvlfPznA|itggnI{6 zZ14=ty0A*wtxyhTfaXzx)4_Y$Ubp<8^(2t9t|W|*yD~?^4zP_hg&!P!>pVP9GUD}L zpDz_Ug=R#8`Y|=CJY-@Fvftqg6;B>nF$feQt>pjR_5Fd18U-;K<+ZGg+jnJTybfAo zP>jb%?8=fkdZ5Ow1}!Y2y03%uUu_+hhv~prke3YNl;-#|9KwV0 z9%Z=Us%B_3psKfo`?q*4=jo)@2n6Pk4gQGoueYw_E=IEt5Jj~7?{DXc=TT3WpsXGc z)*44@1el+jg4T~gmF~X!F&n@Jy_`m>yPN^Ua~`cFi3`dBF!0KZ!+?2tCYd^dt8r~s zA0W^L{Z4&pwf0{18WraGatqO9lzJiJNC+bZ>zu2qWn%9 zxNMCIS=p|40N=n8&J5&YBHps@?S>U<)APT@jUc|}&aIK9jt_F|D=5u=+~8PL^VyT! z`5v+-cYXbnPP=|v-IrTX#f*Za)cLjp>M0P9%GGwlbO@}}X?G>jOLGH;Grs!o6JUjq zLH{C0cz>=Os~i&S1dKI#J)dtx0j|S^SdGm?Wu!f#$DH-BqdqL5t~`mfP7SNv`>4Ng ztJ0}*9O6au7|REL0sY{QgX^9EQbDK=(Wh051dIRoQwSXD`*AclM69?W8lGfI6?I5A z>RbxmlAQ4B3EMP!UKW<^~|Jw>LTS4&B z+4pFAETDEcnt$7P5f};+mIzkGd|DiWAF~{DEg(u+zTG15aKJ5_F6;eUsQTrSAq0*4 zcMPZD)|xsFzO7(0{3GLb#mBa%a)M8 zaMI%4G704$0U-OgF^gM3;~-1VyWw>De{Xix1LX^tpRrwAP2Z<21c*cy&IxCVd>EEtaaSt095glt(GSuC5$ds)-QinW4 ztVhpnGXrk@ejF@4;}sA6zhH9{wvW6T4waq`c@AP1$S2Sj5?m3-8GhRa=fMvM?ndXyAhltdJ5iS(u`jph!QR0ci~)&sLzYo=qw*nCJ;>4$ld!yQ@2JL(!kq~t z$M>OVQh68ljzRNCCD8n!UnHqyLpr*V4G)~;!_gQ38HV&IFgXc7Q9lf9E<|I2S-@nW z8=pqNMeuGWAxi-kLLC7Deq7;~11r4`zDUjT@7DjF zyc=>t$j_bz79)Yn_bzJXA>4@DK0=f#EQwc@4y}IHBkx$80~j4*N66OT4YYzg`w%Pm z&(G5SR)-MR6&;Q&Z_%=10lL+?19JI<1t~L>8xCjRVX-craWfdKSc(B_A~5@&7hjq9OqB4{#6@%|8&?Zi@|%_{FHOdo)77myv85=j!`%bg@XU@HkpNU!38t< z=8j)0AIyTF)EhA5N_UCP&Y1Z9=j~=96|X6o1Kan8myXvXo;Q-`e$+|BtSDkfx(}w_ z=N`7q_6l-S!~1koa8!SWS7&!7%#~GT=RGi_DPl-uuWhrb*MHqV(dw@JPTRreNG*65Im~#^wO&XFY3S zt|vEc4n}{y_Y1#(&bAgARRHj{2BGA-#guIG?z4#KPW_ueBHPn@m zgYRv{9XA0*rW~H@6^lfdp4AFSEuDG5D!2V%Ax_dxw+e)%>?O&7yCtxB+t z9A{U{^TZ>lHN7fUwa*;3t7!o+b`qhI&AmJU05oHhkIx%Ohs&KkQ{{V9`C$^)5*mN2y!ei)-JRrxiDfDZQ)hfxB1qoRs9i9w*z8B%fC8(+9YEVpMV&IM5Y6D=#7M54FZ86$a#H;q1!2yj2tZxc$yQ9oa8(ktW5rMIWBMq+lNtle zAs+}6hT5rYA<4ablOht-5rXI^5h#8ANR&5T3mKO`ykAPtEPw~F#X{$b+}m95)r>M3 z7606z=-qJz(){i7D?cu`5{wzT+IC8Aewg>uhiN?L6Srwmy05#NPo~zd4>>M^Qlag) zAwIQxmk%K_Sa2Exp_Y7=Ei27OQAt`Q} 
z!l@(gbIqNra~b}4h|-e_VW3zuP&d9CH^xZB9BHx7+*tbYVPU#u+#2M{#cw8pTowqW zPJ@y%H=DvvaW`=23kO8CNQCn&eE1D8_rN$y6c)XPo>g44owEJO9X}5R!T~N$yogCx zd$ZXrl`ypgQNK9Mi@hV(<=Yy?+TU_t9D>`$)F$00--8<3NG%G_i+_{%wG;y8+@23O zh~W#XsfZ-SnYrf@=6%$U#vED_HGVoIH2v*P6PO+EHW#q6S9%TO{{DBxlqR+Y_r*)A z(%ou;nZkKs>l7&82D#k(0)<%eqRnhHK^#gwLf2-tr z!_s=}4e(iZ)Ayu5w_rp2E~?*_0xXdGJyWMOn|AKo~BwxtJN8q{f z>P}GZZ0C)1J+FqWF(z_7ACJ59`$l7U)8nKY`5S5}hi5y#)6p^qSn>uQS@th0>xMsQ z0aFhxmdf?XfXptaP1yVtj6Wa7_%RG=a}bt#6&!8l>q$%aBPGQ10)h5opBZFEzJm;U z!m@>X;XQ7QX~8B_s65vtCXb{`94pq(=qv#==#W}SlKxD@6@?SBgA;1$Mls7jffK~(KVjNl(Qggt8Rc>IBd1FU=!`ECBAwJpZ zGlZhruk!APhoob=$5RA zrEdF=qrUJYGJZTXx+Llol@wr?!2zV|Yy(&Q_6Eh3h7|!@d1U;i!~7H~Uj0HcHqeP_ zcRr{{&~zDo0xI=%&(o^f5JORY0RsVjjG$J!9*L0`uTQ>fWK&2=$ElGxXnAOvx(?F8 zSbyf|^ZX$kDl6bh@4h`~l+CjzfX=Rrqy33-a}^W+t=wLs#&#=WU34k`I>xa%#3z?K z0T8YszY`U|cY#Al&pfX4${uLCpi<4wDs(Zg(=yg?FJwnPi+k7PhD1W zMbppFM3SfVZ>sZWYPTUhJ~GwsI8^yG*XjpJN`jU_BHIwvZ77MkxQbpX?@)9cFW%jq zb!;I!qLwu<3f2RyA|dKf$$!(Qaf;_M3Q>PUlnVU2tmzFRhT-mv8u#fTNzq2Rcy65&R`v`_Gpt~iEk@Ddi264i2sb_&nM)kRS7stB7y^fk9F zw*2#JOmtd-ly{jM|6S%v&rDc#C&O$d=vb7$(VNJH0iu@KRc@AqOL1SQ!nwJ@A95KA zJc>7*6b>WjSYzlUJPm1&`TQ0_qwqNeP{lODpQ<6x<;`Zq9ISkD6_W(iVKq2ANn`R( z<%BGAT&R*aS1L*XnvsuBR&i=J9wF*k$Mq0un~zDi!Qg{eW8jVj)Jlb$<{^UR@4iRI z3Rw2Aic37~-n{@~gZeceyIaPl@6{4HMt$<_EhbbeN3;WY%p9^C6LPxjm1qQ5keWPB ze?Ec44_*$E`tI5%YLTDB#`?PPNQHROTq0XriBU#r zm6$hV2yk^Bt{Zp!##Y*@dw$!Fhc4BX)|%ep48DYUKzl@uU#~X*!q4WqBRO3l2bs2H z0CHGf$M+V~g)h|0BDpvrg|+|H-k1MVy|(`@ifYMF(LkbSDGkQROq4OIWe(?k+s|{p=ZEtboY&dE?B}(g-Ck>bhU>nr z_jGC7uYSy2L|tX^i4h$5NTb*J%ubb&{o{e9fqQ@V87zCjqn}dEh}$r$&=sa>#H(Lw z^mUH*lVW9ugkv}#)QgYzW^Sqnsb{}UB}th zuawIv4vd|7#;+SXa*}1rJvh0Vx|8{YK;@6&;-{y>wcPG0PujTnO3o+}yyN~oJyf@1 z@xK>4K53n!&xpgQH(GmC`xz}s*JD}&RN6X|00TFT(*lAl^UQq7!#b0x=JZChg@(^y zi>UR_$fF8Qs?>?ak7)}s=3VO~VLAtQ|MJJc0Ft`X@k@Fyy84BTdE56?XdckDC{^Yw!1UkL>j?L9P1|9y;^NF1 z`_7asd4XPX?q3ItV}3N?;-hD6Q9A-|$4U)4@~rp$clcMvX@6j{vSjJzvYpNYBs^ef z%L|9zL^=7|y^gAXhy6l+q69v<{{X5m(upN-f6kegjsJD=ORDikvpg3o%bVdJSj;m3 
z+fIpuLuC1e8wxhkrnwBam*iIL;yVr8;RSBQet;R*Gz$1}pwunjozw5WqxyF?xZ?tF zw)v}eo@n6djW5*WSA)bw#r+>{`RAC$eB#{8LXPx2OrBQ`&|$m8^RnP*zOn-21G`>x zuiPdo;TK3(;8$=d7?dN>cz&?J=6se?VgI4`%)tznQTv zugtNPL9572hi>rc+o&UTS$5iHcxjXtbcVQON6R=IwWUMzD=q;*tyVkJZ z6S}O1&p^C9TgbQycg2gE==$hj8(QvZ&sOfs@j5>J6V1 zbp0o0VTYz_9@L5Fl?u-h@(DA@ezlb7H3lIH?In|hN4F~mu`iz=CJE8q zR83P$A66!=nCB|v(%c|>s$pAaCQI^&{iQoRvlF6X%~u z-bk{jlZ6uz_#AnN`w5|+#2+topEy=XnTP8#S7`fDfA>g@Itq;wz{vLt0}ZxH!_qJW zyV%3QzrtgWH&nkIMf3HH^Y|ljzA{|;p54&Bk_usU?y)U`1sT#dMZ0Bv>=&7yFxmcV zo(?1_FVobH_3rT{ryUUS`orf#MkM~d{d)2z^Y{IMakczW8|Bvt0Qk$Jp7C$eDb%&N zX_96t1)H(VevlsAbC|poTKt)pty@8MMhZKF=8RmvC(1XB{c5B_Y82xPBUaM0xY5eW zBedt_c0qNqoi)h^4|0nhqWQDhzVM1NH?f7sACG8kudau_yS~uF9dHXBm2HdE_Di_b z5W?BX*V$9cZh@hd{6{iIyk-7r^Xvr4AeVElm^ywiz3jcYLSks!ssztVHT_mSP_ifC zQk`^sUf-q49b1JR8@?=+7?d>t-o&+xD#%>A2|_`78jmu|U`j#Q@p7na-<1h8L0c}T z?6@s35;M*=KWP5Q_KA|&^rxS)&r??62t461;V<4k&I*1a-c0$WR-#rYRI=B&O224E zO1txonY>bd@2F;gcTc{z&Sdf>Z$FIyKE&sUb_OmT(JuQYew9g`VAHBg`mjk&;&P9H zm};2W+h_b7UGl|dAN@^%LUS@ zpLfixv%2-w;bG4n)@L;BqlbM5*Uo-|ET1ZZyuKH-2 zs8>ovYEnqN%0@43w)+XySMH4xFA${ufyYhKt<*{VtmWF7ob!O62H z1+B9@#$1NPjO^JPz?(ULeu%`AuaO@e>K$N6CL(`tk@z1z5Lg4)m?A&_(T2?>Z(f39 zY)X5pre5=VwN$e1tmF%qBea%>61!CN?`QZC1t1WvD815hwWP<<7F5gID4u)7W(;^@aUh z{dxm=IhL%yOXi43)5(Ui<>jj@()j+c-Y3ZIUMA&0OdZOc@%mG0yQc=8k`S_A8HtrV zEwkfqRS%k@MjAD<3veG65?&wG|b*bNXw_FD3pF_fh%5GbCUS)*K6{wsar+ z5Zy;XzN)^w@rdSvTc0}bMpkkJMjyV|5kLYw|9gdVZglu^Z5{sf;G_BwbeZm)T9H?m z+w*5|uAJX{=af{kBM}L4ls9(0yJb92$F$h-nPs7fTEnc^%qIJBAwS78J*!{Mpl9I1 zMJpT>-C`jkoPL>t!fHZY9c9FyxWSXU`bMdi^#J(V(mfW4S}W8+&4I-G?1BfWk z6OtlWLqfh1q4v66$tE2cBMe44uOo4H-6 zbwQoId#YJxEYU;{;A`XNZ|xB%Zl6!GKC>p4|JKhGcN*f>=cg)?GOI`<{qttEE(tZn z0e*-kv#Df*qNB!vTe`ilKgLdo{BE{>h z`p2$>y&=Cp83{^nHp>9?|03bqPN+i9!Qgy0iqocL+hMmlnp`^aTX=>u*gI@*0LXDg0CCe}enFlcJJ-rtoTDqO*#e#KzQEy(wwcr#mr2;T7( zIiHFk55PkPq~NBon#;#~+y5z?>`S}wXL&8KD$4^jLUBV^qJ{1!ZGt2VAVYPEy^ilpA~0<+5FS6|9UL$szp@M5p6oj%2PrC z!JIQn;;InBMBFijX;TDyLNAIuMt1>QqxOZI+=D~^v@bg1y(Lqlxl-C{w$5RYGyV8N 
zwF5nEA75RW*8(j<;bsYv2u2blqb28;?T}(rue6eCsVcUV+NE#vc-crX$*YL8fjRI) zY7b!80+GT})~_7>}Fxd;Ww0L?azyie^`bP9=BKO?0A!4#bUWJO|waN7*lj zfeh7YNBA~c>u#i;OW$DLC#3$9)F2}H!fmrSvew*j6gS$b zap&~4EOc2XLFX`+XG5OaSO(>t_Qy|Nly~;?G~(o}M&<>H+phJnC|Y${-Ou5o+8flH z%^>}XqfsrC5qA8@qbpo{V0#47!ylU#val3R6xv98YhJ+fSHFV!my+$=B!{OoXMY8+ zXd~DB`($Qtl-9@CQ!}ucLzDy0=$_1D-k^9!pX-FL`w0g?_FEn6lGGFG;Ah>)=>wsp z7mihuPDoVK>B&8Ng$6fI3x?SjRLZ&MS$(`<`C3pGx8qYuO5qc*iB_{ zt=P&!-WSLIid5EewNHehFAcoOQZ-f&pbBKSGbq)>b1NzHuvEk{J?Vksg0ST^X?$kr zv^$DsXFLFPODpSJtDlulP_EmA9BjhV|c`Tu?sUE0G_`gx(hmWCneTeXnXXf zCeu%Xm-B|Vtx!-X(dIYOlvJP|zpvs+cjVCe29xru_`X15z`1C4>n*Gkc6xPM(*9(w zUNdhTnKfsBK>6YZ7gVzHm$Y*we7JZlHq4i5?* znzq^SEv%zK#b$xl^Q+Da_~hW`Dv|uyXjShbWx(w>N!q}`_NDbGxcw0Ee2ZGrZ0p?w2D&w@+oVN6aHVWR)9_s1)9dPZ0Vw!<97f%QjFr z@=@06YkQO=`vv}C2}L{P*&tG|F71aEn*m>~>(2#Y#Bx)RUz~7LQBu~&1OdJUf^C1B zHU=!_$l?}~d+x`hSk4n7q%!g|*?TiX>Rc60bs*e2q| z(p#SM%hUk|#}F7N%us=Ix=kRz>WP{nVc&*mqaj z<|jgaI*1EH{~J{hQ(K_0#qR?2j8i)&FM8f2iW8oYm8Nyt++!AW8q8$T`G~{3M54&K zHsxo8*8Pc6u?}4%PC`Y>VqiAf@bQt4g-ac>dWRPOI#8S=AcwveXP*uqHmzGvN*`H@&^E~|H(#sy@z=O;W zH-B4R{}a#0kgySy9T*zT7%FL7+bUw4uD%LvfE${m>`>!7S{1=pE^76t9nK%`yl!mc zrueID*`UCgGI?3s6SbEiAv@@fxNC6xop+d=UX)*_Gjeaw-MEJrdo2E#9%lwG%7yk%+_MAacHyLD-%Pq5#_QBX=|q^fVO=7 z!O=-dNpEhG%XzT*xo=4X5(zOM+};`bWslRYJCPqbdRH)+LId>0>(>;IX?lttstS1_ zqE_1Vly`oBp+#<@5(wC-OSA@lKK_L) zWv69L#H~KLb4NrCGH8z&3h0!4yKQ_R%UXUU;F!#It{{O0EmLq$i@Ml5Klw6y-MYl+ zVBnYXbzYlJfe=#4Z7xZ+Nu&xjdW)%MI(s=6*ac^;`e2X@#iW zkD@s!<-snK)0Frm)85oL_2L!KZa6VCSx2wB{B^2L34e4}X>-1m(QEZO9;vF|@|4zq&C-aowPOF@Ey{Fr@;#< zZZ6Tob+Vktx%i1$9H=VspBSI=-UdmL9QbaNUTq`DV5%dPh8C%Kxy zh9_g(B59H)bp~*XU4z27X?|)uW6`A-4lhS-x~ele;`!no11ER*=H%=v(W;1H$^I>r zHR+y)dvr^=zkc*$FIX#b56dRoa*<%{v;9cH%(798mssPtTXGNPP}4^^Yb$kRX1 z=JVGGaZD@h9m2&sIi{wyZkw7@I@4N1KG<8tm0qJCsBMoGnWRs~q^!8mP%$sdxAMyKk&*=>4IMR; zR(sRmaMB*;Ja7bph>kB`VmNvin`nfqW@ao8P4%3HZ5c1e3}ah-_r6ABO}&HzDig=j z%*X54W#bM4MA$prSGhwvz`xmu&)`*+j1;YJ#I&tAs$%5QG*j1$HP}>Nh>ZI!SsEYE z!nyR`k)kPG=2iXsrTf6p&e6T1R0)HA;GfW|EWPmFQhyoQ5=yvLfD*KuwxHIIM2FGM 
ze(%-0ReLdG&62?d&+v-xKRx-(MDLp;2$YfOyj6zX9Yn4Y`gxIlzR`v~9%t7X2#g**}!h$|q%2SO1H5&^ytmbGC4#kc{(ktG3C1{H=DcA$;{&y@QDs z1^sv9o(ZLWXNcvuLhBQ~E3@USAFE;Aoz~%z`eg5zec{9pC+Az+|;j}t3)kOQ!^ZNuZiCc ziaoplf~C+UMX(~9yJ-p?=15&+T#;Jqzlc&5-r34NO~&iR1Q}@?4idrJpx001Z3gBBHt`7HkBWpypevKPT#*DL z)GbxjvOoC;*jfSRt}=0&!=gy$`^h4r1@jDT%oIzmc;WK|ZAg8q=a`i|AaV53kMwRV zSlmX^Z;B6(1Rpg>J`i&Xr}82Me>=R3aA_D}7Rz0)Tca+v_r{YM)s}}p0#qmV(Wosi z`%hS3(NuQy640?ZMQg)KFf(HuZQttH99xV!H*xVNem^~O-g#Hm+g11_t(t)E^#WcZL!{3 zP2{?y&G+$rD0Hdip3}8(`l?ULV~6|CRt{YwOaMmMOCtBrH{gbgwfl{`V#>R@T1?Cf(-?eYbv>Dc0xe9w9AsynPQ}^%0CM2x# z@@alpedRfsGc9(;^w8o+DB~KjSsBAUNURAGNR)kTaR5>5JBO$gerSlrlxgg>4<3u3 zp{dkvW(c{;S{{tu{dI5i2`#<7bvGXOn#5a*OVw1@R|Yl0h|rKIDRtf=-_^msB8^pg zvuFYB_ei1BqWgQh9S=_c-Q77L#;Cqx4u-Lr45-c0#ea{6LVF0gm_ul|Dp{5CDtzYd zeXm}JT>=Sb*vcZ(H3ART1%WJV9HQzs)cB_mZ{2zcsbXF4WtpKbx+3&WN!;4o#H7bh z2j!SuWr`}YvKm**M%1dtpGnMm4v|yvof&IEj&yx?fqN!jKRdd+f<`3+%E1lYFp_*& z<0PCZZn4(egjwI%&SF&%a(na>Mf5_~de`y4OP*1eDb_|mqQoa)NR?Z9*z*+gSW0Jq zTbGkah|bShMz_(V3coL_5P_6!JRD*WGnQ8}yh1RuH;u)I-MVEd|DTULU7KQ zH#zRnThUOKWGqvNE|h52jHpegYiy#+yTx4-lZDRDR;C!3)1mRv zKDyZ-kZ^9uW zd*j2NX5$wY-Dl#gWi>w8OV)ur&9-SLsXHF9)GxwDHqmv6TU#jUHr0?Ob`l9f!a^LW z=e#Q+JMH0IyfRF@9821|U}spgwn5=QPkGdrrB0fv&f_agpeLTwFB4~E0Xvpoz_GC2 z|42{6GpD<<1Ge5#tfX!eL5@|4R?g2JFiGDlKvCOmtO+iJd}6P5m(ie(p85bue5@*s z3ht~?cuOoNR^a*m`m*!Y%dF7;?4Uy?$JqKM#^lfV-~2!rPNq~R@ zTUV7fN#fLqhTMI_C5KEVK)6WV1xKl~g5leI8DjUx`c`7=l~giGm~h_^5NYVJiv~~? z@JX1tq-lB&Vd{xQc}VxSVZb+}0_DYknui3u6b^$d;757TnGJ2(m2&MjG;40e!{3x; z7w9hAU8gD=oM!t#T#s^Xf5o-NT<88ZmxtBKDuE%#bCuaUq@>9RXoa@a{KVCJCUp0)Kz zge6{sA!W4zh4xw2?@eJ8!ti^@w#~(>6E&)m-eH_!X9#}_YQXt-a?5Kr(Nzw+#{r-; zudun?L4?J({(_VKQ7FGXCA$XF0m}n*&z#JJm>MDr$P%(eNx*?q^{$g#XA(f`@gQh! 
zj~Nt+&(Us^-~fTlime47yq2)3WlurwcjL zL7E*d5fHd}YD*9D?CRq4P~S)M3{3qx+88z}a+E^0)PHVwPf zYU9p=1aA5>1u1wx$zAv4=d&Rn6LR#8s$dfJts?Laxc4)<#S}!-KHD< z&(`ZHuk?Cuu<`-|;#l$gm8u$~YC+8(N@JK{w< z&9q!o4z;MdDkc5HyOAoV$iLh*g%?Se!qk;mX1R)PyPr)*TC^_J_$~X@R>!``ncZQi zVLF@WvUjQ-^+$+?W%kQF6M3trT>av?eWvw=!>H5wg z5kRG>%4`6U8!~FXR_k+>pmn^a?fR8!{4#$x7%{CkX{9Vk(7Lu%?znE7HYk&UBL}(H z&%-bN#IfS=5vOx|oQ~A>^Mw-00oQ+mqU~E>zTn`Qj%TMF8z4)eh^5j!G9H08qouX5 zcJ^Aa@kwpr(<)u$vNR>);%-4j*A#LPk8-Uh7`8qI)i!LG?2TmJ)qSCr5ti%=47TC- zDOd>gU(d68H!_WJof5CYI zGUKJpa_4;b5IVRskVPH!ymn{uAV~#idW+~{0SCmzoM02>>bpY9N(<;z z5FbLlEv_~CKY_*k%9u-YU;lu`5R1HP?r=MSDMZL4{DR9DX!6A0fnRV;>vLS+m74Vu zHWRs1iqImrYl0k(hy$!|2KEG`aOk@j^#i^Ef zq^ELgcM0d9?{C!V0k1C5B3pE9G(8mE;s%SYAKIpBQsiKw(Z2jcc{{b1(7WHsKU? zfBk~^{)lpgEMUYn%L>K_{)l>Yw9{Z`F3e>}>=KqNp7L<*vEzZ8AsH)_MhBn9f=(BP z)$J_QTj$AJvKiFYW_Ys(M<2i2h*tht|HLnRp#rjbL}@Boj(OB&H>K+X$?8Ni{5>Wq z1se`Ic$CfN1$ms`fP6u^FzXeHqX?R!Z#vR5JbP`yY^n&% zbE$#s*LVV!%o>PFU?~bBY-nkbAy4Y6u zQ@+B<#j)hhZvhQ*DiZO~oU#=*>*PgiE!ZEvBHC^2NbpRLG~Q0*pkycixPbA4a`Pbs z84Zt8DVO9b0=0=f|8^C&{Wr1WQD!BP1zmqdEIc{p3xmcocE$SOIB)`}Y?l!|X}k zUdWEO*=G0VMi#Y%Sb1O*{^_Zuz%O3=neqKMaSwlLqw-p)Bps)zj07J;o;sa{|{(1P6@Go<*Wr<6XX29C;5^RfGF;Dw;t<#n*B*t5Swcb~34OoZ=L ztkJSH-lNIZK@>j;D2-qC$A&li^ThdnAVsf^d+iWh4N_{o}uLmzQtUu)pWEB zO;}V4-75pjOgf);E`@37?a${vjELR*(f`>fBfXR*U+ zrikK`Lw2W(p&Pw;68Ql3wpYZScL}KdUFRywB)nLY{skO67oKK19IyyZG|VPZ^~#y0 zZ-M9yBELz8pW=%L-%Ems34WqDJ+rY!X1GpG`Ji!Vfc19c5p|^oCi_~ic~gi`Ir_By zDOLVOl3GFtMc@pHK6g@CKuNrzQGbp}bR8%__X53CDLSxxYK825XAMY~43d zNT;2#=XXBf&|8Ortp(4;z+-Rw-Uvx?^mXTndB+1P^es2$r(QZNKT8fyju?_{?=}*- zX@2jDHfSdXNkU`2n0~?qEbKJpTEp>7Ns&?(3`I+$zoh;&SM_xbk{Jpb(h%>T08@t` za)bo9@TUzfGE0}lqCi$)q)fMXP|uwmG)dAUr5#JzaSnZuh0ns9O|)$eP>c|@&tb5R zrz9dHk2$=!`{)G8iElc7XdCoq)O*R|}WaX(>R-Xr(&#{D$*0Ngbbv{qitxcW@svX5$ zmdq#|wGg_VmbCw?p@P9HgV%7|%vjRYb2Kl4csj-Je5Ug@eVa*q9HNAU$v!<@?E^*D zVJdN66RGu@#>Onujw2+Y7W$-XI+=ly+`A!rA826{?Y)dqOD!@00Csw5kClJ7|)1T7c1@pyoFYwkpY%^$%;)Yn$yuJvQ?7Rb?o$#kUmeyjv#E-q0)cWKIGwEvZ(qzL(Y&R 
zY&AY67!X8t4EWh8{oC)Q3I!jjEaqV22>n2mDN*n0(im6wNaa}>S`w6 z0zwYBfh2&mbTRpTEf<{*XMFj6b^73e;__H}pG5!LN|Vhr1fS^Akbm9+!)RgXeun57 zvgGnpSKo#gFuSx6`f1hOo6uPNu$MopdSgB>O=|`#Q7u)k7u_WaZR9rANAh=_&{#n! z6H@-+sBtGUWzuBdmvfR*4jg1-8?Nj-hR_ULJOv_N<7;oXGbDrL{?-^tj2s_FFjg$@ zM=!HFF|o;UbSPB>;dTKQ}oI_K$gFGI(H$CLa1R<_GOUC=v}+-(|VB73Zy zPw6zAv<3Yc1zUVb67QGAyp(J906h?ouikoys{C2<%xdgLL5J`T$4d%Xf+S^CExooS z$f=lTM7wJwF3yGJw#lI9vG`$!x2I{wzDizBi@p$O(gUzhm_Q+W@{{nEF>EiWWEAer zdB@NQV)tjRpIklZ#ea@up)~Osat|}__1t|d*tStWs%s6zKn0wXYbPMRloMCh`5aE? zsXZ_2(}j-IWRq*p_hK9{WZUmk$5Ijjs4gmPXFqaX&u%$Qwz7I7&CulC!qI&x-pA1W zeNkn6AKKRILGnhYr)9pFRiw)7(GwGWme}`U=jWW_V!F1BR6lr7SiW@>m@E5VY}NTk zm9=}Z=nEjO0+3?H`N>TQZ9C$VhPb?t_-WukTP}B<`0$v?6?HY~a~3iQ5L^`Odm7_F z?nEtsfUqB_rk9eAn>8A16^iyrSJ`oFK}Mdn$y)Le>je`TSI^Qe`N?c8M6fb!X~!a` zQ`U%<@^7pC#z-;+{Qry+Z=*|H%3R!)SYn~BN#l`nsOI@z@@j2g%RPj&KNjAK-p5_` zFzLPqN6td*(3nPVTw+y|HiKxje4_EKaImECWy-8FD0GDnDs~>m0|eOc36$t+he`6s zD7s~9cBt72Jev)0`2tzKt#M*)UU;nBFAvDcTJQ^hT1_Yg^?dHB9Jd(MRY8uJhrrgw zjyue{_uk6RY1S*aw|Rc*UCrpVeRt2&O|WNbL>9A`6M?F>G&9_?xR&4aOQy}~x7hIO{Eyo7s*hLe z?W{t(RL{x=JK6AhhEi98Xn#7hbR+`BPK3C2bI-phayfy;O-rv>S61z)7ahszeQ`+7 zhD{V6n3VFZuH!NtOjN!vyo@)elt*W;IZD<7K{!$I6odaM`N_%4jogRXG(*09^_*$0 zjCf2EKo4GJuA(X;c+lxZ(MHcqSUh(mbRTY<727y1hP5WY4kWw^RKv=x<*lEiC)EHmg$h0YCx-F zY9xKHNmgZC-H$3Gisx3TJ4Rtu5z;himZ|ER$=!-Mm(}W4&`kb12D2NSIi!97OSoVf z8Bd;$^v!qeJ!(9kO%#qwtv%M7xRJs@ZgbQgQvG?PDvT5iju?a)2p}-Ir%tZ;+F@Hw z8_U^d8l%!$h@J-c<#e9rP`zcqLgg>>w@CdkdiZNfi=`kf=H-X0*cl$Xtgd*w`_IEe zy1__wtRETpiY(DNRkc9)LgD*{uMD1P+~J~a`CG|&#mJ+sh_CGYGyAoF%8)}#9Sq#x z;Hw!DKK{SHA;N?2bk1dyE1G(T{W9|VB>NxF^MNCZPr&x-&5KB7;5+CA7aiB?l6}4w z!OW{?xhZTu501p~P$*#xk^?knxNfiPDneHl`if1|3}26OM3w9S^#EC2_>{k~Yy|Cd zo`V-9ObGXd5~f=-L(1@6du3)s`e+x3OH=15QbgrR+0?(TL6yy25Z%ZoYHGip9E5{& zDYE;{b_A@*C%6xOD#^%@%z4XMA{H(Si~FEo?KZDBt4>}#NeRPh zRjoZxN^N>K8|r)a!l3f+axp+V2E%0i?8lDA>*dKSbX*_*OX8I)A(EpLqwHBIZdjip z%*ZJVjKn$FjS|H1->9{J=ltag|EhQ9B0eO^?uUN=k?UIyIb6H{WlvhN9iW+38fB9L ziI0F>GXsBW?$~9$Ki7#kzus@md*cWG0XuzfFfdWbJ1#T#k;D 
zHS9}nQ~31B^T0h@t-2zP-j|wzO~b9LC?piwFv;76SX;=Vp~1&qzcyqOYAib&nMQ5C zfnFuBKwK^k$>mqU)7WXn<};iQnJRl=>~&w_#X>S7w1OmUi_a~;BNkTC$70Y%kF~&^ zSl#Tuk)0-An|Btio@Ki|)VR~}L@T}X@$VD|dUrG)Q<@0)8ikzQd)CAlr) zCcT%alu|&LS1MHth(2~2EeL6bvZ8-+a4mn1gm}8hlH%YM(k>x?NJ#qh(o94|jGP@O zD^+Q0^sm1QVnX!B^djk9_>7622sD$ouM{2H5)f;LrIB=Zw1ggJ6q19&zlo+m@_Fax z!l|E1Tp~M9esV>G&#=lmFB6op7zFt(my`Lmry_Mu>R32Ma-Rc2QF-4Z$*HLzDsnIP z=F7F%K??BtMY7l2!gwXUs)y?H($1_VaTNlTg@{4fuHJ#9&X%ChfYO*EblQPvlfkb~ z-%h18BuvYZ0iypQtuDoY;VYH){^Q;tsyWf^A(?+T<53;S!1N;B4nAVnABfdnU^x!P z3$TVmq}v=?OesYT{y=vBohtl=tcrB@c$-M#HyhD7h5V-}>^e?5NMOG32%HG#5QCl` za07wZyVn)mK%hh;yUqzQw`cP;?t~(kjgD($hh-F5Gu*ULKRVzevTp!48zEtQHP|s# zEk;u!oxlE%xfl;LvcwEwiPfo6pQ+#CvJg2ck^pIs+!ZD;SJShVUP6v9Yp2{&9ClaN z_Q$_}3yn(QW(NnaqPDS;14ESRn)3(1}PAo+^C)-?36-##;Yu+`mIB{@6QKJ%9GS&Jl;KI=Z2cCZbgRcF}V)Cib9j= z%HPtIcMixN`w0-|59Z490C5xAV~aZ6hbrX4bOzC_{yzEx2*bmy264@$+Gq?=A|gt{ z)@n8*s{kd8zQ&+kVc^L>!0ZW5r}m)QUpgR^RY0Apy&iwu4B*k942q7)yahZtSA^7I z&^{3Q!C9Y<(-O{;J374MxVCn)KyB5s*t=3U}u0SnD8EW{Cq)t zaCocvcO&IS)U}2&{0opB-K-iIn0a3rB2;uQnX#2^=8iIGC#n0;vCY-%U+g|03n)uJ zz%milI2Qgj)ScMYd@9YUWywfnZ-|i8@A~6FjT1M)vr~>=orO@J^_!d*KWb-ahVG2F zs-;a-s;sAU{u8uBtQm9BATS^6!h8;%kSXv-Se(7|fmfNpZj)&Pf%Rg9E*P#NdlINV z<}BD&pA!Rtk{;ulTa>};y$s4x>l-r|*J*R@t4T|=xR4}vBjyWRSBRWEZT2ds z_Wa(HBiR*O{}A#I*Jpj=J~OIF@986#Erfo`$K2gySXw}RD4K3n@P+*?`V;)nx$Wow zR&=tqQf+C?8W9k@%Bnjcm+=j}IhWPPrd2WU^{Lbukl7~FLa68n~ z)#shn*q)i@RSO2OfL(My^3mN0aF5DN7A`8(267NUhE22%ExXjk9`mOsLa=b!?&hvH zk|GE+`3<#V4vD=ZNTZS-Syjxnngpz7P}KfnCrNoF0!`8<8QkSNaIXZ?F1uoHHa=)6 z49zdwBD86O0t$1gfACW)un=zx*-~CQw?@}$Uq8f~I)hUR041@T^PCf)yD!DtVVuMP zPb}dhKUWz~%=3;txw>~>=mY^Lvnvs}fc7)s-vf{>FK<&W4<^8JC{QsGPe8v&_ z6}A~=NXJQu=j<<0)iQHHX>>Ogbm7h$)nHwEOYqa?j4KS!MGXCz}~JBHk<) zD&cIG7R4b)ix46C+&*pS#_%5a0B|2)3L_F{4t*{wTFF3_sG#RjTDnCv)EV6xU3k6!ckf zM0^H5)I~I-F-i775DK7i256?U1zp6FzCn?VJB;|(=Oi-lK)f?VH&<}kI_GG z=hz00qj8bOA<$XkMa-)DxA=%&BF(5j^e~B|o_I6f7DSmsb+W|=vm!{^vRK?si}riS zcLZTGm(NdGFNP1fm+jmn$F)Y0P;OTG{RF;xuMG|nPLfjw-3P9GBa72B5}yNH+#)ki 
zEgK~_=nzTBqNzqgM2rgn8pL#hIJK}aLV;)v!k-gCX@-Is9EtsH7*@jp%|`(FQ2P*# z2??cCh&cfZ#gRL_!i^XCf^tb`NtSrwH7lgMajhnKf#i>n#lX##f|sNUCNWvy_H1Sv-d+w33$j5(4KUH?)w4jyg|aM8F54D?8{&w6)1O@5MLRbVlGL}b2yHwtDp zPRCAP*3i#?>k$QolrN72fR|{p9Rt47T9-qBm z8nJ^7y7f*@Z7p0w>~rggtvJqo$E)H_wEX|prZ2^eBU}_>$Whb46c&l=)j#MNDB|@z zfLEA!)-u@d(XS--QtmH_`@&C0 zz?iW+D?PtlXxELb8sx$!;NBro%7JEmt4`nxKnFW+P=csglEt=ubCNY>|P%mPn%ZXzL^3 z-e~g#5rB6~D$lr`?5IaMBXOk^HAQkJA2(C1)p@YQc}L2yA-w$9NY?T{b*S=$cO#3J zLh@`W`V$B?`iIC=<||9nQ2agqKdu3-?`g z0hv8I>^e#VaN|Msxaeihb?554D9kJqX}?UI zRrvLRYv?KN0$DnK^Y|6W$`Grw8Q{IY6j!~xbe`Pzr5MA)7NAsTzkiGK>IY7kFioB; zocOmw`7Dt!(`)T9D2qyr7iY^E7@ToZQG)n27ErqWLHA}nrQ(};Zvk27QM%xpsog*S z(+iP9;8lsvG0H*+K`86gj5GLmyZ(g&7B`+$O1(_;eB(0^+eB+{rt&6x&(5H!5nYVy z6|^nHiZ)`0BoO>=1VcD?b+@6Yvy`juD3l zNo*lDpTl6pPc0ZD#y!VXwR0WToWC=o6RK^Ju#Y~D!Vy3|AMZRZcR*MB#KD@z+5>f= zcpWd9l`=ACLCQQQ$nD3%M`u%`hB4tZx($|PoMC*wsCo>9;p3Tf?{c;UtB;%}Ic!S& z8h=IOsW)$8lzl;#a1$u%!r4q9f=b`-*$p`FRk{j#eC#J^%dt;Jo5E7P6RZpAn9acFde)-ufNx!=5413Ugk z>~KO|$ne~+dwY|u_gh#ME!(TVVdrN7thp7Vox{Z|I0Qn& zsFN>O%&sg}jDH`bG6O-}1>%byc}X-Dqtj5cbdPSQ_%zG~Y$rsYNWgS z5xCC9m3Pu7>lt1*AXt4x5aiKa2}`!IJPD^vpyAQN3ur+j4^C8Mc0o&U4(45F$DZ;V z>_K}Mlf;pqnCA07N7uDsR0Fq52{K05`1`kng2a!S=A9|qEJgd>>tN%#Qm|&~lhV+0 z;!mF38UL$aO<*C1KT<0i*nKDEc5p46TSK$PnR$_rJ|mhRd-|3v;#@^hD|8cAki-dbGeWY-Ezzsv!>0kURr9TUZcbBiwP)Tw zh|r98hA4ukdh3Xpmu=4-A3`}^==%~fO+4_DcO*q8|IGAQ3On|gZhLn_uvoyFq~ut= zyH~U#q>StwBirEyvU<$I+xRoMmtk9-OPk-MjIa0I=E}ASWc)pj2~;k1x4X z^AlnhbMVqsYZqrQHBc%B>t-MlXza1ne_364JwBKWOxEWyolV zeCxdvUn3qVIy_b82xSFIu28H!4QO*iUw!!^&q8SMN?V}WbtsCRd8raQxh1FRIZptO zert7k>xZ7HEy!REh)$>Lp3`Xi2c46BNnm(Bh0h94z0Q%oOojgRr*nOy)w?KPAG(wY z_i$UE!MMS){JqVNJ8rj5!mB_}i;7pqbG7lje(Zba(^n(u5-bzo2saT0VD9%K>fOl? 
z_iT-y{no2^m2><02Gjrk*O`1N4wsRc*LtVfn5IT!=U^Xv%y2NEE~K(kSCZU?t8ODV z?WvJb73X!0ld#b1=#&!_(?wVJ_DBw=NX8e1qo6d9O=5 zY~$7#X3%Dd44_5i+@8DH`4eB(ON<;njN-ant@PoVP`pTOg3oA6ex*uohgRR6M0TD_ zx#87a3;7v{H0_^=-@Sk=r);Om%NFcOYj_8T7%jCRotf(n1#o(D4^EtgDARmdzT_6c zWq+ZTKu{hoV{-C5N<0FYDU0^&*E3hzYpNAidt*a=!@b6rqeqTuA&>BJWkwhC@OZ5s zBoum0ej{hDiO{RL`!`=2S7ot+wq%n$cMUf5-rcuA|7@hu;AG+ zc_%(Aw~^}`qJD1L@7b`?WV<1sBXo3wH!}AA#<}-pCU(-gR_g#Bg&sRSQ;N<~>T*DF zPxqOUtxm(+j=Z9{lZaP*+}HBP2Qubq8u5Jrl8YJ@DtTAO3r!&>#o|xy1)0z{*t7h( zu|~5Z9ZPpi+?$;9UZ6cV^qd%%omIo_14Srm7GS|duOPs^*WvJE!4GvS7_ryk(rmhD z84^{uUP3?SebDH1WwwO#z{W}0_{@ZD@tSv*we<8qvSHJr-PyJpT~-F#6S)p1{?i%Q z7m+)l_wgIbT#_YVqp@&r1kzJD0JKpSZApV;aZfxhN(Mlw3u2jGi-l45^|(c;8@v+o z1fSE^R`GOFSJLAytgpY>F~i7A9@`U9?21KvcBiL>KhVw^ehy|T5FprD{N5^Dety`J zDC?g*fLteXAKAz%n2mGv2)-E=8k@bFg6!kCofT4qyjz;3Epp1~3MK;AYBQHf-IYWY z92{Y&8N#>AwO5e@0&$5j?R)wOpe?teb}al>=0RNlf6uYV=*lIDoM(%p3`krw0*ni) z%tR}v{;Kd`XFzIo&L&|movQje%fV&wiw^Z|tjZJ`xtPMRlAosXD)Mh*y%0E88_jSq z@$$Y#-|pJ;S+ZeMdkXtgL|1z}XtJ@z%eZOf+#7j`Te!DZ+)DqOMQdW8U+W~B`dXNi zI@HzGOEd%)71E9v9LL_LmQCs7fvw5ZVBc#q529*D&Qc{v^7gibmpJ_sg_*$pG(vq5;cksb!l(tx+vwN`br{b%$!<`5dNn z%a3kBI4L40pBg^4kacf#NPYnc{AXE*>M{Wd zqcRU^%0p2y-M%ARF-RB+nZ!tzjSE`tp8#{NPYpAY<~Bbl!5rPPZ(~f?P1Tu;W_T7K z(b&`$lTI-LtXFX}bZX5x4mHh0oJ5+&yvq~k@=mv@42gC6NeUsxEBj$plHQ7XILo8I zyuK|-2@)n3{2Ye3vvaxRGm~l2SoA%G3h2BjGUN3QXBIGVrFw|W>j@8Y?lyvdCD)*} z#^gvzY%KCDAlYzDFE2fnjfTbd%ber}^CSU12=DUdOvtPI|Lp=W$C)5VjRi)N)yA8ii{Vb5`q}u3JVD-&iMEy9iXm#ihujG_Z}pYeL7<^rvzz*+OZRE`pG9>WZzW^pa|$JBm3dWgGwV z5LY>qhgsAFpM}2|7@F|cLMZR?H? 
Date: Fri, 7 Jul 2023 17:27:54 +0000 Subject: [PATCH 55/62] Fix to prevent issue with OpenConfig delays (to be resolved) --- src/device/service/drivers/__init__.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/device/service/drivers/__init__.py b/src/device/service/drivers/__init__.py index 6a9726315..89d3bfc01 100644 --- a/src/device/service/drivers/__init__.py +++ b/src/device/service/drivers/__init__.py @@ -85,15 +85,15 @@ DRIVERS.append( ])) if LOAD_ALL_DEVICE_DRIVERS: - from .openconfig.OpenConfigDriver import OpenConfigDriver # pylint: disable=wrong-import-position - DRIVERS.append( - (OpenConfigDriver, [ - { - # Real Packet Router, specifying OpenConfig Driver => use OpenConfigDriver - FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.PACKET_ROUTER, - FilterFieldEnum.DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, - } - ])) + #from .openconfig.OpenConfigDriver import OpenConfigDriver # pylint: disable=wrong-import-position + #DRIVERS.append( + # (OpenConfigDriver, [ + # { + # # Real Packet Router, specifying OpenConfig Driver => use OpenConfigDriver + # FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.PACKET_ROUTER, + # FilterFieldEnum.DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, + # } + # ])) from .gnmi_openconfig.GnmiOpenConfigDriver import GnmiOpenConfigDriver # pylint: disable=wrong-import-position DRIVERS.append( -- GitLab From de6c8978fa74739dd346889669bbee089e15913d Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 7 Jul 2023 17:42:48 +0000 Subject: [PATCH 56/62] Pre-merge code cleanup --- src/webui/service/templates/service/home.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webui/service/templates/service/home.html b/src/webui/service/templates/service/home.html index d08b78924..00feaff59 100644 --- a/src/webui/service/templates/service/home.html +++ b/src/webui/service/templates/service/home.html @@ -25,7 +25,7 @@ Add New Service - + --> -- GitLab From 
f39dbba94a09c498fc7f4288717ccc80ec2f8a61 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 7 Jul 2023 17:53:07 +0000 Subject: [PATCH 57/62] Pre-merge code cleanup --- deploy/tfs.sh | 287 ++++++++++++++++++++++++-------------------------- 1 file changed, 137 insertions(+), 150 deletions(-) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 1ecb039e3..5edc0c29b 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -176,14 +176,13 @@ echo "# Environment variables for TeraFlowSDN deployment" > $ENV_VARS_SCRIPT PYTHONPATH=$(pwd)/src echo "export PYTHONPATH=${PYTHONPATH}" >> $ENV_VARS_SCRIPT -# Not needed for the Hackfest -#echo "Create Redis secret..." -## first try to delete an old one if exists -#kubectl delete secret redis-secrets --namespace=$TFS_K8S_NAMESPACE --ignore-not-found -#REDIS_PASSWORD=`uuidgen` -#kubectl create secret generic redis-secrets --namespace=$TFS_K8S_NAMESPACE \ -# --from-literal=REDIS_PASSWORD=$REDIS_PASSWORD -#echo "export REDIS_PASSWORD=${REDIS_PASSWORD}" >> $ENV_VARS_SCRIPT +echo "Create Redis secret..." +# first try to delete an old one if exists +kubectl delete secret redis-secrets --namespace=$TFS_K8S_NAMESPACE --ignore-not-found +REDIS_PASSWORD=`uuidgen` +kubectl create secret generic redis-secrets --namespace=$TFS_K8S_NAMESPACE \ + --from-literal=REDIS_PASSWORD=$REDIS_PASSWORD +echo "export REDIS_PASSWORD=${REDIS_PASSWORD}" >> $ENV_VARS_SCRIPT for COMPONENT in $TFS_COMPONENTS; do echo "Processing '$COMPONENT' component..." @@ -260,9 +259,8 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." 
MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" - # Deactivated linkerd for the Hackfest - cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" - #cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" + #cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" + cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then IMAGE_URL=$(echo "$TFS_REGISTRY_IMAGES/$COMPONENT-frontend:$TFS_IMAGE_TAG" | sed 's,//,/,g' | sed 's,http:/,,g') @@ -377,11 +375,10 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then GRAFANA_URL_UPDATED="http://${GRAFANA_USERNAME}:${TFS_GRAFANA_PASSWORD}@${GRAFANA_URL}" echo "export GRAFANA_URL_UPDATED=${GRAFANA_URL_UPDATED}" >> $ENV_VARS_SCRIPT - # Not needed for the Hackfest - #echo ">> Installing Scatter Plot plugin..." - #curl -X POST -H "Content-Type: application/json" -H "Content-Length: 0" \ - # ${GRAFANA_URL_UPDATED}/api/plugins/michaeldmoore-scatter-panel/install - #echo + echo ">> Installing Scatter Plot plugin..." 
+ curl -X POST -H "Content-Type: application/json" -H "Content-Length: 0" \ + ${GRAFANA_URL_UPDATED}/api/plugins/michaeldmoore-scatter-panel/install + echo # Ref: https://grafana.com/docs/grafana/latest/http_api/data_source/ QDB_HOST_PORT="${METRICSDB_HOSTNAME}:${QDB_SQL_PORT}" @@ -411,71 +408,68 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then }' ${GRAFANA_URL_UPDATED}/api/datasources echo - # Not needed for the Hackfest - #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ - # "access" : "proxy", - # "type" : "postgres", - # "name" : "questdb-slc-grp", - # "url" : "'${QDB_HOST_PORT}'", - # "database" : "'${QDB_TABLE_SLICE_GROUPS}'", - # "user" : "'${QDB_USERNAME}'", - # "basicAuth": false, - # "isDefault": false, - # "jsonData" : { - # "sslmode" : "disable", - # "postgresVersion" : 1100, - # "maxOpenConns" : 0, - # "maxIdleConns" : 2, - # "connMaxLifetime" : 14400, - # "tlsAuth" : false, - # "tlsAuthWithCACert" : false, - # "timescaledb" : false, - # "tlsConfigurationMethod": "file-path", - # "tlsSkipVerify" : true - # }, - # "secureJsonData": {"password": "'${QDB_PASSWORD}'"} - #}' ${GRAFANA_URL_UPDATED}/api/datasources - #echo - - # Not needed for the Hackfest - #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ - # "access" : "proxy", - # "type" : "postgres", - # "name" : "cockroachdb", - # "url" : "'cockroachdb-public.${CRDB_NAMESPACE}.svc.cluster.local:${CRDB_SQL_PORT}'", - # "database" : "'${CRDB_DATABASE}'", - # "user" : "'${CRDB_USERNAME}'", - # "basicAuth": false, - # "isDefault": false, - # "jsonData" : { - # "sslmode" : "require", - # "postgresVersion" : 1100, - # "maxOpenConns" : 0, - # "maxIdleConns" : 2, - # "connMaxLifetime" : 14400, - # "tlsAuth" : false, - # "tlsAuthWithCACert" : false, - # "timescaledb" : false, - # "tlsConfigurationMethod": "file-path", - # "tlsSkipVerify" : true - # }, - # "secureJsonData": {"password": "'${CRDB_PASSWORD}'"} - #}' 
${GRAFANA_URL_UPDATED}/api/datasources - #echo - - # Not needed for the Hackfest - ## adding the datasource of the metrics collection framework - #curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ - # "access" : "proxy", - # "type" : "prometheus", - # "name" : "prometheus", - # "url" : "http://prometheus-k8s.monitoring.svc:9090", - # "basicAuth": false, - # "isDefault": false, - # "jsonData" : { - # "httpMethod" : "POST" - # } - #}' ${GRAFANA_URL_UPDATED}/api/datasources + curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + "access" : "proxy", + "type" : "postgres", + "name" : "questdb-slc-grp", + "url" : "'${QDB_HOST_PORT}'", + "database" : "'${QDB_TABLE_SLICE_GROUPS}'", + "user" : "'${QDB_USERNAME}'", + "basicAuth": false, + "isDefault": false, + "jsonData" : { + "sslmode" : "disable", + "postgresVersion" : 1100, + "maxOpenConns" : 0, + "maxIdleConns" : 2, + "connMaxLifetime" : 14400, + "tlsAuth" : false, + "tlsAuthWithCACert" : false, + "timescaledb" : false, + "tlsConfigurationMethod": "file-path", + "tlsSkipVerify" : true + }, + "secureJsonData": {"password": "'${QDB_PASSWORD}'"} + }' ${GRAFANA_URL_UPDATED}/api/datasources + echo + + curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + "access" : "proxy", + "type" : "postgres", + "name" : "cockroachdb", + "url" : "'cockroachdb-public.${CRDB_NAMESPACE}.svc.cluster.local:${CRDB_SQL_PORT}'", + "database" : "'${CRDB_DATABASE}'", + "user" : "'${CRDB_USERNAME}'", + "basicAuth": false, + "isDefault": false, + "jsonData" : { + "sslmode" : "require", + "postgresVersion" : 1100, + "maxOpenConns" : 0, + "maxIdleConns" : 2, + "connMaxLifetime" : 14400, + "tlsAuth" : false, + "tlsAuthWithCACert" : false, + "timescaledb" : false, + "tlsConfigurationMethod": "file-path", + "tlsSkipVerify" : true + }, + "secureJsonData": {"password": "'${CRDB_PASSWORD}'"} + }' ${GRAFANA_URL_UPDATED}/api/datasources + echo + + # adding the 
datasource of the metrics collection framework + curl -X POST -H "Content-Type: application/json" -H "Accept: application/json" -d '{ + "access" : "proxy", + "type" : "prometheus", + "name" : "prometheus", + "url" : "http://prometheus-k8s.monitoring.svc:9090", + "basicAuth": false, + "isDefault": false, + "jsonData" : { + "httpMethod" : "POST" + } + }' ${GRAFANA_URL_UPDATED}/api/datasources printf "\n\n" echo ">> Creating and staring dashboards..." @@ -490,75 +484,68 @@ if [[ "$TFS_COMPONENTS" == *"webui"* ]]; then curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} echo - # Not needed for the Hackfest - ## Dashboard: Slice Grouping - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_db_slc_grps_psql.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-slice-grps" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: Component RPCs - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_component_rpc.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-comp-rpc" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: Device Drivers - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_driver.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-drv" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: 
Service Handlers - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_service_handler.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-svc-hdlr" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: Device Execution Details - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_exec_details.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-exec" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: Load Generator Status - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_load_generator.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-loadgen-stats" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo - - # Not needed for the Hackfest - ## Dashboard: Load Generator Status - #curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_tfs_num_pods.json' \ - # ${GRAFANA_URL_UPDATED}/api/dashboards/db - #echo - #DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-num-pods" - #DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') - #curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} - #echo + # Dashboard: Slice Grouping + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_db_slc_grps_psql.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + 
DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-slice-grps" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Component RPCs + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_component_rpc.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-comp-rpc" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Device Drivers + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_driver.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-drv" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Service Handlers + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_service_handler.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-svc-hdlr" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Device Execution Details + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_device_exec_details.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-dev-exec" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Load Generator Status + curl -X POST -H "Content-Type: application/json" -d 
'@src/webui/grafana_prom_load_generator.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-loadgen-stats" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo + + # Dashboard: Load Generator Status + curl -X POST -H "Content-Type: application/json" -d '@src/webui/grafana_prom_tfs_num_pods.json' \ + ${GRAFANA_URL_UPDATED}/api/dashboards/db + echo + DASHBOARD_URL="${GRAFANA_URL_UPDATED}/api/dashboards/uid/tfs-num-pods" + DASHBOARD_ID=$(curl -s "${DASHBOARD_URL}" | jq '.dashboard.id') + curl -X POST ${GRAFANA_URL_UPDATED}/api/user/stars/dashboard/${DASHBOARD_ID} + echo printf "\n\n" fi -- GitLab From f5473b3c2bbc7a6a6d0758a46496dd6f1946d32d Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Fri, 7 Jul 2023 17:55:19 +0000 Subject: [PATCH 58/62] Pre-merge code cleanup --- manifests/contextservice.yaml | 49 +++++++++++++++++----------------- manifests/deviceservice.yaml | 7 +++-- manifests/pathcompservice.yaml | 44 +++++++++++++++--------------- manifests/serviceservice.yaml | 44 +++++++++++++++--------------- manifests/sliceservice.yaml | 44 +++++++++++++++--------------- 5 files changed, 93 insertions(+), 95 deletions(-) diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index 659ff7b8d..ab8ccc521 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -23,9 +23,8 @@ spec: #replicas: 1 template: metadata: - # Deactivated linkerd for the Hackfest - #annotations: - # config.linkerd.io/skip-outbound-ports: "4222" + annotations: + config.linkerd.io/skip-outbound-ports: "4222" labels: app: contextservice spec: @@ -80,25 +79,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 -#--- -#apiVersion: autoscaling/v2 -#kind: HorizontalPodAutoscaler -#metadata: -# name: contextservice-hpa -#spec: -# scaleTargetRef: -# apiVersion: apps/v1 -# kind: Deployment -# name: 
contextservice -# minReplicas: 1 -# maxReplicas: 20 -# metrics: -# - type: Resource -# resource: -# name: cpu -# target: -# type: Utilization -# averageUtilization: 80 -# #behavior: -# # scaleDown: -# # stabilizationWindowSeconds: 30 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: contextservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: contextservice + minReplicas: 1 + maxReplicas: 20 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 74c58b83d..4bf4d6247 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -23,10 +23,9 @@ spec: replicas: 1 template: metadata: - # Deactivated linkerd for the Hackfest - #annotations: - # # Required for IETF L2VPN SBI when both parent and child run in same K8s cluster with Linkerd - # config.linkerd.io/skip-outbound-ports: "8002" + annotations: + # Required for IETF L2VPN SBI when both parent and child run in same K8s cluster with Linkerd + config.linkerd.io/skip-outbound-ports: "8002" labels: app: deviceservice spec: diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 8aaf99dcb..2fae0c8f2 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -98,25 +98,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 -#--- -#apiVersion: autoscaling/v2 -#kind: HorizontalPodAutoscaler -#metadata: -# name: pathcompservice-hpa -#spec: -# scaleTargetRef: -# apiVersion: apps/v1 -# kind: Deployment -# name: pathcompservice -# minReplicas: 1 -# maxReplicas: 20 -# metrics: -# - type: Resource -# resource: -# name: cpu -# target: -# type: Utilization -# averageUtilization: 80 -# #behavior: -# # scaleDown: -# # stabilizationWindowSeconds: 30 +--- +apiVersion: autoscaling/v2 +kind: 
HorizontalPodAutoscaler +metadata: + name: pathcompservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: pathcompservice + minReplicas: 1 + maxReplicas: 20 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 99f8f45a6..6143740e8 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -70,25 +70,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 -#--- -#apiVersion: autoscaling/v2 -#kind: HorizontalPodAutoscaler -#metadata: -# name: serviceservice-hpa -#spec: -# scaleTargetRef: -# apiVersion: apps/v1 -# kind: Deployment -# name: serviceservice -# minReplicas: 1 -# maxReplicas: 20 -# metrics: -# - type: Resource -# resource: -# name: cpu -# target: -# type: Utilization -# averageUtilization: 80 -# #behavior: -# # scaleDown: -# # stabilizationWindowSeconds: 30 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: serviceservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: serviceservice + minReplicas: 1 + maxReplicas: 20 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 diff --git a/manifests/sliceservice.yaml b/manifests/sliceservice.yaml index 5ea63ad0c..7f4d022b3 100644 --- a/manifests/sliceservice.yaml +++ b/manifests/sliceservice.yaml @@ -75,25 +75,25 @@ spec: protocol: TCP port: 9192 targetPort: 9192 -#--- -#apiVersion: autoscaling/v2 -#kind: HorizontalPodAutoscaler -#metadata: -# name: sliceservice-hpa -#spec: -# scaleTargetRef: -# apiVersion: apps/v1 -# kind: Deployment -# name: sliceservice -# minReplicas: 1 -# maxReplicas: 20 -# metrics: -# - type: Resource -# resource: -# name: cpu -# target: -# type: Utilization -# 
averageUtilization: 80 -# #behavior: -# # scaleDown: -# # stabilizationWindowSeconds: 30 +--- +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: sliceservice-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: sliceservice + minReplicas: 1 + maxReplicas: 20 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 80 + #behavior: + # scaleDown: + # stabilizationWindowSeconds: 30 -- GitLab From 2d441fa76371640999ad37b950865c6b3f0eefef Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 10 Jul 2023 07:23:34 +0000 Subject: [PATCH 59/62] Pre-merge code cleanup --- deploy/tfs.sh | 2 +- manifests/contextservice.yaml | 6 +++--- manifests/deviceservice.yaml | 4 ++-- manifests/monitoringservice.yaml | 10 +++++----- manifests/pathcompservice.yaml | 4 ++-- manifests/serviceservice.yaml | 6 +++--- manifests/sliceservice.yaml | 6 +++--- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/deploy/tfs.sh b/deploy/tfs.sh index 5edc0c29b..e6a0c0c10 100755 --- a/deploy/tfs.sh +++ b/deploy/tfs.sh @@ -259,7 +259,7 @@ for COMPONENT in $TFS_COMPONENTS; do echo " Adapting '$COMPONENT' manifest file..." 
MANIFEST="$TMP_MANIFESTS_FOLDER/${COMPONENT}service.yaml" - #cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" + # cp ./manifests/"${COMPONENT}"service.yaml "$MANIFEST" cat ./manifests/"${COMPONENT}"service.yaml | linkerd inject - --proxy-cpu-request "10m" --proxy-cpu-limit "1" --proxy-memory-request "64Mi" --proxy-memory-limit "256Mi" > "$MANIFEST" if [ "$COMPONENT" == "pathcomp" ]; then diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml index ab8ccc521..96735bf5f 100644 --- a/manifests/contextservice.yaml +++ b/manifests/contextservice.yaml @@ -54,11 +54,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:1010"] resources: requests: - cpu: 150m + cpu: 250m memory: 128Mi limits: - cpu: 500m - memory: 512Mi + cpu: 1000m + memory: 1024Mi --- apiVersion: v1 kind: Service diff --git a/manifests/deviceservice.yaml b/manifests/deviceservice.yaml index 4bf4d6247..a99e3e588 100644 --- a/manifests/deviceservice.yaml +++ b/manifests/deviceservice.yaml @@ -53,8 +53,8 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:2020"] resources: requests: - cpu: 500m - memory: 512Mi + cpu: 250m + memory: 128Mi limits: cpu: 1000m memory: 1024Mi diff --git a/manifests/monitoringservice.yaml b/manifests/monitoringservice.yaml index 06ac823a1..dbcfa68a0 100644 --- a/manifests/monitoringservice.yaml +++ b/manifests/monitoringservice.yaml @@ -36,7 +36,7 @@ spec: - containerPort: 9192 env: - name: LOG_LEVEL - value: "DEBUG" + value: "INFO" envFrom: - secretRef: name: qdb-data @@ -48,11 +48,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:7070"] resources: requests: - cpu: 50m - memory: 64Mi + cpu: 250m + memory: 256Mi limits: - cpu: 500m - memory: 512Mi + cpu: 1000m + memory: 1024Mi --- apiVersion: v1 kind: Service diff --git a/manifests/pathcompservice.yaml b/manifests/pathcompservice.yaml index 2fae0c8f2..c85922d96 100644 --- a/manifests/pathcompservice.yaml +++ b/manifests/pathcompservice.yaml @@ -69,10 +69,10 @@ spec: # timeoutSeconds: 5 resources: 
requests: - cpu: 100m + cpu: 250m memory: 256Mi limits: - cpu: 500m + cpu: 1000m memory: 1024Mi --- apiVersion: v1 diff --git a/manifests/serviceservice.yaml b/manifests/serviceservice.yaml index 6143740e8..7d7bdaa4e 100644 --- a/manifests/serviceservice.yaml +++ b/manifests/serviceservice.yaml @@ -45,11 +45,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:3030"] resources: requests: - cpu: 150m + cpu: 250m memory: 128Mi limits: - cpu: 500m - memory: 512Mi + cpu: 1000m + memory: 1024Mi --- apiVersion: v1 kind: Service diff --git a/manifests/sliceservice.yaml b/manifests/sliceservice.yaml index 7f4d022b3..e7e5c1604 100644 --- a/manifests/sliceservice.yaml +++ b/manifests/sliceservice.yaml @@ -50,11 +50,11 @@ spec: command: ["/bin/grpc_health_probe", "-addr=:4040"] resources: requests: - cpu: 150m + cpu: 250m memory: 128Mi limits: - cpu: 500m - memory: 512Mi + cpu: 1000m + memory: 1024Mi --- apiVersion: v1 kind: Service -- GitLab From 94059e57e06245fe8136012f4ff5479d5097ac92 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 10 Jul 2023 07:55:03 +0000 Subject: [PATCH 60/62] Pre-merge code cleanup --- src/device/service/drivers/__init__.py | 19 ++++++------ .../drivers/openconfig/OpenConfigDriver.py | 29 +++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/src/device/service/drivers/__init__.py b/src/device/service/drivers/__init__.py index 89d3bfc01..4ae7128b0 100644 --- a/src/device/service/drivers/__init__.py +++ b/src/device/service/drivers/__init__.py @@ -85,16 +85,17 @@ DRIVERS.append( ])) if LOAD_ALL_DEVICE_DRIVERS: - #from .openconfig.OpenConfigDriver import OpenConfigDriver # pylint: disable=wrong-import-position - #DRIVERS.append( - # (OpenConfigDriver, [ - # { - # # Real Packet Router, specifying OpenConfig Driver => use OpenConfigDriver - # FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.PACKET_ROUTER, - # FilterFieldEnum.DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, - # } - # ])) + from .openconfig.OpenConfigDriver 
import OpenConfigDriver # pylint: disable=wrong-import-position + DRIVERS.append( + (OpenConfigDriver, [ + { + # Real Packet Router, specifying OpenConfig Driver => use OpenConfigDriver + FilterFieldEnum.DEVICE_TYPE: DeviceTypeEnum.PACKET_ROUTER, + FilterFieldEnum.DRIVER : DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG, + } + ])) +if LOAD_ALL_DEVICE_DRIVERS: from .gnmi_openconfig.GnmiOpenConfigDriver import GnmiOpenConfigDriver # pylint: disable=wrong-import-position DRIVERS.append( (GnmiOpenConfigDriver, [ diff --git a/src/device/service/drivers/openconfig/OpenConfigDriver.py b/src/device/service/drivers/openconfig/OpenConfigDriver.py index 48a383f7c..b34efbc8f 100644 --- a/src/device/service/drivers/openconfig/OpenConfigDriver.py +++ b/src/device/service/drivers/openconfig/OpenConfigDriver.py @@ -32,7 +32,6 @@ from device.service.driver_api.AnyTreeTools import TreeNode, get_subnode, set_su from .templates import ALL_RESOURCE_KEYS, EMPTY_CONFIG, compose_config, get_filter, parse, cli_compose_config from .RetryDecorator import retry - DEBUG_MODE = False logging.getLogger('ncclient.manager').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) logging.getLogger('ncclient.transport.ssh').setLevel(logging.DEBUG if DEBUG_MODE else logging.WARNING) @@ -60,19 +59,19 @@ class NetconfSessionHandler: self.__connected = threading.Event() self.__address = address self.__port = int(port) - self.__username = settings.get('username') - self.__password = settings.get('password') - self.__vendor = settings.get('vendor') - self.__version = settings.get('version', "1") - self.__key_filename = settings.get('key_filename') - self.__hostkey_verify = settings.get('hostkey_verify', True) - self.__look_for_keys = settings.get('look_for_keys', True) - self.__allow_agent = settings.get('allow_agent', True) - self.__force_running = settings.get('force_running', False) - self.__commit_per_rule = settings.get('commit_per_rule', False) - self.__device_params = settings.get('device_params', {}) 
- self.__manager_params = settings.get('manager_params', {}) - self.__nc_params = settings.get('nc_params', {}) + self.__username = settings.get('username') + self.__password = settings.get('password') + self.__vendor = settings.get('vendor') + self.__version = settings.get('version', "1") + self.__key_filename = settings.get('key_filename') + self.__hostkey_verify = settings.get('hostkey_verify', True) + self.__look_for_keys = settings.get('look_for_keys', True) + self.__allow_agent = settings.get('allow_agent', True) + self.__force_running = settings.get('force_running', False) + self.__commit_per_rule = settings.get('commit_per_rule', False) + self.__device_params = settings.get('device_params', {}) + self.__manager_params = settings.get('manager_params', {}) + self.__nc_params = settings.get('nc_params', {}) self.__message_renderer = settings.get('message_renderer','jinja') self.__manager : Manager = None self.__candidate_supported = False @@ -202,7 +201,7 @@ def do_sampling( except: # pylint: disable=bare-except logger.exception('Error retrieving samples') -def edit_config( +def edit_config( # edit the configuration of openconfig devices netconf_handler : NetconfSessionHandler, logger : logging.Logger, resources : List[Tuple[str, Any]], delete=False, commit_per_rule=False, target='running', default_operation='merge', test_option=None, error_option=None, format='xml' # pylint: disable=redefined-builtin -- GitLab From fabf433ea66e41efac44bc513399aebed2958494 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 10 Jul 2023 08:20:48 +0000 Subject: [PATCH 61/62] Pre-merge code cleanup --- my_deploy.sh | 19 +- .../l2nm_emulated/ConfigRules.py | 9 + src/webui/service/device/forms.py | 6 +- src/webui/service/service/routes.py | 272 +++++++++--------- 4 files changed, 162 insertions(+), 144 deletions(-) diff --git a/my_deploy.sh b/my_deploy.sh index 8fe133477..7e8abb128 100755 --- a/my_deploy.sh +++ b/my_deploy.sh @@ -20,19 +20,18 @@ export 
TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/" # Set the list of components, separated by spaces, you want to build images for, and deploy. -#export TFS_COMPONENTS="context device pathcomp service slice compute webui load_generator" -export TFS_COMPONENTS="context device pathcomp service slice webui" +export TFS_COMPONENTS="context device pathcomp service slice compute webui load_generator" -# Uncoment to activate Monitoring -export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" +# Uncomment to activate Monitoring +#export TFS_COMPONENTS="${TFS_COMPONENTS} monitoring" -# Uncoment to activate Automation and Policy Manager +# Uncomment to activate Automation and Policy Manager #export TFS_COMPONENTS="${TFS_COMPONENTS} automation policy" -# Uncoment to activate Optical CyberSecurity +# Uncomment to activate Optical CyberSecurity #export TFS_COMPONENTS="${TFS_COMPONENTS} dbscanserving opticalattackmitigator opticalattackdetector opticalattackmanager" -# Uncoment to activate L3 CyberSecurity +# Uncomment to activate L3 CyberSecurity #export TFS_COMPONENTS="${TFS_COMPONENTS} l3_attackmitigator l3_centralizedattackdetector" # Set the tag you want to use for your images. 
@@ -42,10 +41,12 @@ export TFS_IMAGE_TAG="dev" export TFS_K8S_NAMESPACE="tfs" # Set additional manifest files to be applied after the deployment -#export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml manifests/servicemonitors.yaml" export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" -# Uncoment when deploying Optical CyberSecurity +# Uncomment to monitor performance of components +export TFS_EXTRA_MANIFESTS="${TFS_EXTRA_MANIFESTS} manifests/servicemonitors.yaml" + +# Uncomment when deploying Optical CyberSecurity #export TFS_EXTRA_MANIFESTS="${TFS_EXTRA_MANIFESTS} manifests/cachingservice.yaml" # Set the new Grafana admin password diff --git a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py index ab5807a9f..e68a62030 100644 --- a/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py +++ b/src/service/service/service_handlers/l2nm_emulated/ConfigRules.py @@ -21,6 +21,12 @@ def setup_config_rules( service_settings : TreeNode, endpoint_settings : TreeNode, endpoint_acls : List [Tuple] ) -> List[Dict]: + if service_settings is None: return [] + if endpoint_settings is None: return [] + + json_settings : Dict = service_settings.value + json_endpoint_settings : Dict = endpoint_settings.value + #mtu = json_settings.get('mtu', 1450 ) # 1512 #address_families = json_settings.get('address_families', [] ) # ['IPV4'] #bgp_as = json_settings.get('bgp_as', 0 ) # 65000 @@ -80,6 +86,9 @@ def teardown_config_rules( service_settings : TreeNode, endpoint_settings : TreeNode ) -> List[Dict]: + if service_settings is None: return [] + if endpoint_settings is None: return [] + #json_settings : Dict = service_settings.value json_endpoint_settings : Dict = endpoint_settings.value diff --git a/src/webui/service/device/forms.py b/src/webui/service/device/forms.py index a6e07fe3c..e884e96a5 100644 --- a/src/webui/service/device/forms.py +++ b/src/webui/service/device/forms.py 
@@ -18,10 +18,10 @@ from wtforms.validators import DataRequired, Length, NumberRange, ValidationErro from common.proto.context_pb2 import DeviceOperationalStatusEnum class AddDeviceForm(FlaskForm): - device_id = StringField('ID', - validators=[DataRequired(), Length(min=5)]) + device_id = StringField('ID', validators=[DataRequired(), Length(min=5)]) device_type = SelectField('Type') operational_status = SelectField('Operational Status', coerce=int, validators=[NumberRange(min=0)]) + device_drivers_undefined = BooleanField('UNDEFINED / EMULATED') device_drivers_openconfig = BooleanField('OPENCONFIG') device_drivers_transport_api = BooleanField('TRANSPORT_API') @@ -31,9 +31,11 @@ class AddDeviceForm(FlaskForm): device_drivers_xr = BooleanField('XR') device_drivers_ietf_l2vpn = BooleanField('IETF L2VPN') device_drivers_gnmi_openconfig = BooleanField('GNMI OPENCONFIG') + device_config_address = StringField('connect/address',default='127.0.0.1',validators=[DataRequired(), Length(min=5)]) device_config_port = StringField('connect/port',default='0',validators=[DataRequired(), Length(min=1)]) device_config_settings = TextAreaField('connect/settings',default='{}',validators=[DataRequired(), Length(min=2)]) + submit = SubmitField('Add') def validate_operational_status(form, field): diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index 3d3d47ab5..fa3d3b164 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -92,6 +92,12 @@ def home(): ste=ServiceTypeEnum, sse=ServiceStatusEnum, active_drivers=active_drivers) +@service.route('add', methods=['GET', 'POST']) +def add(): + flash('Add service route called', 'danger') + raise NotImplementedError() + #return render_template('service/home.html') + def get_hub_module_name(dev: Device) -> Optional[str]: for cr in dev.device_config.config_rules: if cr.action == ConfigActionEnum.CONFIGACTION_SET and cr.custom and cr.custom.resource_key == 
"_connect/settings": @@ -103,139 +109,139 @@ def get_hub_module_name(dev: Device) -> Optional[str]: pass return None -#@service.route('add-xr', methods=['GET', 'POST']) -#def add_xr(): -# ### FIXME: copypaste -# if 'context_uuid' not in session or 'topology_uuid' not in session: -# flash("Please select a context!", "warning") -# return redirect(url_for("main.home")) -# -# context_uuid = session['context_uuid'] -# topology_uuid = session['topology_uuid'] -# -# context_client.connect() -# grpc_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid, rw_copy=False) -# if grpc_topology is None: -# flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger') -# return redirect(url_for("main.home")) -# else: -# topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} -# grpc_devices= context_client.ListDevices(Empty()) -# devices = [ -# device for device in grpc_devices.devices -# if device.device_id.device_uuid.uuid in topo_device_uuids and DeviceDriverEnum.DEVICEDRIVER_XR in device.device_drivers -# ] -# devices.sort(key=lambda dev: dev.name) -# -# hub_interfaces_by_device = defaultdict(list) -# leaf_interfaces_by_device = defaultdict(list) -# constellation_name_to_uuid = {} -# dev_ep_to_uuid = {} -# ep_uuid_to_name = {} -# for d in devices: -# constellation_name_to_uuid[d.name] = d.device_id.device_uuid.uuid -# hm_name = get_hub_module_name(d) -# if hm_name is not None: -# hm_if_prefix= hm_name + "|" -# for ep in d.device_endpoints: -# dev_ep_to_uuid[(d.name, ep.name)] = ep.endpoint_id.endpoint_uuid.uuid -# if ep.name.startswith(hm_if_prefix): -# hub_interfaces_by_device[d.name].append(ep.name) -# else: -# leaf_interfaces_by_device[d.name].append(ep.name) -# ep_uuid_to_name[ep.endpoint_id.endpoint_uuid.uuid] = (d.name, ep.name) -# hub_interfaces_by_device[d.name].sort() -# leaf_interfaces_by_device[d.name].sort() -# -# # Find out what endpoints are already used so 
that they can be disabled -# # in the create screen -# context_obj = get_context(context_client, context_uuid, rw_copy=False) -# if context_obj is None: -# flash('Context({:s}) not found'.format(str(context_uuid)), 'danger') -# return redirect(request.url) -# -# services = context_client.ListServices(context_obj.context_id) -# ep_used_by={} -# for service in services.services: -# if service.service_type == ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: -# for ep in service.service_endpoint_ids: -# ep_uuid = ep.endpoint_uuid.uuid -# if ep_uuid in ep_uuid_to_name: -# dev_name, ep_name = ep_uuid_to_name[ep_uuid] -# ep_used_by[f"{ep_name}@{dev_name}"] = service.name -# -# context_client.close() -# -# if request.method != 'POST': -# return render_template('service/add-xr.html', devices=devices, hub_if=hub_interfaces_by_device, leaf_if=leaf_interfaces_by_device, ep_used_by=ep_used_by) -# else: -# service_name = request.form["service_name"] -# if service_name == "": -# flash(f"Service name must be specified", 'danger') -# -# constellation = request.form["constellation"] -# constellation_uuid = constellation_name_to_uuid.get(constellation, None) -# if constellation_uuid is None: -# flash(f"Invalid constellation \"{constellation}\"", 'danger') -# -# hub_if = request.form["hubif"] -# hub_if_uuid = dev_ep_to_uuid.get((constellation, hub_if), None) -# if hub_if_uuid is None: -# flash(f"Invalid hub interface \"{hub_if}\"", 'danger') -# -# leaf_if = request.form["leafif"] -# leaf_if_uuid = dev_ep_to_uuid.get((constellation, leaf_if), None) -# if leaf_if_uuid is None: -# flash(f"Invalid leaf interface \"{leaf_if}\"", 'danger') -# -# if service_name == "" or constellation_uuid is None or hub_if_uuid is None or leaf_if_uuid is None: -# return redirect(request.url) -# -# -# json_context_uuid=json_context_id(context_uuid) -# sr = { -# "name": service_name, -# "service_id": { -# "context_id": {"context_uuid": {"uuid": context_uuid}}, -# "service_uuid": {"uuid": service_name} 
-# }, -# 'service_type' : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, -# "service_endpoint_ids": [ -# {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': hub_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)}, -# {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': leaf_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)} -# ], -# 'service_status' : {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED}, -# 'service_constraints' : [], -# } -# -# json_tapi_settings = { -# 'capacity_value' : 50.0, -# 'capacity_unit' : 'GHz', -# 'layer_proto_name': 'PHOTONIC_MEDIA', -# 'layer_proto_qual': 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC', -# 'direction' : 'UNIDIRECTIONAL', -# } -# config_rule = json_config_rule_set('/settings', json_tapi_settings) -# -# with connected_client(service_client) as sc: -# endpoints, sr['service_endpoint_ids'] = sr['service_endpoint_ids'], [] -# try: -# create_response = sc.CreateService(Service(**sr)) -# except Exception as e: -# flash(f'Failure to update service name {service_name} with endpoints and configuration, exception {str(e)}', 'danger') -# return redirect(request.url) -# -# sr['service_endpoint_ids'] = endpoints -# sr['service_config'] = {'config_rules': [config_rule]} -# -# try: -# update_response = sc.UpdateService(Service(**sr)) -# flash(f'Created service {update_response.service_uuid.uuid}', 'success') -# except Exception as e: -# flash(f'Failure to update service {create_response.service_uuid.uuid} with endpoints and configuration, exception {str(e)}', 'danger') -# return redirect(request.url) -# -# return redirect(url_for('service.home')) +@service.route('add-xr', methods=['GET', 'POST']) +def add_xr(): + ### FIXME: copypaste + if 'context_uuid' not in session or 'topology_uuid' not in session: + flash("Please select a context!", "warning") + return redirect(url_for("main.home")) 
+ + context_uuid = session['context_uuid'] + topology_uuid = session['topology_uuid'] + + context_client.connect() + grpc_topology = get_topology(context_client, topology_uuid, context_uuid=context_uuid, rw_copy=False) + if grpc_topology is None: + flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger') + return redirect(url_for("main.home")) + else: + topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids} + grpc_devices= context_client.ListDevices(Empty()) + devices = [ + device for device in grpc_devices.devices + if device.device_id.device_uuid.uuid in topo_device_uuids and DeviceDriverEnum.DEVICEDRIVER_XR in device.device_drivers + ] + devices.sort(key=lambda dev: dev.name) + + hub_interfaces_by_device = defaultdict(list) + leaf_interfaces_by_device = defaultdict(list) + constellation_name_to_uuid = {} + dev_ep_to_uuid = {} + ep_uuid_to_name = {} + for d in devices: + constellation_name_to_uuid[d.name] = d.device_id.device_uuid.uuid + hm_name = get_hub_module_name(d) + if hm_name is not None: + hm_if_prefix= hm_name + "|" + for ep in d.device_endpoints: + dev_ep_to_uuid[(d.name, ep.name)] = ep.endpoint_id.endpoint_uuid.uuid + if ep.name.startswith(hm_if_prefix): + hub_interfaces_by_device[d.name].append(ep.name) + else: + leaf_interfaces_by_device[d.name].append(ep.name) + ep_uuid_to_name[ep.endpoint_id.endpoint_uuid.uuid] = (d.name, ep.name) + hub_interfaces_by_device[d.name].sort() + leaf_interfaces_by_device[d.name].sort() + + # Find out what endpoints are already used so that they can be disabled + # in the create screen + context_obj = get_context(context_client, context_uuid, rw_copy=False) + if context_obj is None: + flash('Context({:s}) not found'.format(str(context_uuid)), 'danger') + return redirect(request.url) + + services = context_client.ListServices(context_obj.context_id) + ep_used_by={} + for service in services.services: + if service.service_type == 
ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE: + for ep in service.service_endpoint_ids: + ep_uuid = ep.endpoint_uuid.uuid + if ep_uuid in ep_uuid_to_name: + dev_name, ep_name = ep_uuid_to_name[ep_uuid] + ep_used_by[f"{ep_name}@{dev_name}"] = service.name + + context_client.close() + + if request.method != 'POST': + return render_template('service/add-xr.html', devices=devices, hub_if=hub_interfaces_by_device, leaf_if=leaf_interfaces_by_device, ep_used_by=ep_used_by) + else: + service_name = request.form["service_name"] + if service_name == "": + flash(f"Service name must be specified", 'danger') + + constellation = request.form["constellation"] + constellation_uuid = constellation_name_to_uuid.get(constellation, None) + if constellation_uuid is None: + flash(f"Invalid constellation \"{constellation}\"", 'danger') + + hub_if = request.form["hubif"] + hub_if_uuid = dev_ep_to_uuid.get((constellation, hub_if), None) + if hub_if_uuid is None: + flash(f"Invalid hub interface \"{hub_if}\"", 'danger') + + leaf_if = request.form["leafif"] + leaf_if_uuid = dev_ep_to_uuid.get((constellation, leaf_if), None) + if leaf_if_uuid is None: + flash(f"Invalid leaf interface \"{leaf_if}\"", 'danger') + + if service_name == "" or constellation_uuid is None or hub_if_uuid is None or leaf_if_uuid is None: + return redirect(request.url) + + + json_context_uuid=json_context_id(context_uuid) + sr = { + "name": service_name, + "service_id": { + "context_id": {"context_uuid": {"uuid": context_uuid}}, + "service_uuid": {"uuid": service_name} + }, + 'service_type' : ServiceTypeEnum.SERVICETYPE_TAPI_CONNECTIVITY_SERVICE, + "service_endpoint_ids": [ + {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': hub_if_uuid}, 'topology_id': json_topology_id("admin", context_id=json_context_uuid)}, + {'device_id': {'device_uuid': {'uuid': constellation_uuid}}, 'endpoint_uuid': {'uuid': leaf_if_uuid}, 'topology_id': json_topology_id("admin", 
context_id=json_context_uuid)} + ], + 'service_status' : {'service_status': ServiceStatusEnum.SERVICESTATUS_PLANNED}, + 'service_constraints' : [], + } + + json_tapi_settings = { + 'capacity_value' : 50.0, + 'capacity_unit' : 'GHz', + 'layer_proto_name': 'PHOTONIC_MEDIA', + 'layer_proto_qual': 'tapi-photonic-media:PHOTONIC_LAYER_QUALIFIER_NMC', + 'direction' : 'UNIDIRECTIONAL', + } + config_rule = json_config_rule_set('/settings', json_tapi_settings) + + with connected_client(service_client) as sc: + endpoints, sr['service_endpoint_ids'] = sr['service_endpoint_ids'], [] + try: + create_response = sc.CreateService(Service(**sr)) + except Exception as e: + flash(f'Failure to update service name {service_name} with endpoints and configuration, exception {str(e)}', 'danger') + return redirect(request.url) + + sr['service_endpoint_ids'] = endpoints + sr['service_config'] = {'config_rules': [config_rule]} + + try: + update_response = sc.UpdateService(Service(**sr)) + flash(f'Created service {update_response.service_uuid.uuid}', 'success') + except Exception as e: + flash(f'Failure to update service {create_response.service_uuid.uuid} with endpoints and configuration, exception {str(e)}', 'danger') + return redirect(request.url) + + return redirect(url_for('service.home')) @service.get('/detail') def detail(service_uuid: str): -- GitLab From 072c9c9ef69465e82ebf73ac137883a9c561a56e Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 10 Jul 2023 08:24:44 +0000 Subject: [PATCH 62/62] Pre-merge code cleanup --- src/webui/service/service/routes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/webui/service/service/routes.py b/src/webui/service/service/routes.py index fa3d3b164..08312e525 100644 --- a/src/webui/service/service/routes.py +++ b/src/webui/service/service/routes.py @@ -98,6 +98,7 @@ def add(): raise NotImplementedError() #return render_template('service/home.html') + def get_hub_module_name(dev: Device) -> Optional[str]: for cr in 
dev.device_config.config_rules: if cr.action == ConfigActionEnum.CONFIGACTION_SET and cr.custom and cr.custom.resource_key == "_connect/settings": -- GitLab