From c4b613b68b5cc253fb652c3e47fa1f103cef96aa Mon Sep 17 00:00:00 2001
From: armingol <pablo.armingolrobles@telefonica.com>
Date: Wed, 14 Feb 2024 10:54:44 +0100
Subject: [PATCH] first version

---
 manifests/sliceservice.yaml                   |  2 +-
 src/common/tools/descriptor/Loader.py         |  5 ++-
 src/common/tools/descriptor/Tools.py          | 11 +++--
 .../drivers/openconfig/templates/Inventory.py | 33 ++++++++++++++-
 .../drivers/openconfig/templates/Tools.py     |  5 +++
 src/slice/service/SliceServiceServicerImpl.py | 41 +++++++++++--------
 .../service/slice_grouper/SliceGrouper.py     | 28 ++++++-------
 src/webui/service/main/routes.py              |  4 +-
 8 files changed, 90 insertions(+), 39 deletions(-)

diff --git a/manifests/sliceservice.yaml b/manifests/sliceservice.yaml
index e7e5c1604..61f5b1d21 100644
--- a/manifests/sliceservice.yaml
+++ b/manifests/sliceservice.yaml
@@ -36,7 +36,7 @@ spec:
         - containerPort: 9192
         env:
         - name: LOG_LEVEL
-          value: "INFO"
+          value: "DEBUG"
         - name: SLICE_GROUPING
           value: "DISABLE"
         envFrom:
diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py
index 916a73d30..9e536f935 100644
--- a/src/common/tools/descriptor/Loader.py
+++ b/src/common/tools/descriptor/Loader.py
@@ -104,7 +104,7 @@ class DescriptorLoader:
         self.__devices     = self.__descriptors.get('devices'    , [])
         self.__links       = self.__descriptors.get('links'      , [])
         self.__services    = self.__descriptors.get('services'   , [])
-        self.__slices      = self.__descriptors.get('slices'     , [])
+        self.__slices      = self.__descriptors.get('data', []) #Coge de la file el campo slices
         self.__connections = self.__descriptors.get('connections', [])
 
         self.__contexts_add   = None
@@ -194,7 +194,7 @@ class DescriptorLoader:
         _slices = {}
         for slice_ in self.__slices:
             context_uuid = slice_['slice_id']['context_id']['context_uuid']['uuid']
-            _slices.setdefault(context_uuid, []).append(slice_)
+            _slices.setdefault(context_uuid, []).append(slice_) #no tenemos context_uuid en este formato, lo meto a mano?
         return _slices
 
     @property
@@ -215,6 +215,7 @@ class DescriptorLoader:
         # Format CustomConfigRules in Devices, Services and Slices provided in JSON format
         self.__devices  = [format_device_custom_config_rules (device ) for device  in self.__devices ]
         self.__services = [format_service_custom_config_rules(service) for service in self.__services]
+        LOGGER.info(self.__slices)
         self.__slices   = [format_slice_custom_config_rules  (slice_ ) for slice_  in self.__slices  ]
 
         # Context and Topology require to create the entity first, and add devices, links, services,
diff --git a/src/common/tools/descriptor/Tools.py b/src/common/tools/descriptor/Tools.py
index f03c635b8..1811f77d9 100644
--- a/src/common/tools/descriptor/Tools.py
+++ b/src/common/tools/descriptor/Tools.py
@@ -15,6 +15,7 @@
 import copy, json
 from typing import Dict, List, Optional, Tuple, Union
 
+#context es la db, al inicio esta vacía
 def get_descriptors_add_contexts(contexts : List[Dict]) -> List[Dict]:
     contexts_add = copy.deepcopy(contexts)
     for context in contexts_add:
@@ -52,7 +53,7 @@ def get_descriptors_add_slices(slices : List[Dict]) -> List[Dict]:
 TypeResourceValue = Union[str, int, bool, float, dict, list]
 def format_custom_config_rules(config_rules : List[Dict]) -> List[Dict]:
     for config_rule in config_rules:
-        if 'custom' not in config_rule: continue
+       # if 'custom' not in config_rule: continue #suponemos que siempre son custom, quitamos esta linea
         custom_resource_value : TypeResourceValue = config_rule['custom']['resource_value']
         if isinstance(custom_resource_value, (dict, list)):
             custom_resource_value = json.dumps(custom_resource_value, sort_keys=True, indent=0)
@@ -71,10 +72,14 @@ def format_service_custom_config_rules(service : Dict) -> Dict:
     service['service_config']['config_rules'] = config_rules
     return service
 
+#UTILIZA LA FUNCION FORMAT_CUSTOM_CONFIG_RULES 
+#cambio
 def format_slice_custom_config_rules(slice_ : Dict) -> Dict:
-    config_rules = slice_.get('slice_config', {}).get('config_rules', [])
+    #donde cojo los config_rules
+    #las config_rules parecen estar en ACs?
+    config_rules = slice_.get('sdps', []) 
     config_rules = format_custom_config_rules(config_rules)
-    slice_['slice_config']['config_rules'] = config_rules
+    slice_['sdps']['sdp']['attachment-circuits'] = config_rules
     return slice_
 
 def split_devices_by_rules(devices : List[Dict]) -> Tuple[List[Dict], List[Dict]]:
diff --git a/src/device/service/drivers/openconfig/templates/Inventory.py b/src/device/service/drivers/openconfig/templates/Inventory.py
index 2ae67ba47..01b417739 100644
--- a/src/device/service/drivers/openconfig/templates/Inventory.py
+++ b/src/device/service/drivers/openconfig/templates/Inventory.py
@@ -15,7 +15,7 @@
 import logging, lxml.etree as ET
 from typing import Any, Dict, List, Tuple
 from .Namespace import NAMESPACES
-from .Tools import add_value_from_tag
+from .Tools import add_value_from_tag, add_int_from_tag
 
 LOGGER = logging.getLogger(__name__)
 
@@ -56,6 +56,8 @@ def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]:
     response = []
     LOGGER.debug("InventoryPrueba")
     parent_types = {}
+    #Initialized count to 0 for index
+    count = 0
     for xml_component in xml_data.xpath(XPATH_PORTS, namespaces=NAMESPACES):
         LOGGER.info('xml_component inventario = {:s}'.format(str(ET.tostring(xml_component))))
         inventory = {}
@@ -64,6 +66,7 @@ def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]:
         inventory['class'] = ''
         inventory['attributes'] = {}
         component_reference = []
+        
 
         component_name = xml_component.find('ocp:name', namespaces=NAMESPACES)
         if component_name is None or component_name.text is None: continue
@@ -84,6 +87,34 @@ def parse(xml_data : ET.Element) -> List[Tuple[str, Dict[str, Any]]]:
         
         if inventory['class'] == 'CPU' or inventory['class'] == 'STORAGE': continue
 
+        ##Added (after the checking of the name and the class)
+        #Physical index- Index of the component in the array
+    
+        add_int_from_tag(inventory['attributes'], 'physical-index', count)
+        count +=1
+
+        ##Added
+        #FRU
+        if inventory['class'] == 'FRU':
+            component_isfru = xml_component.find('ocp:state/ocp:type', namespaces=NAMESPACES)
+            add_value_from_tag(inventory['attributes'], 'isfru', component_isfru)
+        ##ID
+        component_id = xml_component.find('ocp:state/ocp:id', namespaces=NAMESPACES)
+        if not component_id is None:
+            add_value_from_tag(inventory['attributes'], 'id', component_id)
+
+        ##OPER_STATUS
+        component_oper_status = xml_component.find('ocp:state/ocp:oper-status', namespaces=NAMESPACES)
+        if not component_oper_status is None:
+            add_value_from_tag(inventory['attributes'], 'oper-status', component_oper_status)
+
+        ##MODEL_ID  
+        component_model_id = xml_component.find('ocp:state/ocp:entity-id', namespaces=NAMESPACES)
+        if not component_model_id is None:
+            add_value_from_tag(inventory['attributes'], 'model-id', component_model_id)
+    
+    ##
+        
         component_empty = xml_component.find('ocp:state/ocp:empty', namespaces=NAMESPACES)
         if not component_empty is None:
             add_value_from_tag(inventory['attributes'], 'empty', component_empty)
diff --git a/src/device/service/drivers/openconfig/templates/Tools.py b/src/device/service/drivers/openconfig/templates/Tools.py
index 79bebef51..78e61e0ae 100644
--- a/src/device/service/drivers/openconfig/templates/Tools.py
+++ b/src/device/service/drivers/openconfig/templates/Tools.py
@@ -26,6 +26,11 @@ def add_value_from_tag(target : Dict, field_name: str, field_value : ET.Element,
     if cast is not None: field_value = cast(field_value)
     target[field_name] = field_value
 
+def add_int_from_tag(target : Dict, field_name: str, field_value : int, cast=None) -> None:
+    if field_value is None: return
+    if cast is not None: field_value = cast(field_value)
+    target[field_name] = field_value
+
 def add_value_from_collection(target : Dict, field_name: str, field_value : Collection) -> None:
     if field_value is None or len(field_value) == 0: return
     target[field_name] = field_value
diff --git a/src/slice/service/SliceServiceServicerImpl.py b/src/slice/service/SliceServiceServicerImpl.py
index cbe2dd5c7..52552a6ed 100644
--- a/src/slice/service/SliceServiceServicerImpl.py
+++ b/src/slice/service/SliceServiceServicerImpl.py
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+#agrupar slices agrupar recursos para no hacer mas configs
+
 from typing import Optional
 import grpc, json, logging #, deepdiff
 from common.proto.context_pb2 import (
@@ -30,33 +32,34 @@ from interdomain.client.InterdomainClient import InterdomainClient
 from service.client.ServiceClient import ServiceClient
 from .slice_grouper.SliceGrouper import SliceGrouper
 
-LOGGER = logging.getLogger(__name__)
+LOGGER = logging.getLogger(__name__) #crea un objeto de registro con el nombre del modulo actual
 
 METRICS_POOL = MetricsPool('Slice', 'RPC')
 
-class SliceServiceServicerImpl(SliceServiceServicer):
+class SliceServiceServicerImpl(SliceServiceServicer): # Implementa el servicio gRPC definido por SliceServiceServicer 
     def __init__(self):
         LOGGER.debug('Creating Servicer...')
-        self._slice_grouper = SliceGrouper()
+        self._slice_grouper = SliceGrouper() #crea una instancia de slicegrouper
         LOGGER.debug('Servicer Created')
 
     def create_update(self, request : Slice) -> SliceId:
         # Set slice status to "SERVICESTATUS_PLANNED" to ensure rest of components are aware the slice is
         # being modified.
         context_client = ContextClient()
-        slice_ro : Optional[Slice] = get_slice_by_id(context_client, request.slice_id, rw_copy=False)
+        slice_ro : Optional[Slice] = get_slice_by_id(context_client, request.slice_id, rw_copy=False) # se obtiene la slice con el sliceId de la req
 
-        slice_rw = Slice()
+        slice_rw = Slice() #crea nueva slice desde la slice de la req
         slice_rw.CopyFrom(request if slice_ro is None else slice_ro)
-        if len(request.name) > 0: slice_rw.name = request.name
+        if len(request.name) > 0: slice_rw.name = request.name #actualizamos el nombre y estado de la slice rw
         slice_rw.slice_owner.CopyFrom(request.slice_owner)                          # pylint: disable=no-member
         slice_rw.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_PLANNED    # pylint: disable=no-member
 
+#copiamos endpoints, reglas y configuraciones de la req a la slice
         copy_endpoint_ids(request.slice_endpoint_ids,        slice_rw.slice_endpoint_ids       ) # pylint: disable=no-member
         copy_constraints (request.slice_constraints,         slice_rw.slice_constraints        ) # pylint: disable=no-member
         copy_config_rules(request.slice_config.config_rules, slice_rw.slice_config.config_rules) # pylint: disable=no-member
 
-        slice_id_with_uuids = context_client.SetSlice(slice_rw)
+        slice_id_with_uuids = context_client.SetSlice(slice_rw) #actualizar o crear la slice en la db
 
         if len(slice_rw.slice_endpoint_ids) < 2: # pylint: disable=no-member
             # unable to identify the kind of slice; just update endpoints, constraints and config rules
@@ -65,8 +68,9 @@ class SliceServiceServicerImpl(SliceServiceServicer):
             reply = context_client.SetSlice(slice_rw)
             context_client.close()
             return reply
+        #si tiene menos de 2 endpoints se omite la actualizacion y se retorna el sliceid
 
-        slice_with_uuids = context_client.GetSlice(slice_id_with_uuids)
+        slice_with_uuids = context_client.GetSlice(slice_id_with_uuids) #obtenemos la slice actualizada
 
         #LOGGER.info('json_current_slice = {:s}'.format(str(json_current_slice)))
         #json_updated_slice = grpc_message_to_json(request)
@@ -74,8 +78,8 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         #changes = deepdiff.DeepDiff(json_current_slice, json_updated_slice)
         #LOGGER.info('changes = {:s}'.format(str(changes)))
 
-        if is_inter_domain(context_client, slice_with_uuids.slice_endpoint_ids):
-            interdomain_client = InterdomainClient()
+        if is_inter_domain(context_client, slice_with_uuids.slice_endpoint_ids): #si la slice es interdominio
+            interdomain_client = InterdomainClient() #que es interdomain client?
             slice_id = interdomain_client.RequestSlice(slice_with_uuids)
             slice_ = context_client.GetSlice(slice_id)
             slice_active = Slice()
@@ -97,10 +101,10 @@ class SliceServiceServicerImpl(SliceServiceServicer):
 
         service_client = ServiceClient()
         try:
-            _service = context_client.GetService(service_id)
+            _service = context_client.GetService(service_id) #obtener info de un servicio si existe
         except: # pylint: disable=bare-except
             # pylint: disable=no-member
-            service_request = Service()
+            service_request = Service() # sino se crea un nuevo servicio
             service_request.service_id.CopyFrom(service_id)
             service_request.service_type = ServiceTypeEnum.SERVICETYPE_UNKNOWN
             service_request.service_status.service_status = ServiceStatusEnum.SERVICESTATUS_PLANNED
@@ -109,6 +113,7 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         service_request = Service()
         service_request.CopyFrom(_service)
 
+#actualiza el servicio con la info de la slice
         # pylint: disable=no-member
         copy_endpoint_ids(request.slice_endpoint_ids, service_request.service_endpoint_ids)
         copy_constraints(request.slice_constraints, service_request.service_constraints)
@@ -162,11 +167,11 @@ class SliceServiceServicerImpl(SliceServiceServicer):
         slice_active.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_ACTIVE # pylint: disable=no-member
         context_client.SetSlice(slice_active)
 
-        service_client.close()
-        context_client.close()
+        service_client.close() #liberar recursos, que es realmente?
+        context_client.close() #db teraflow
         return slice_id
 
-    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
+    @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) #agrega funcionalidades de metrica y seguridad
     def CreateSlice(self, request : Slice, context : grpc.ServicerContext) -> SliceId:
         #try:
         #    slice_ = context_client.GetSlice(request.slice_id)
@@ -196,7 +201,7 @@ class SliceServiceServicerImpl(SliceServiceServicer):
 
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def DeleteSlice(self, request : SliceId, context : grpc.ServicerContext) -> Empty:
-        context_client = ContextClient()
+        context_client = ContextClient() #coge la info de una slice
         try:
             _slice = context_client.GetSlice(request)
         except: # pylint: disable=bare-except
@@ -205,9 +210,11 @@ class SliceServiceServicerImpl(SliceServiceServicer):
 
         _slice_rw = Slice()
         _slice_rw.CopyFrom(_slice)
+        #cambia el status
         _slice_rw.slice_status.slice_status = SliceStatusEnum.SLICESTATUS_DEINIT # pylint: disable=no-member
         context_client.SetSlice(_slice_rw)
-
+#elimina la slice considerando si es interdominio o no, y desagrupa la slice eliminada
+        #elimina los servicios asociados a la slice
         if is_inter_domain(context_client, _slice.slice_endpoint_ids):
             interdomain_client = InterdomainClient()
             slice_id = interdomain_client.DeleteSlice(request)
diff --git a/src/slice/service/slice_grouper/SliceGrouper.py b/src/slice/service/slice_grouper/SliceGrouper.py
index 2f1a79181..d59531a1b 100644
--- a/src/slice/service/slice_grouper/SliceGrouper.py
+++ b/src/slice/service/slice_grouper/SliceGrouper.py
@@ -14,7 +14,7 @@
 
 import logging, pandas, threading
 from typing import Dict, Optional, Tuple
-from sklearn.cluster import KMeans
+from sklearn.cluster import KMeans #algoritmo de agrupamiento de scikit-learn (biblio de aprendizaje automatico)
 from common.proto.context_pb2 import Slice
 from common.tools.grpc.Tools import grpc_message_to_json_string
 from .Constants import SLICE_GROUPS
@@ -27,30 +27,30 @@ LOGGER = logging.getLogger(__name__)
 
 class SliceGrouper:
     def __init__(self) -> None:
-        self._lock = threading.Lock()
-        self._is_enabled = is_slice_grouping_enabled()
+        self._lock = threading.Lock() #controla el acceso concurrente
+        self._is_enabled = is_slice_grouping_enabled() #esta habilitado el agrupamiento de slices?
         LOGGER.info('Slice Grouping: {:s}'.format('ENABLED' if self._is_enabled else 'DISABLED'))
         if not self._is_enabled: return
 
-        metrics_exporter = MetricsExporter()
+        metrics_exporter = MetricsExporter() #instancia de la clase 
         metrics_exporter.create_table()
 
-        self._slice_groups = create_slice_groups(SLICE_GROUPS)
+        self._slice_groups = create_slice_groups(SLICE_GROUPS) #grupos de slices
 
         # Initialize and fit K-Means with the pre-defined clusters we want, i.e., one per slice group
-        df_groups = pandas.DataFrame(SLICE_GROUPS, columns=['name', 'availability', 'capacity_gbps'])
-        k_means = KMeans(n_clusters=df_groups.shape[0])
+        df_groups = pandas.DataFrame(SLICE_GROUPS, columns=['name', 'availability', 'capacity_gbps']) #data frame con info de los grupos
+        k_means = KMeans(n_clusters=df_groups.shape[0]) #modelo utilizado para el agrupamiento
         k_means.fit(df_groups[['availability', 'capacity_gbps']])
         df_groups['label'] = k_means.predict(df_groups[['availability', 'capacity_gbps']])
         self._k_means = k_means
         self._df_groups = df_groups
 
-        self._group_mapping : Dict[str, Dict] = {
+        self._group_mapping : Dict[str, Dict] = { #Dict = dictionary
             group['name']:{k:v for k,v in group.items() if k != 'name'}
-            for group in list(df_groups.to_dict('records'))
+            for group in list(df_groups.to_dict('records')) #mapeo de nombres de grupo a sus atributos
         }
 
-        label_to_group = {}
+        label_to_group = {} #mapeo de etiquetas a nombres de grupo
         for group_name,group_attrs in self._group_mapping.items():
             label = group_attrs['label']
             availability = group_attrs['availability']
@@ -60,7 +60,7 @@ class SliceGrouper:
             label_to_group[label] = group_name
         self._label_to_group = label_to_group
 
-    def _select_group(self, slice_obj : Slice) -> Optional[Tuple[str, float, float]]:
+    def _select_group(self, slice_obj : Slice) -> Optional[Tuple[str, float, float]]: #selecciona un grupo para una slice
         with self._lock:
             grouping_parameters = get_slice_grouping_parameters(slice_obj)
             LOGGER.debug('[_select_group] grouping_parameters={:s}'.format(str(grouping_parameters)))
@@ -78,16 +78,16 @@ class SliceGrouper:
             return group_name, availability, capacity_gbps
 
     @property
-    def is_enabled(self): return self._is_enabled
+    def is_enabled(self): return self._is_enabled #indica si el agrupamiento de slices esta habilitado
 
-    def group(self, slice_obj : Slice) -> bool:
+    def group(self, slice_obj : Slice) -> bool: #determina el grupo al que debe pertenecer la slice
         LOGGER.debug('[group] slice_obj={:s}'.format(grpc_message_to_json_string(slice_obj)))
         selected_group = self._select_group(slice_obj)
         LOGGER.debug('[group] selected_group={:s}'.format(str(selected_group)))
         if selected_group is None: return False
         return add_slice_to_group(slice_obj, selected_group)
 
-    def ungroup(self, slice_obj : Slice) -> bool:
+    def ungroup(self, slice_obj : Slice) -> bool: # desagrupa la slice de un grupo
         LOGGER.debug('[ungroup] slice_obj={:s}'.format(grpc_message_to_json_string(slice_obj)))
         selected_group = self._select_group(slice_obj)
         LOGGER.debug('[ungroup] selected_group={:s}'.format(str(selected_group)))
diff --git a/src/webui/service/main/routes.py b/src/webui/service/main/routes.py
index 75f036bef..eba758ff3 100644
--- a/src/webui/service/main/routes.py
+++ b/src/webui/service/main/routes.py
@@ -12,7 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import base64, json, logging #, re
+import base64, json, logging
+import traceback #, re
 from flask import jsonify, redirect, render_template, Blueprint, flash, session, url_for, request
 from common.proto.context_pb2 import ContextList, Empty, TopologyId, TopologyList
 from common.tools.descriptor.Loader import DescriptorLoader, compose_notifications
@@ -113,6 +114,7 @@ def home():
     except Exception as e: # pylint: disable=broad-except
         LOGGER.exception('Descriptor load failed')
         flash(f'Descriptor load failed: `{str(e)}`', 'danger')
+        traceback.print_exc()  # Agregar esta línea para imprimir el traceback completo
     finally:
         context_client.close()
         device_client.close()
-- 
GitLab