# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc, json, logging, os, requests, uuid
from typing import Dict, Tuple
from common.proto.context_pb2 import Empty, EndPointId, Service
from common.proto.pathcomp_pb2 import PathCompReply, PathCompRequest
from common.proto.pathcomp_pb2_grpc import PathCompServiceServicer
from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
from common.tools.grpc.Tools import grpc_message_to_json_string
from context.client.ContextClient import ContextClient
from pathcomp.frontend.Config import BACKEND_HOST, BACKEND_PORT, BACKEND_URL
from pathcomp.frontend.service.tools.ComposeRequest import compose_device, compose_link, compose_service
#from pathcomp.frontend.service.tools.Constants import CapacityUnit
LOGGER = logging.getLogger(__name__)
SERVICE_NAME = 'PathComp'
METHOD_NAMES = ['Compute']
METRICS = create_metrics(SERVICE_NAME, METHOD_NAMES)
class PathCompServiceServicerImpl(PathCompServiceServicer):
    def __init__(self) -> None:
        LOGGER.debug('Creating Servicer...')
        LOGGER.debug('Servicer Created')
    @safe_and_metered_rpc_method(METRICS, LOGGER)
    def Compute(self, request : PathCompRequest, context : grpc.ServicerContext) -> PathCompReply:
        LOGGER.info('os.environ={:s}'.format(str(os.environ)))
        LOGGER.info('[Compute] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
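        # Select the algorithm requested by the client and map it to the settings of the backend KSP engine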
        algorithm = request.WhichOneof('algorithm')
        if algorithm == 'shortest_path':
            # shortest_path carries no attributes; fall back to requesting a single path from the KSP engine
            k_inspection = 1
            k_return = 1
        elif algorithm == 'k_shortest_path':
            k_inspection = request.k_shortest_path.k_inspection
            k_return = request.k_shortest_path.k_return
        else:
            raise NotImplementedError('Unsupported Algorithm: {:s}'.format(str(algorithm)))
        algorithm = {'id': 'KSP', 'sync': False, 'k_paths': k_return}
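        # Convert each requested gRPC Service into the JSON representation expected by the backend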
        service_list = [
            compose_service(grpc_service, algorithm)
            for grpc_service in request.services
        ]
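        # Index the services by (context_uuid, service_uuid) so backend responses can be matched back to the request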
        get_service_key = lambda service_id: (service_id['contextId'], service_id['service_uuid'])
        service_dict : Dict[Tuple[str, str], Tuple[Dict, Service]] = {
            get_service_key(json_service['serviceId']): (json_service, grpc_service)
            for json_service,grpc_service in zip(service_list, request.services)
        }
        #LOGGER.info('service_dict = {:s}'.format(str(service_dict)))

        context_client = ContextClient()
        #grpc_contexts = context_client.ListContexts(Empty())
        #for grpc_context in grpc_contexts.contexts:
        #    # TODO: add context to request
        #    grpc_topologies = context_client.ListTopologies(grpc_context.context_id)
        #    for grpc_topology in grpc_topologies.topologies:    #pylint: disable=unused-variable
        #        # TODO: add topology to request
        #        pass
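        # Retrieve the devices from the Context component, convert them to the backend format, and index their
        # endpoints by (device_uuid, endpoint_uuid) so path hops in the reply can be mapped back to gRPC EndPointIds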
        grpc_devices = context_client.ListDevices(Empty())
        device_list = [
            compose_device(grpc_device)
            for grpc_device in grpc_devices.devices
        ]
        endpoint_dict : Dict[str, Dict[str, Tuple[Dict, EndPointId]]] = {
            json_device['device_Id']: {
                json_endpoint['endpoint_id']['endpoint_uuid']: (json_endpoint['endpoint_id'], grpc_endpoint.endpoint_id)
                for json_endpoint,grpc_endpoint in zip(json_device['device_endpoints'], grpc_device.device_endpoints)
            }
            for json_device,grpc_device in zip(device_list, grpc_devices.devices)
        }
        #LOGGER.info('endpoint_dict = {:s}'.format(str(endpoint_dict)))
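        # Retrieve the links from the Context component and convert them to the backend format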
        grpc_links = context_client.ListLinks(Empty())
        link_list = [
            compose_link(grpc_link)
            for grpc_link in grpc_links.links
        ]
        request = {
            'serviceList': service_list,
            'deviceList' : device_list,
            'linkList'   : link_list,
        }
        #with open('pc-req.json', 'w', encoding='UTF-8') as f:
        #    f.write(json.dumps(request, sort_keys=True, indent=4))
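        # POST the composed request to the backend path computation engine and validate the HTTP status code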
        backend_url = BACKEND_URL.format(BACKEND_HOST, BACKEND_PORT)
        reply = requests.post(backend_url, json=request)
        if reply.status_code not in {requests.codes.ok}:
            raise Exception('Backend error({:s}) for request({:s})'.format(
                str(reply.content.decode('UTF-8')), json.dumps(request, sort_keys=True)))
        LOGGER.info('status_code={:s} reply={:s}'.format(
            str(reply.status_code), str(reply.content.decode('UTF-8'))))
        json_reply = reply.json()

        reply = PathCompReply()
        response_list = json_reply.get('response-list', [])
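        # Translate each backend response into a Service and a Connection of the gRPC reply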
        for response in response_list:
            service_key = get_service_key(response['serviceId'])
            tuple_service = service_dict.get(service_key)
            if tuple_service is None: raise Exception('ServiceKey({:s}) not found'.format(str(service_key)))
            json_service, grpc_service = tuple_service
            # TODO: implement support for multi-point services
            service_endpoint_ids = grpc_service.service_endpoint_ids
            if len(service_endpoint_ids) != 2: raise NotImplementedError('Service must have 2 endpoints')
            service = reply.services.add()
            service.CopyFrom(grpc_service)
            connection = reply.connections.add()
            connection.connection_id.connection_uuid.uuid = str(uuid.uuid4())
            connection.service_id.CopyFrom(service.service_id)
            no_path_issue = response.get('noPath', {}).get('issue')
            if no_path_issue is not None:
                # no path found: leave connection with no endpoints
                # no_path_issue == 1 => no path due to a constraint
                continue
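            # Translate each computed path into the ordered list of endpoint hops of the connection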
            service_paths = response['path']
            for service_path in service_paths:
                # ... "path-capacity": {"total-size": {"value": 200, "unit": 0}},
                # ... "path-latency": {"fixed-latency-characteristic": "10.000000"},
                # ... "path-cost": {"cost-name": "", "cost-value": "5.000000", "cost-algorithm": "0.000000"},
                #path_capacity = service_path['path-capacity']['total-size']
                #path_capacity_value = path_capacity['value']
                #path_capacity_unit = CapacityUnit(path_capacity['unit'])
                #path_latency = service_path['path-latency']['fixed-latency-characteristic']
                #path_cost = service_path['path-cost']
                #path_cost_name = path_cost['cost-name']
                #path_cost_value = path_cost['cost-value']
                #path_cost_algorithm = path_cost['cost-algorithm']
                path_endpoints = service_path['devices']
                for endpoint in path_endpoints:
                    device_uuid = endpoint['device_id']
                    endpoint_uuid = endpoint['endpoint_uuid']
                    endpoint_id = connection.path_hops_endpoint_ids.add()
                    endpoint_id.CopyFrom(endpoint_dict[device_uuid][endpoint_uuid][1])
        LOGGER.info('[Compute] end ; reply = {:s}'.format(grpc_message_to_json_string(reply)))
        return reply