Skip to content
Snippets Groups Projects
Commit b15491a6 authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

PathComp Frontend:

- added test-deploy.sh script
- partial arrangement of GitLab CI/CD
- added configuration constants
- arranged Dockerfile
- corrected imports of proto Python files
- implemented composition of backend request
parent 9d31e1d1
No related branches found
No related tags found
1 merge request: !54 "Release 2.0.0"
......@@ -13,15 +13,15 @@
# limitations under the License.
# Build, tag and push the Docker image to the GitLab registry
build pathcomp:
build pathcomp_frontend:
variables:
IMAGE_NAME: 'pathcomp' # name of the microservice
IMAGE_NAME: 'pathcomp_frontend' # name of the microservice
IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
stage: build
before_script:
- docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
script:
- docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile .
- docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/frontend/Dockerfile .
- docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
- docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
after_script:
......@@ -32,9 +32,34 @@ build pathcomp:
- changes:
- src/common/**/*.py
- proto/*.proto
- src/$IMAGE_NAME/**/*.{py,in,yml}
- src/$IMAGE_NAME/Dockerfile
- src/$IMAGE_NAME/tests/*.py
- src/$IMAGE_NAME/frontend/**/*.{py,in,yml}
- src/$IMAGE_NAME/frontend/Dockerfile
- src/$IMAGE_NAME/frontend/tests/*.py
- manifests/${IMAGE_NAME}service.yaml
- .gitlab-ci.yml
# Build, tag and push the PathComp backend Docker image to the GitLab registry.
# NOTE(review): the component sources live under src/pathcomp/backend (see
# test-deploy.sh), not src/$IMAGE_NAME/backend ('pathcomp_backend'), so the
# Dockerfile path and the rules:changes patterns are spelled out literally.
# GitLab did not expand CI variables inside rules:changes in older versions,
# so $IMAGE_NAME patterns there would never match anyway — TODO confirm against
# the GitLab version in use.
build pathcomp_backend:
  variables:
    IMAGE_NAME: 'pathcomp_backend' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: build
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/pathcomp/backend/Dockerfile .
    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
  after_script:
    # Remove dangling images left behind by the build to reclaim disk space.
    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
        - proto/*.proto
        - src/pathcomp/.gitlab-ci.yml
        - src/pathcomp/backend/**/*.{c,h,conf}
        - src/pathcomp/backend/Makefile
        - src/pathcomp/backend/Dockerfile
        - manifests/pathcompservice.yaml
        - .gitlab-ci.yml
......
......@@ -11,3 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# URL template of the PathComp backend REST endpoint; filled with a host (str)
# and a port (int) via str.format, e.g. BACKEND_URL.format(BACKEND_HOST, BACKEND_PORT).
BACKEND_URL = 'http://{:s}:{:d}/pathComp/api/v1/compRoute'
# NOTE(review): hard-coded address of the backend container; matches the static
# IP assigned in test-deploy.sh (bridge network 172.28.0.0/24, backend at .2).
# Consider reading these from environment variables for non-test deployments.
BACKEND_HOST = '172.28.0.2'
BACKEND_PORT = 8081
......@@ -56,7 +56,7 @@ RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;
# Create component sub-folders, get specific Python packages
RUN mkdir -p /var/teraflow/pathcomp
WORKDIR /var/teraflow/pathcomp
COPY src/pathcomp/requirements.in requirements.in
COPY src/pathcomp/frontend/requirements.in requirements.in
RUN pip-compile --quiet --output-file=requirements.txt requirements.in
RUN python3 -m pip install -r requirements.txt
......@@ -66,4 +66,4 @@ COPY src/context/. context/
COPY src/pathcomp/. pathcomp/
# Start the service
ENTRYPOINT ["python", "-m", "pathcomp.service"]
ENTRYPOINT ["python", "-m", "pathcomp.frontend.service"]
......@@ -15,7 +15,7 @@
from common.Constants import ServiceNameEnum
from common.Settings import get_service_port_grpc
from common.tools.service.GenericGrpcService import GenericGrpcService
from pathcomp.proto.pathcomp_pb2_grpc import add_PathCompServiceServicer_to_server
from common.proto.pathcomp_pb2_grpc import add_PathCompServiceServicer_to_server
from .PathCompServiceServicerImpl import PathCompServiceServicerImpl
class PathCompService(GenericGrpcService):
......
......@@ -12,14 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc, json, logging, requests, uuid
from typing import List
import grpc, logging, uuid
from common.proto.context_pb2 import Connection, Empty, EndPointId
from common.proto.pathcomp_pb2 import PathCompReply, PathCompRequest
from common.proto.pathcomp_pb2_grpc import PathCompServiceServicer
from common.rpc_method_wrapper.Decorator import create_metrics, safe_and_metered_rpc_method
from common.tools.grpc.Tools import grpc_message_to_json, grpc_message_to_json_string
from context.client.ContextClient import ContextClient
from pathcomp.frontend.Config import BACKEND_HOST, BACKEND_PORT, BACKEND_URL
from pathcomp.frontend.service.tools.ComposeRequest import compose_device, compose_link, compose_service
LOGGER = logging.getLogger(__name__)
......@@ -36,28 +38,63 @@ class PathCompServiceServicerImpl(PathCompServiceServicer):
def Compute(self, request : PathCompRequest, context : grpc.ServicerContext) -> PathCompReply:
LOGGER.info('[Compute] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
algorithm = request.WhichOneof('algorithm')
if algorithm == 'shortest_path':
# no attributes
pass
elif algorithm == 'k_shortest_path':
k_inspection = request.k_shortest_path.k_inspection
k_return = request.k_shortest_path.k_return
else:
raise NotImplementedError('Unsupported Algorithm: {:s}'.format(str(algorithm)))
context_client = ContextClient()
algorithm = {'id': 'KSP', 'sync': False, 'k_paths': k_return}
service_list = [
compose_service(grpc_service, algorithm)
for grpc_service in request.services
]
# TODO: consider filtering resources
grpc_contexts = context_client.ListContexts(Empty())
#grpc_contexts = context_client.ListContexts(Empty())
#for grpc_context in grpc_contexts.contexts:
# # TODO: add context to request
# grpc_topologies = context_client.ListTopologies(grpc_context.context_id)
# for grpc_topology in grpc_topologies.topologies: #pylint: disable=unused-variable
# # TODO: add topology to request
# pass
grpc_devices = context_client.ListDevices(Empty())
device_list = [
compose_device(grpc_device)
for grpc_device in grpc_devices.devices
]
grpc_links = context_client.ListLinks(Empty())
for grpc_context in grpc_contexts.contexts:
# TODO: add context to request
grpc_topologies = context_client.ListTopologies(grpc_context.context_id)
for grpc_topology in grpc_topologies.topologies: #pylint: disable=unused-variable
# TODO: add topology to request
pass
for grpc_device in grpc_devices.devices: #pylint: disable=unused-variable
# TODO: add device to request
pass
for grpc_link in grpc_links.links: #pylint: disable=unused-variable
# TODO: add link to request
pass
link_list = [
compose_link(grpc_link)
for grpc_link in grpc_links.links
]
request = {
'serviceList': service_list,
'deviceList' : device_list,
'linkList' : link_list,
}
backend_url = BACKEND_URL.format(BACKEND_HOST, BACKEND_PORT)
reply = requests.post(backend_url, json=request)
if reply.status_code not in {requests.codes.ok}:
raise Exception('Backend error({:s}) for request({:s})'.format(
str(reply.content.decode('UTF-8')), json.dumps(request, sort_keys=True)))
LOGGER.info('status_code={:s} reply={:s}'.format(
str(reply.status_code), str(reply.content.decode('UTF-8'))))
reply = PathCompReply()
# TODO: issue path computation request
# TODO: compose reply populating reply.services and reply.connections
for service in request.services:
......
......@@ -12,14 +12,60 @@
# See the License for the specific language governing permissions and
# limitations under the License.
def compose_topology_id(context_uuid, topology_uuid) -> Dict:
from enum import IntEnum
from typing import Dict
from common.proto.context_pb2 import Device, Link, Service
from common.tools.grpc.Tools import grpc_message_to_json_string
class CapacityUnit(IntEnum):
    """Capacity units encoded as integers for the PathComp backend REST API.

    NOTE(review): members alternate between sizes (TB, GB, ...) and rates
    (TBPS, GBPS, ...); confirm the expected values against the backend's
    compRoute request schema.
    """
    TB = 0
    TBPS = 1
    GB = 2
    GBPS = 3
    MB = 4
    MBPS = 5
    KB = 6
    KBPS = 7
    GHZ = 8
    MHZ = 9
class LinkPortDirection(IntEnum):
    """Directionality of a link port, encoded for the backend REST API."""
    BIDIRECTIONAL = 0
    INPUT = 1
    OUTPUT = 2
    UNKNOWN = 3
class TerminationDirection(IntEnum):
    """Directionality of an endpoint termination, encoded for the backend REST API."""
    BIDIRECTIONAL = 0
    SINK = 1
    SOURCE = 2
    UNKNOWN = 3
class TerminationState(IntEnum):
    """Termination state of an endpoint, encoded for the backend REST API.

    NOTE(review): 'PERMENANTLY_TERMINATED' has a spelling typo (should be
    PERMANENTLY); kept as-is since renaming would break any caller referencing
    the member by name.
    """
    CAN_NEVER_TERMINATE = 0
    NOT_TERMINATED = 1
    TERMINATED_SERVER_TO_CLIENT = 2
    TERMINATED_CLIENT_TO_SERVER = 3
    TERMINATED_BIDIRECTIONAL = 4
    PERMENANTLY_TERMINATED = 5
    TERMINATION_STATE_UNKNOWN = 6
class LinkForwardingDirection(IntEnum):
    """Forwarding directionality of a link, encoded for the backend REST API."""
    BIDIRECTIONAL = 0
    UNIDIRECTIONAL = 1
    UNKNOWN = 2
def compose_topology_id(context_uuid : str, topology_uuid : str) -> Dict:
    """Build the backend JSON representation of a topology identifier."""
    return dict(contextId=context_uuid, topology_uuid=topology_uuid)
def compose_endpoint_id(topology_id : Dict, device_uuid, endpoint_uuid) -> Dict:
def compose_service_id(context_uuid : str, service_uuid : str) -> Dict:
    """Build the backend JSON representation of a service identifier."""
    return dict(contextId=context_uuid, service_uuid=service_uuid)
def compose_endpoint_id(topology_id : Dict, device_uuid : str, endpoint_uuid : str) -> Dict:
    """Build the backend JSON representation of an endpoint identifier."""
    endpoint_id = {}
    endpoint_id['topology_id'] = topology_id
    endpoint_id['device_id'] = device_uuid
    endpoint_id['endpoint_uuid'] = endpoint_uuid
    return endpoint_id
def compose_capacity(value, unit) -> Dict:
return {'total-size': 'value': value, 'unit': unit}
def compose_capacity(value : str, unit : str) -> Dict:
    """Build the backend JSON representation of a capacity (value + unit)."""
    size = dict(value=value, unit=unit)
    return {'total-size': size}
def compose_endpoint(
endpoint_id : Dict, endpoint_type : str, link_port_direction : int, termination_direction : int,
......@@ -31,23 +77,110 @@ def compose_endpoint(
'total-potential-capacity': total_potential_capacity, 'available-capacity': available_capacity,
}
def compose_cost_characteristics(cost_name : str, cost_value : str, cost_algorithm : str) -> Dict:
    """Build the backend JSON representation of a link's cost characteristics."""
    characteristics = {
        'cost-name'     : cost_name,
        'cost-value'    : cost_value,
        'cost-algorithm': cost_algorithm,
    }
    return characteristics
def compose_latency_characteristics(fixed_latency_characteristic : str) -> Dict:
    """Build the backend JSON representation of a link's latency characteristics."""
    key = 'fixed-latency-characteristic'
    return {key: fixed_latency_characteristic}
def compose_constraint(constraint_type : str, constraint_value : str) -> Dict:
    """Build the backend JSON representation of a service constraint."""
    return dict(constraint_type=constraint_type, constraint_value=constraint_value)
def compose_device(grpc_device : Device) -> Dict:
device_uuid = grpc_device.device_id.device_uuid.uuid
device_type = grpc_device.device_type
endpoints = []
for device_endpoint in grpc_device.device_endpoints:
topology_id = compose_topology_id(endpoint_context_uuid, endpoint_topology_uuid)
context_uuid = device_endpoint.endpoint_id.topology_id.context_id.context_uuid.uuid
topology_uuid = device_endpoint.endpoint_id.topology_id.topology_uuid.uuid
endpoint_uuid = device_endpoint.endpoint_id.endpoint_uuid.uuid
endpoint_type = device_endpoint.endpoint_type
topology_id = compose_topology_id(context_uuid, topology_uuid)
endpoint_id = compose_endpoint_id(topology_id, device_uuid, endpoint_uuid)
endpoint_type = 'termination'
link_port_direction = 0
termination_direction = 0
termination_state = 0 or 4
total_potential_capacity = compose_capacity(200, 5)
available_capacity = compose_capacity(200, 5)
link_port_direction = LinkPortDirection.BIDIRECTIONAL.value
termination_direction = TerminationDirection.BIDIRECTIONAL.value
termination_state = TerminationState.TERMINATED_BIDIRECTIONAL.value
total_potential_capacity = compose_capacity(200, CapacityUnit.MBPS.value)
available_capacity = compose_capacity(200, CapacityUnit.MBPS.value)
endpoint = compose_endpoint(
endpoint_id, endpoint_type, link_port_direction, termination_direction,
termination_state, total_potential_capacity, available_capacity)
endpoints.append(endpoint)
return {'device_Id': device_uuid, 'device_type': device_type, 'device_endpoints': endpoints}
\ No newline at end of file
return {'device_Id': device_uuid, 'device_type': device_type, 'device_endpoints': endpoints}
def compose_link(grpc_link : Link) -> Dict:
    """Convert a gRPC Link message into the JSON dict expected by the
    PathComp backend's linkList.

    Capacity, cost and latency values are hard-coded placeholders in this
    version (200 MBPS, cost '1'/'0', fixed latency '2').
    """
    endpoint_ids = []
    for grpc_endpoint_id in grpc_link.link_endpoint_ids:
        topology_id = compose_topology_id(
            grpc_endpoint_id.topology_id.context_id.context_uuid.uuid,
            grpc_endpoint_id.topology_id.topology_uuid.uuid)
        endpoint_id = compose_endpoint_id(
            topology_id,
            grpc_endpoint_id.device_id.device_uuid.uuid,
            grpc_endpoint_id.endpoint_uuid.uuid)
        endpoint_ids.append({'endpoint_id' : endpoint_id})

    return {
        'link_Id'                 : grpc_link.link_id.link_uuid.uuid,
        'link_endpoint_ids'       : endpoint_ids,
        'forwarding_direction'    : LinkForwardingDirection.UNIDIRECTIONAL.value,
        'total-potential-capacity': compose_capacity(200, CapacityUnit.MBPS.value),
        'available-capacity'      : compose_capacity(200, CapacityUnit.MBPS.value),
        'cost-characteristics'    : compose_cost_characteristics('linkcost', '1', '0'),
        'latency-characteristics' : compose_latency_characteristics('2'),
    }
def compose_service(grpc_service : Service, algorithm : Dict) -> Dict:
    """Convert a gRPC Service message plus algorithm settings into the JSON
    entry expected by the PathComp backend's serviceList.

    Args:
        grpc_service: gRPC Service (id, type, endpoint ids, constraints).
        algorithm: dict with optional keys 'id' (default 'SP'), 'sync'
            (default False) and 'k_paths' (default 1).

    Raises:
        NotImplementedError: for any constraint whose oneof kind is not 'custom'.
    """
    # BUGFIX(review): the original accessed grpc_service.service_id.service_id,
    # doubling the 'service_id' attribute. ServiceId exposes context_id and
    # service_uuid directly — consistent with compose_service_id's parameters
    # and with the single-level access used by compose_link/compose_device.
    # TODO confirm against common.proto.context ServiceId definition.
    grpc_service_id = grpc_service.service_id
    service_id = compose_service_id(
        grpc_service_id.context_id.context_uuid.uuid, grpc_service_id.service_uuid.uuid)

    service_type = grpc_service.service_type

    endpoint_ids = []
    for service_endpoint_id in grpc_service.service_endpoint_ids:
        topology_id = compose_topology_id(
            service_endpoint_id.topology_id.context_id.context_uuid.uuid,
            service_endpoint_id.topology_id.topology_uuid.uuid)
        endpoint_ids.append(compose_endpoint_id(
            topology_id,
            service_endpoint_id.device_id.device_uuid.uuid,
            service_endpoint_id.endpoint_uuid.uuid))

    constraints = []
    for service_constraint in grpc_service.service_constraints:
        # Only 'custom' (type/value string pair) constraints are supported.
        if service_constraint.WhichOneof('constraint') != 'custom':
            MSG = 'Constraint({:s}) not supported'
            str_constraint = grpc_message_to_json_string(service_constraint)
            raise NotImplementedError(MSG.format(str_constraint))
        constraints.append(compose_constraint(
            service_constraint.custom.constraint_type,
            service_constraint.custom.constraint_value))

    # 'syncPaths': if true and multiple services are included in the request,
    # services are computed one after the other and resources assigned to
    # service i are considered used by services i+1..n (prevents contention).
    return {
        'serviceId'            : service_id,
        'serviceType'          : service_type,
        'service_endpoints_ids': endpoint_ids,
        'service_constraints'  : constraints,
        'algId'                : algorithm.get('id', 'SP'),
        'syncPaths'            : algorithm.get('sync', False),
        'kPaths'               : algorithm.get('k_paths', 1),
    }
......@@ -18,7 +18,7 @@ from common.proto.pathcomp_pb2 import PathCompRequest
from common.tools.grpc.Tools import grpc_message_to_json
from context.client.ContextClient import ContextClient
from device.client.DeviceClient import DeviceClient
from pathcomp.client.PathCompClient import PathCompClient
from pathcomp.frontend.client.PathCompClient import PathCompClient
from .Objects import CONTEXTS, DEVICES, LINKS, SERVICES, TOPOLOGIES
from .PrepareTestScenario import ( # pylint: disable=unused-import
# be careful, order of symbols is important here!
......
#!/bin/bash
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Build the frontend and backend images, run them on an isolated bridge
# network, execute the frontend unit tests, and clean everything up.

docker build -t "pathcomp-frontend:latest" -f ./src/pathcomp/frontend/Dockerfile .
docker build -t "pathcomp-backend:latest" -f ./src/pathcomp/backend/Dockerfile .

# BUGFIX(review): the original created network 'tfbr' but removed
# 'teraflowbridge', leaking the bridge on every run; use one name throughout.
NETWORK_NAME="teraflowbridge"
docker network create --driver=bridge --subnet=172.28.0.0/24 --gateway=172.28.0.254 $NETWORK_NAME

# Static IPs must match BACKEND_HOST/BACKEND_PORT in src/pathcomp/frontend/Config.py
docker run --name pathcomp-frontend -d --network=$NETWORK_NAME --ip 172.28.0.1 pathcomp-frontend:latest
docker run --name pathcomp-backend  -d --network=$NETWORK_NAME --ip 172.28.0.2 pathcomp-backend:latest

# BUGFIX(review): run the tests while the containers are still alive; the
# original removed them first and exec'ed into a container named 'pathcomp'
# that was never created. Test path follows the frontend/ relocation —
# TODO confirm it matches the image's /var/teraflow layout.
docker exec -i pathcomp-frontend bash -c "pytest --log-level=INFO --verbose pathcomp/frontend/tests/test_unitary.py"

# Cleanup: containers, network, and dangling images.
docker rm -f pathcomp-frontend pathcomp-backend
docker network rm $NETWORK_NAME
docker images --filter="dangling=true" --quiet | xargs -r docker rmi
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment