Commit f28cab56 authored by Lluis Gifre Renom

Merge branch 'develop' of https://gitlab.com/teraflow-h2020/controller into feat/webui

parents 6fd16a3e cfe69c57
1 merge request: !54 Release 2.0.0
Showing changes with 819 additions and 273 deletions
......@@ -12,38 +12,47 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging, threading
from flask import Flask
from flask_restful import Api
import functools, logging, threading, time
from typing import Optional, Union
from flask import Flask, request
from flask_restful import Api, Resource
from werkzeug.serving import make_server
from context.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT
from common.Settings import get_http_bind_address
logging.getLogger('werkzeug').setLevel(logging.WARNING)
BIND_ADDRESS = '0.0.0.0'
LOGGER = logging.getLogger(__name__)
class Server(threading.Thread):
def __init__(self, host=BIND_ADDRESS, port=RESTAPI_SERVICE_PORT, base_url=RESTAPI_BASE_URL):
def log_request(logger, response):
timestamp = time.strftime('[%Y-%b-%d %H:%M]')
logger.info('%s %s %s %s %s', timestamp, request.remote_addr, request.method, request.full_path, response.status)
return response
class GenericRestServer(threading.Thread):
def __init__(
self, bind_port : Union[str, int], base_url : str, bind_address : Optional[str] = None,
cls_name : str = __name__
) -> None:
threading.Thread.__init__(self, daemon=True)
self.host = host
self.port = port
self.logger = logging.getLogger(cls_name)
self.bind_port = bind_port
self.base_url = base_url
self.bind_address = get_http_bind_address() if bind_address is None else bind_address
self.endpoint = 'http://{:s}:{:s}{:s}'.format(str(self.bind_address), str(self.bind_port), str(self.base_url))
self.srv = None
self.ctx = None
self.app = Flask(__name__)
self.app.after_request(functools.partial(log_request, self.logger))
self.api = Api(self.app, prefix=self.base_url)
def add_resource(self, resource, *urls, **kwargs):
def add_resource(self, resource : Resource, *urls, **kwargs):
self.api.add_resource(resource, *urls, **kwargs)
def run(self):
self.srv = make_server(self.host, self.port, self.app, threaded=True)
self.srv = make_server(self.bind_address, self.bind_port, self.app, threaded=True)
self.ctx = self.app.app_context()
self.ctx.push()
endpoint = 'http://{:s}:{:s}{:s}'.format(str(self.host), str(self.port), str(self.base_url))
LOGGER.info('Listening on {:s}...'.format(str(endpoint)))
self.logger.info('Listening on {:s}...'.format(str(self.endpoint)))
self.srv.serve_forever()
def shutdown(self):
......
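For context, a minimal usage sketch of the new GenericRestServer follows. The Hello resource, port and URLs are illustrative only (not part of this commit); the constructor and add_resource signatures match the code above, and 8080 / '/restconf/data' are the defaults being removed from the per-service Config files.

from flask_restful import Resource
from common.tools.service.GenericRestServer import GenericRestServer

class Hello(Resource):  # hypothetical resource, for illustration only
    def get(self):
        return {'message': 'hello'}

server = GenericRestServer(8080, '/restconf/data')  # bind_port, base_url
server.add_resource(Hello, '/hello')                # exposed under the base_url prefix, i.e. /restconf/data/hello
server.start()       # threading.Thread.start() invokes run(), which calls serve_forever()
# ... handle requests ...
server.shutdown()    # stops the underlying werkzeug server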
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
......@@ -56,7 +56,6 @@ unit test compute:
- docker ps -a
- docker logs $IMAGE_NAME
- docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
- docker exec -i $IMAGE_NAME bash -c "coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml"
- docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
after_script:
......@@ -64,7 +63,7 @@ unit test compute:
- docker network rm teraflowbridge
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
- if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
- if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
- changes:
- src/$IMAGE_NAME/**/*.{py,in,yml}
- src/$IMAGE_NAME/Dockerfile
......@@ -76,7 +75,6 @@ unit test compute:
when: always
reports:
junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml
# Deployment of the service in Kubernetes Cluster
deploy compute:
......
......@@ -12,23 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from werkzeug.security import generate_password_hash
# General settings
LOG_LEVEL = logging.WARNING
# gRPC settings
GRPC_SERVICE_PORT = 9090
GRPC_MAX_WORKERS = 10
GRPC_GRACE_PERIOD = 60
# REST-API settings
RESTAPI_SERVICE_PORT = 8080
RESTAPI_BASE_URL = '/restconf/data'
# REST-API users
RESTAPI_USERS = { # TODO: implement a database of credentials and permissions
'admin': generate_password_hash('admin'),
}
# Prometheus settings
METRICS_PORT = 9192
......@@ -46,6 +46,7 @@ COPY common/. common
COPY compute/. compute
COPY context/. context
COPY service/. service
COPY slice/. slice
# Start compute service
ENTRYPOINT ["python", "-m", "compute.service"]
......@@ -13,7 +13,10 @@
# limitations under the License.
import grpc, logging
from common.Constants import ServiceNameEnum
from common.Settings import get_service_host, get_service_port_grpc
from common.tools.client.RetryDecorator import retry, delay_exponential
from common.tools.grpc.Tools import grpc_message_to_json_string
from compute.proto.compute_pb2_grpc import ComputeServiceStub
from compute.proto.context_pb2 import (
AuthenticationResult, Empty, Service, ServiceId, ServiceIdList, ServiceStatus, TeraFlowController)
......@@ -24,8 +27,10 @@ DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
class ComputeClient:
def __init__(self, address, port):
self.endpoint = '{:s}:{:s}'.format(str(address), str(port))
def __init__(self, host=None, port=None):
if not host: host = get_service_host(ServiceNameEnum.COMPUTE)
if not port: port = get_service_port_grpc(ServiceNameEnum.COMPUTE)
self.endpoint = '{:s}:{:s}'.format(str(host), str(port))
LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
self.channel = None
self.stub = None
......@@ -37,55 +42,55 @@ class ComputeClient:
self.stub = ComputeServiceStub(self.channel)
def close(self):
if(self.channel is not None): self.channel.close()
if self.channel is not None: self.channel.close()
self.channel = None
self.stub = None
@RETRY_DECORATOR
def CheckCredentials(self, request : TeraFlowController) -> AuthenticationResult:
LOGGER.debug('CheckCredentials request: {:s}'.format(str(request)))
LOGGER.debug('CheckCredentials request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.CheckCredentials(request)
LOGGER.debug('CheckCredentials result: {:s}'.format(str(response)))
LOGGER.debug('CheckCredentials result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def GetConnectivityServiceStatus(self, request : ServiceId) -> ServiceStatus:
LOGGER.debug('GetConnectivityServiceStatus request: {:s}'.format(str(request)))
LOGGER.debug('GetConnectivityServiceStatus request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.GetConnectivityServiceStatus(request)
LOGGER.debug('GetConnectivityServiceStatus result: {:s}'.format(str(response)))
LOGGER.debug('GetConnectivityServiceStatus result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def CreateConnectivityService(self, request : Service) -> ServiceId:
LOGGER.debug('CreateConnectivityService request: {:s}'.format(str(request)))
LOGGER.debug('CreateConnectivityService request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.CreateConnectivityService(request)
LOGGER.debug('CreateConnectivityService result: {:s}'.format(str(response)))
LOGGER.debug('CreateConnectivityService result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def EditConnectivityService(self, request : Service) -> ServiceId:
LOGGER.debug('EditConnectivityService request: {:s}'.format(str(request)))
LOGGER.debug('EditConnectivityService request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.EditConnectivityService(request)
LOGGER.debug('EditConnectivityService result: {:s}'.format(str(response)))
LOGGER.debug('EditConnectivityService result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def DeleteConnectivityService(self, request : Service) -> Empty:
LOGGER.debug('DeleteConnectivityService request: {:s}'.format(str(request)))
LOGGER.debug('DeleteConnectivityService request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.DeleteConnectivityService(request)
LOGGER.debug('DeleteConnectivityService result: {:s}'.format(str(response)))
LOGGER.debug('DeleteConnectivityService result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def GetAllActiveConnectivityServices(self, request : Empty) -> ServiceIdList:
LOGGER.debug('GetAllActiveConnectivityServices request: {:s}'.format(str(request)))
LOGGER.debug('GetAllActiveConnectivityServices request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.GetAllActiveConnectivityServices(request)
LOGGER.debug('GetAllActiveConnectivityServices result: {:s}'.format(str(response)))
LOGGER.debug('GetAllActiveConnectivityServices result: {:s}'.format(grpc_message_to_json_string(response)))
return response
@RETRY_DECORATOR
def ClearAllConnectivityServices(self, request : Empty) -> Empty:
LOGGER.debug('ClearAllConnectivityServices request: {:s}'.format(str(request)))
LOGGER.debug('ClearAllConnectivityServices request: {:s}'.format(grpc_message_to_json_string(request)))
response = self.stub.ClearAllConnectivityServices(request)
LOGGER.debug('ClearAllConnectivityServices result: {:s}'.format(str(response)))
LOGGER.debug('ClearAllConnectivityServices result: {:s}'.format(grpc_message_to_json_string(response)))
return response
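A hedged usage sketch of the updated client: with no arguments, host and port are now resolved from the environment via ServiceNameEnum.COMPUTE (COMPUTESERVICE_SERVICE_HOST / COMPUTESERVICE_SERVICE_PORT_GRPC, following the naming pattern shown in __main__.py below). The client module path is assumed.

from compute.client.ComputeClient import ComputeClient   # module path assumed
from compute.proto.context_pb2 import Empty

client = ComputeClient()   # host/port resolved via get_service_host / get_service_port_grpc
client.connect()           # also triggered by the retry decorator when needed (prepare_method_name='connect')
service_ids = client.GetAllActiveConnectivityServices(Empty())
client.close()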
#!/bin/bash -eu
#
# Copyright 2018 Google LLC
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
......@@ -14,14 +14,27 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -e
# Make folder containing the script the root folder for its execution
cd $(dirname $0)
rm -rf proto/*.py
rm -rf proto/__pycache__
touch proto/__init__.py
tee proto/__init__.py << EOF > /dev/null
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EOF
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
......
This diff is collapsed.
......@@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto2\xfd\x01\n\x0eServiceService\x12\x37\n\rCreateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rDeleteService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12\x42\n\x11GetConnectionList\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x62\x06proto3'
serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto2\xb9\x01\n\x0eServiceService\x12\x37\n\rCreateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rDeleteService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,])
......@@ -38,7 +38,7 @@ _SERVICESERVICE = _descriptor.ServiceDescriptor(
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=42,
serialized_end=295,
serialized_end=227,
methods=[
_descriptor.MethodDescriptor(
name='CreateService',
......@@ -70,16 +70,6 @@ _SERVICESERVICE = _descriptor.ServiceDescriptor(
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetConnectionList',
full_name='service.ServiceService.GetConnectionList',
index=3,
containing_service=None,
input_type=context__pb2._SERVICEID,
output_type=context__pb2._CONNECTIONLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_SERVICESERVICE)
......
......@@ -12,56 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc, logging
from concurrent import futures
from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
from grpc_health.v1.health_pb2 import HealthCheckResponse
from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
from common.Constants import ServiceNameEnum
from common.Settings import get_service_port_grpc
from common.tools.service.GenericGrpcService import GenericGrpcService
from compute.proto.compute_pb2_grpc import add_ComputeServiceServicer_to_server
from compute.service.ComputeServiceServicerImpl import ComputeServiceServicerImpl
from compute.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
BIND_ADDRESS = '0.0.0.0'
LOGGER = logging.getLogger(__name__)
class ComputeService:
def __init__(self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
grace_period=GRPC_GRACE_PERIOD):
self.address = address
self.port = port
self.endpoint = None
self.max_workers = max_workers
self.grace_period = grace_period
self.compute_servicer = None
self.health_servicer = None
self.pool = None
self.server = None
def start(self):
self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
LOGGER.debug('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
str(self.endpoint), str(self.max_workers)))
self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
class ComputeService(GenericGrpcService):
def __init__(self, cls_name: str = __name__) -> None:
port = get_service_port_grpc(ServiceNameEnum.COMPUTE)
super().__init__(port, cls_name=cls_name)
self.compute_servicer = ComputeServiceServicerImpl()
add_ComputeServiceServicer_to_server(self.compute_servicer, self.server)
self.health_servicer = HealthServicer(
experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
add_HealthServicer_to_server(self.health_servicer, self.server)
port = self.server.add_insecure_port(self.endpoint)
self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
LOGGER.info('Listening on {:s}...'.format(str(self.endpoint)))
self.server.start()
self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
LOGGER.debug('Service started')
def stop(self):
LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
self.health_servicer.enter_graceful_shutdown()
self.server.stop(self.grace_period)
LOGGER.debug('Service stopped')
def install_servicers(self):
add_ComputeServiceServicer_to_server(self.compute_servicer, self.server)
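A minimal sketch of how the slimmed-down service is now driven (mirroring __main__.py below); it assumes GenericGrpcService exposes the same start()/stop() lifecycle that the removed local code implemented.

from compute.service.ComputeService import ComputeService

grpc_service = ComputeService()   # port resolved via get_service_port_grpc(ServiceNameEnum.COMPUTE)
grpc_service.start()              # base class builds the gRPC server and calls install_servicers()
# ... serve until a termination signal arrives ...
grpc_service.stop()               # graceful shutdown handled by GenericGrpcService (assumed)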
......@@ -14,10 +14,10 @@
import logging, signal, sys, threading
from prometheus_client import start_http_server
from common.Settings import get_setting, wait_for_environment_variables
from compute.Config import (
GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, RESTAPI_SERVICE_PORT, RESTAPI_BASE_URL,
METRICS_PORT)
from common.Constants import ServiceNameEnum
from common.Settings import (
ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port,
wait_for_environment_variables)
from .ComputeService import ComputeService
from .rest_server.RestServer import RestServer
from .rest_server.nbi_plugins.ietf_l2vpn import register_ietf_l2vpn
......@@ -32,20 +32,13 @@ def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
def main():
global LOGGER # pylint: disable=global-statement
grpc_service_port = get_setting('COMPUTESERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT )
max_workers = get_setting('MAX_WORKERS', default=GRPC_MAX_WORKERS )
grace_period = get_setting('GRACE_PERIOD', default=GRPC_GRACE_PERIOD )
log_level = get_setting('LOG_LEVEL', default=LOG_LEVEL )
restapi_service_port = get_setting('RESTAPI_SERVICE_PORT', default=RESTAPI_SERVICE_PORT)
restapi_base_url = get_setting('RESTAPI_BASE_URL', default=RESTAPI_BASE_URL )
metrics_port = get_setting('METRICS_PORT', default=METRICS_PORT )
log_level = get_log_level()
logging.basicConfig(level=log_level)
LOGGER = logging.getLogger(__name__)
wait_for_environment_variables([
'CONTEXTSERVICE_SERVICE_HOST', 'CONTEXTSERVICE_SERVICE_PORT_GRPC',
'SERVICESERVICE_SERVICE_HOST', 'SERVICESERVICE_SERVICE_PORT_GRPC'
get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST ),
get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC),
])
signal.signal(signal.SIGINT, signal_handler)
......@@ -54,13 +47,14 @@ def main():
LOGGER.info('Starting...')
# Start metrics server
metrics_port = get_metrics_port()
start_http_server(metrics_port)
# Starting compute service
grpc_service = ComputeService(port=grpc_service_port, max_workers=max_workers, grace_period=grace_period)
grpc_service = ComputeService()
grpc_service.start()
rest_server = RestServer(port=restapi_service_port, base_url=restapi_base_url)
rest_server = RestServer()
register_ietf_l2vpn(rest_server)
rest_server.start()
......
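For reference, a hedged sketch of what the new settings helpers resolve to; the CONTEXT mapping is confirmed by the literals removed above, and the return values in the comments are an assumption about get_env_var_name.

from common.Constants import ServiceNameEnum
from common.Settings import ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name

get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST)       # -> 'CONTEXTSERVICE_SERVICE_HOST'
get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC)  # -> 'CONTEXTSERVICE_SERVICE_PORT_GRPC'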
......@@ -12,45 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging, threading, time
from flask import Flask, request
from flask_restful import Api, Resource
from werkzeug.serving import make_server
from compute.Config import RESTAPI_BASE_URL, RESTAPI_SERVICE_PORT
logging.getLogger('werkzeug').setLevel(logging.WARNING)
BIND_ADDRESS = '0.0.0.0'
LOGGER = logging.getLogger(__name__)
def log_request(response):
timestamp = time.strftime('[%Y-%b-%d %H:%M]')
LOGGER.info('%s %s %s %s %s', timestamp, request.remote_addr, request.method, request.full_path, response.status)
return response
class RestServer(threading.Thread):
def __init__(self, host=BIND_ADDRESS, port=RESTAPI_SERVICE_PORT, base_url=RESTAPI_BASE_URL):
threading.Thread.__init__(self, daemon=True)
self.host = host
self.port = port
self.base_url = base_url
self.srv = None
self.ctx = None
self.app = Flask(__name__)
self.app.after_request(log_request)
self.api = Api(self.app, prefix=self.base_url)
def add_resource(self, resource : Resource, *urls, **kwargs):
self.api.add_resource(resource, *urls, **kwargs)
def run(self):
self.srv = make_server(self.host, self.port, self.app, threaded=True)
self.ctx = self.app.app_context()
self.ctx.push()
endpoint = 'http://{:s}:{:s}{:s}'.format(str(self.host), str(self.port), str(self.base_url))
LOGGER.info('Listening on {:s}...'.format(str(endpoint)))
self.srv.serve_forever()
def shutdown(self):
self.srv.shutdown()
from common.Constants import ServiceNameEnum
from common.Settings import get_service_baseurl_http, get_service_port_http
from common.tools.service.GenericRestServer import GenericRestServer
class RestServer(GenericRestServer):
def __init__(self, cls_name: str = __name__) -> None:
bind_port = get_service_port_http(ServiceNameEnum.COMPUTE)
base_url = get_service_baseurl_http(ServiceNameEnum.COMPUTE)
super().__init__(bind_port, base_url, cls_name=cls_name)
......@@ -25,4 +25,30 @@ BEARER_MAPPINGS = {
'R2-EMU:13/2/1': ('R2-EMU', '13/2/1', '12.12.12.1', '65000:120', 450, '3.4.2.1', 24),
'R3-INF:13/2/1': ('R3-INF', '13/2/1', '20.20.20.1', '65000:200', 500, '3.3.1.1', 24),
'R4-EMU:13/2/1': ('R4-EMU', '13/2/1', '22.22.22.1', '65000:220', 550, '3.4.1.1', 24),
'R1@D1:3/1': ('R1@D1', '3/1', '10.0.1.1', '65001:101', 100, '1.1.3.1', 24),
'R1@D1:3/2': ('R1@D1', '3/2', '10.0.1.1', '65001:101', 100, '1.1.3.2', 24),
'R1@D1:3/3': ('R1@D1', '3/3', '10.0.1.1', '65001:101', 100, '1.1.3.3', 24),
'R2@D1:3/1': ('R2@D1', '3/1', '10.0.1.2', '65001:102', 100, '1.2.3.1', 24),
'R2@D1:3/2': ('R2@D1', '3/2', '10.0.1.2', '65001:102', 100, '1.2.3.2', 24),
'R2@D1:3/3': ('R2@D1', '3/3', '10.0.1.2', '65001:102', 100, '1.2.3.3', 24),
'R3@D1:3/1': ('R3@D1', '3/1', '10.0.1.3', '65001:103', 100, '1.3.3.1', 24),
'R3@D1:3/2': ('R3@D1', '3/2', '10.0.1.3', '65001:103', 100, '1.3.3.2', 24),
'R3@D1:3/3': ('R3@D1', '3/3', '10.0.1.3', '65001:103', 100, '1.3.3.3', 24),
'R4@D1:3/1': ('R4@D1', '3/1', '10.0.1.4', '65001:104', 100, '1.4.3.1', 24),
'R4@D1:3/2': ('R4@D1', '3/2', '10.0.1.4', '65001:104', 100, '1.4.3.2', 24),
'R4@D1:3/3': ('R4@D1', '3/3', '10.0.1.4', '65001:104', 100, '1.4.3.3', 24),
'R1@D2:3/1': ('R1@D2', '3/1', '10.0.2.1', '65002:101', 100, '2.1.3.1', 24),
'R1@D2:3/2': ('R1@D2', '3/2', '10.0.2.1', '65002:101', 100, '2.1.3.2', 24),
'R1@D2:3/3': ('R1@D2', '3/3', '10.0.2.1', '65002:101', 100, '2.1.3.3', 24),
'R2@D2:3/1': ('R2@D2', '3/1', '10.0.2.2', '65002:102', 100, '2.2.3.1', 24),
'R2@D2:3/2': ('R2@D2', '3/2', '10.0.2.2', '65002:102', 100, '2.2.3.2', 24),
'R2@D2:3/3': ('R2@D2', '3/3', '10.0.2.2', '65002:102', 100, '2.2.3.3', 24),
'R3@D2:3/1': ('R3@D2', '3/1', '10.0.2.3', '65002:103', 100, '2.3.3.1', 24),
'R3@D2:3/2': ('R3@D2', '3/2', '10.0.2.3', '65002:103', 100, '2.3.3.2', 24),
'R3@D2:3/3': ('R3@D2', '3/3', '10.0.2.3', '65002:103', 100, '2.3.3.3', 24),
'R4@D2:3/1': ('R4@D2', '3/1', '10.0.2.4', '65002:104', 100, '2.4.3.1', 24),
'R4@D2:3/2': ('R4@D2', '3/2', '10.0.2.4', '65002:104', 100, '2.4.3.2', 24),
'R4@D2:3/3': ('R4@D2', '3/3', '10.0.2.4', '65002:104', 100, '2.4.3.3', 24),
}
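A hedged illustration of how one of the new entries might be consumed; the field names below are inferred from the values (device, endpoint, router id, route distinguisher, sub-interface index, address, prefix length) and are not defined in this diff.

(device_uuid, endpoint_uuid, router_id, route_distinguisher,
 sub_interface_index, address_ip, address_prefix) = BEARER_MAPPINGS['R1@D1:3/1']
# ('R1@D1', '3/1', '10.0.1.1', '65001:101', 100, '1.1.3.1', 24)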
......@@ -17,44 +17,47 @@ from flask import request
from flask.json import jsonify
from flask_restful import Resource
from common.Constants import DEFAULT_CONTEXT_UUID
from common.Settings import get_setting
from context.client.ContextClient import ContextClient
from context.proto.context_pb2 import ServiceId
from context.proto.context_pb2 import ServiceId, SliceStatusEnum
from service.client.ServiceClient import ServiceClient
from service.proto.context_pb2 import ServiceStatusEnum
from .tools.Authentication import HTTP_AUTH
from .tools.ContextMethods import get_service, get_slice
from .tools.HttpStatusCodes import HTTP_GATEWAYTIMEOUT, HTTP_NOCONTENT, HTTP_OK, HTTP_SERVERERROR
LOGGER = logging.getLogger(__name__)
class L2VPN_Service(Resource):
def __init__(self) -> None:
super().__init__()
self.context_client = ContextClient(
get_setting('CONTEXTSERVICE_SERVICE_HOST'), get_setting('CONTEXTSERVICE_SERVICE_PORT_GRPC'))
self.service_client = ServiceClient(
get_setting('SERVICESERVICE_SERVICE_HOST'), get_setting('SERVICESERVICE_SERVICE_PORT_GRPC'))
@HTTP_AUTH.login_required
def get(self, vpn_id : str):
LOGGER.debug('VPN_Id: {:s}'.format(str(vpn_id)))
LOGGER.debug('Request: {:s}'.format(str(request)))
# pylint: disable=no-member
service_id_request = ServiceId()
service_id_request.context_id.context_uuid.uuid = DEFAULT_CONTEXT_UUID
service_id_request.service_uuid.uuid = vpn_id
response = jsonify({})
try:
service_reply = self.context_client.GetService(service_id_request)
if service_reply.service_id != service_id_request: # pylint: disable=no-member
raise Exception('Service retrieval failed. Wrong Service Id was returned')
service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
service_status = service_reply.service_status.service_status
response = jsonify({})
response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT
context_client = ContextClient()
target = get_service(context_client, vpn_id)
if target is not None:
if target.service_id.service_uuid.uuid != vpn_id: # pylint: disable=no-member
raise Exception('Service retrieval failed. Wrong Service Id was returned')
service_ready_status = ServiceStatusEnum.SERVICESTATUS_ACTIVE
service_status = target.service_status.service_status # pylint: disable=no-member
response.status_code = HTTP_OK if service_status == service_ready_status else HTTP_GATEWAYTIMEOUT
return response
target = get_slice(context_client, vpn_id)
if target is not None:
if target.slice_id.slice_uuid.uuid != vpn_id: # pylint: disable=no-member
raise Exception('Slice retrieval failed. Wrong Slice Id was returned')
slice_ready_status = SliceStatusEnum.SLICESTATUS_ACTIVE
slice_status = target.slice_status.slice_status # pylint: disable=no-member
response.status_code = HTTP_OK if slice_status == slice_ready_status else HTTP_GATEWAYTIMEOUT
return response
raise Exception('VPN({:s}) not found in database'.format(str(vpn_id)))
except Exception as e: # pylint: disable=broad-except
LOGGER.exception('Something went wrong Retrieving Service {:s}'.format(str(request)))
LOGGER.exception('Something went wrong Retrieving VPN({:s})'.format(str(request)))
response = jsonify({'error': str(e)})
response.status_code = HTTP_SERVERERROR
return response
......@@ -70,7 +73,8 @@ class L2VPN_Service(Resource):
service_id_request.service_uuid.uuid = vpn_id
try:
self.service_client.DeleteService(service_id_request)
service_client = ServiceClient()
service_client.DeleteService(service_id_request)
response = jsonify({})
response.status_code = HTTP_NOCONTENT
except Exception as e: # pylint: disable=broad-except
......
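A compact, hedged restatement of the new lookup logic in get(): try the VPN id as a Service first, then as a Slice, and map the ready state to the HTTP status; it assumes get_service/get_slice return None when the id is unknown, as the None checks above imply.

def _vpn_is_ready(context_client, vpn_id : str) -> bool:
    service = get_service(context_client, vpn_id)
    if service is not None:
        return service.service_status.service_status == ServiceStatusEnum.SERVICESTATUS_ACTIVE
    slice_ = get_slice(context_client, vpn_id)
    if slice_ is not None:
        return slice_.slice_status.slice_status == SliceStatusEnum.SLICESTATUS_ACTIVE
    raise Exception('VPN({:s}) not found in database'.format(str(vpn_id)))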
......@@ -21,7 +21,7 @@ from .L2VPN_Services import L2VPN_Services
from .L2VPN_Service import L2VPN_Service
from .L2VPN_SiteNetworkAccesses import L2VPN_SiteNetworkAccesses
URL_PREFIX = '/ietf-l2vpn-svc:l2vpn-svc'
URL_PREFIX = '/ietf-l2vpn-svc:l2vpn-svc'
def _add_resource(rest_server : RestServer, resource : Resource, *urls, **kwargs):
urls = [(URL_PREFIX + url) for url in urls]
......
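An illustrative note on the prefixing helper: every url passed to _add_resource is registered under URL_PREFIX. The concrete sub-path below is hypothetical, shown only to make the prefixing visible.

_add_resource(rest_server, L2VPN_Service, '/vpn-services/vpn-service=<vpn_id>/')  # sub-path hypothetical
# registered as '/ietf-l2vpn-svc:l2vpn-svc/vpn-services/vpn-service=<vpn_id>/'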
......@@ -36,7 +36,7 @@ SCHEMA_VPN_SERVICE = {
'required': ['vpn-id', 'vpn-svc-type', 'svc-topo', 'customer-name'],
'properties': {
'vpn-id': {'type': 'string', 'pattern': REGEX_UUID},
'vpn-svc-type': {'enum': ['vpws']},
'vpn-svc-type': {'enum': ['vpws', 'vpls']},
'svc-topo': {'enum': ['any-to-any']},
'customer-name': {'const': 'osm'},
},
......
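A hedged example of a request body that the relaxed schema should now accept; applying it through jsonschema.validate is an assumption about how SCHEMA_VPN_SERVICE is used, and the vpn-id value is only a placeholder expected to match REGEX_UUID.

from jsonschema import validate

vpn_service = {
    'vpn-id'       : '00000000-0000-0000-0000-000000000000',  # placeholder; must match REGEX_UUID
    'vpn-svc-type' : 'vpls',                                   # newly accepted alongside 'vpws'
    'svc-topo'     : 'any-to-any',
    'customer-name': 'osm',
}
validate(instance=vpn_service, schema=SCHEMA_VPN_SERVICE)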
This diff is collapsed.