diff --git a/deploy/all.sh b/deploy/all.sh
index 25d69b485daff4dd0307e6fa85d3a4d47d54b72a..d87f5b08a845e12993dbd699c1805a316fb25ee3 100755
--- a/deploy/all.sh
+++ b/deploy/all.sh
@@ -179,3 +179,16 @@ export GRAF_EXT_PORT_HTTP=${GRAF_EXT_PORT_HTTP:-"3000"}
 ./deploy/show.sh
 echo "Done!"
+
+
+TFS_NAMESPACE="tfs"
+PCEP_EXT_PORT_HTTP="4189"
+
+PCEP_PORT_HTTP=$(kubectl --namespace ${TFS_NAMESPACE} get service pcepservice -o 'jsonpath={.spec.ports[?(@.name=="pcep")].port}')
+PATCH='{"data": {"'${PCEP_EXT_PORT_HTTP}'": "'${TFS_NAMESPACE}'/pcepservice:'${PCEP_PORT_HTTP}'"}}'
+kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
+
+PORT_MAP='{"containerPort": '${PCEP_EXT_PORT_HTTP}', "hostPort": '${PCEP_EXT_PORT_HTTP}'}'
+CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
+PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
+kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
\ No newline at end of file
diff --git a/deploy/expose_pcep.sh b/deploy/expose_pcep.sh
new file mode 100755
index 0000000000000000000000000000000000000000..a19db84708c0457c9c9ddceb06f7b77c0f55f9c3
--- /dev/null
+++ b/deploy/expose_pcep.sh
@@ -0,0 +1,11 @@
+TFS_NAMESPACE="tfs"
+PCEP_EXT_PORT_HTTP="4189"
+
+PCEP_PORT_HTTP=$(kubectl --namespace ${TFS_NAMESPACE} get service pcepservice -o 'jsonpath={.spec.ports[?(@.name=="pcep")].port}')
+PATCH='{"data": {"'${PCEP_EXT_PORT_HTTP}'": "'${TFS_NAMESPACE}'/pcepservice:'${PCEP_PORT_HTTP}'"}}'
+kubectl patch configmap nginx-ingress-tcp-microk8s-conf --namespace ingress --patch "${PATCH}"
+
+PORT_MAP='{"containerPort": '${PCEP_EXT_PORT_HTTP}', "hostPort": '${PCEP_EXT_PORT_HTTP}'}'
+CONTAINER='{"name": "nginx-ingress-microk8s", "ports": ['${PORT_MAP}']}'
+PATCH='{"spec": {"template": {"spec": {"containers": ['${CONTAINER}']}}}}'
+kubectl patch daemonset nginx-ingress-microk8s-controller --namespace ingress --patch "${PATCH}"
\ No newline at end of file
diff --git a/manifests/pcepservice.yaml b/manifests/pcepservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4a7e93949d05414603418e086d7bcc5f91569c47
--- /dev/null
+++ b/manifests/pcepservice.yaml
@@ -0,0 +1,103 @@
+# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
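+# Deployment and ClusterIP Service for the PCEP component. The container
+# exposes gRPC (20050), Prometheus metrics (9192), the PCEP protocol port
+# (4189) and a management port (6666); deploy/expose_pcep.sh additionally maps
+# 4189 through the MicroK8s NGINX ingress so PCEP peers outside the cluster
+# can reach it.
+#
+# Quick check (sketch, assuming the default "tfs" namespace used by the deploy
+# scripts) using the same jsonpath expression as expose_pcep.sh:
+#   kubectl --namespace tfs get service pcepservice \
+#     -o 'jsonpath={.spec.ports[?(@.name=="pcep")].port}'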
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: pcepservice +spec: + selector: + matchLabels: + app: pcepservice + replicas: 1 + template: + metadata: + annotations: + config.linkerd.io/skip-outbound-ports: "4189" + labels: + app: pcepservice + spec: + terminationGracePeriodSeconds: 5 + containers: + - name: server + image: localhost:32000/tfs/pcep:dev + imagePullPolicy: Always + ports: + - containerPort: 20050 + - containerPort: 9192 + - containerPort: 6666 + - containerPort: 4189 + env: + - name: LOG_LEVEL + value: "DEBUG" + readinessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:20050"] + livenessProbe: + exec: + command: ["/bin/grpc_health_probe", "-addr=:20050"] + resources: + requests: + cpu: 50m + memory: 64Mi + limits: + cpu: 500m + memory: 512Mi +--- +#Internal ClusterIP service +apiVersion: v1 +kind: Service +metadata: + name: pcepservice + labels: + app: pcepservice +spec: + type: ClusterIP + selector: + app: pcepservice + ports: + - name: grpc + protocol: TCP + port: 20050 + targetPort: 20050 + - name: metrics + protocol: TCP + port: 9192 + targetPort: 9192 + - name: pcep + protocol: TCP + port: 4189 + targetPort: 4189 + - name: managm + protocol: TCP + port: 6666 + targetPort: 6666 + +#NodePortService +# apiVersion: v1 +# kind: Service +# metadata: +# name: pceservice-ext +# labels: +# app: pceservice +# spec: +# type: NodePort +# externalTrafficPolicy: Local +# selector: +# app: pceservice +# ports: +# - name: pcep +# protocol: TCP +# port: 4189 +# targetPort: 4189 diff --git a/src/common/Constants.py b/src/common/Constants.py index c7ba01f69978fd3c601dcfe30180015d524b1100..a858974e7a22fb407b09ad6c06fdd1722553e04f 100644 --- a/src/common/Constants.py +++ b/src/common/Constants.py @@ -60,6 +60,7 @@ class ServiceNameEnum(Enum): FORECASTER = 'forecaster' E2EORCHESTRATOR = 'e2eorchestrator' BGPLS = 'bgpls-speaker' + PCEP = 'pcep' # Used for test and debugging only DLT_GATEWAY = 'dltgateway' @@ -88,6 +89,7 @@ DEFAULT_SERVICE_GRPC_PORTS = { ServiceNameEnum.FORECASTER .value : 10040, ServiceNameEnum.E2EORCHESTRATOR .value : 10050, ServiceNameEnum.BGPLS .value : 20030, + ServiceNameEnum.PCEP .value : 20050, # Used for test and debugging only ServiceNameEnum.DLT_GATEWAY .value : 50051, diff --git a/src/pcep/.gitlab-ci.yml b/src/pcep/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..dd861a02587ac0bf08b48cc6e603410f5386ce4c --- /dev/null +++ b/src/pcep/.gitlab-ci.yml @@ -0,0 +1,224 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Build, tag, and push the Docker image to the GitLab Docker registry +build service: + variables: + IMAGE_NAME: 'pcep' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: build + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + script: + - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile . 
+ - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + after_script: + - docker images --filter="dangling=true" --quiet | xargs -r docker rmi + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + +# Apply unit test to the component +unit_test service: + variables: + IMAGE_NAME: 'pcep' # name of the microservice + IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) + stage: unit_test + needs: + - build service + before_script: + - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY + - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create --driver=bridge teraflowbridge; fi + + # Context-related + - if docker container ls | grep crdb; then docker rm -f crdb; else echo "CockroachDB container is not in the system"; fi + - if docker volume ls | grep crdb; then docker volume rm -f crdb; else echo "CockroachDB volume is not in the system"; fi + - if docker container ls | grep nats; then docker rm -f nats; else echo "NATS container is not in the system"; fi + + # Device-related + - if docker container ls | grep context; then docker rm -f context; else echo "context image is not in the system"; fi + - if docker container ls | grep device; then docker rm -f device; else echo "device image is not in the system"; fi + + # Pathcomp-related + - if docker container ls | grep pathcomp-frontend; then docker rm -f pathcomp-frontend; else echo "pathcomp-frontend image is not in the system"; fi + - if docker container ls | grep pathcomp-backend; then docker rm -f pathcomp-backend; else echo "pathcomp-backend image is not in the system"; fi + + # Service-related + - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi + + script: + - docker pull "cockroachdb/cockroach:latest-v22.2" + - docker pull "nats:2.9" + - docker pull "$CI_REGISTRY_IMAGE/context:$IMAGE_TAG" + - docker pull "$CI_REGISTRY_IMAGE/device:$IMAGE_TAG" + - docker pull "$CI_REGISTRY_IMAGE/pathcomp-frontend:$IMAGE_TAG" + - docker pull "$CI_REGISTRY_IMAGE/pathcomp-backend:$IMAGE_TAG" + - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" + + # Context preparation + - docker volume create crdb + - > + docker run --name crdb -d --network=teraflowbridge -p 26257:26257 -p 8080:8080 + --env COCKROACH_DATABASE=tfs_test --env COCKROACH_USER=tfs --env COCKROACH_PASSWORD=tfs123 + --volume "crdb:/cockroach/cockroach-data" + cockroachdb/cockroach:latest-v22.2 start-single-node + - > + docker run --name nats -d --network=teraflowbridge -p 4222:4222 -p 8222:8222 + nats:2.9 --http_port 8222 --user tfs --pass tfs123 + - echo "Waiting for initialization..." + - while ! docker logs crdb 2>&1 | grep -q 'finished creating default user \"tfs\"'; do sleep 1; done + - docker logs crdb + - while ! 
docker logs nats 2>&1 | grep -q 'Server is ready'; do sleep 1; done + - docker logs nats + - docker ps -a + - CRDB_ADDRESS=$(docker inspect crdb --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $CRDB_ADDRESS + - NATS_ADDRESS=$(docker inspect nats --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $NATS_ADDRESS + - > + docker run --name context -d -p 1010:1010 + --env "CRDB_URI=cockroachdb://tfs:tfs123@${CRDB_ADDRESS}:26257/tfs_test?sslmode=require" + --env "MB_BACKEND=nats" + --env "NATS_URI=nats://tfs:tfs123@${NATS_ADDRESS}:4222" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/context:$IMAGE_TAG + - CONTEXTSERVICE_SERVICE_HOST=$(docker inspect context --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $CONTEXTSERVICE_SERVICE_HOST + + # Device preparation + - > + docker run --name device -d -p 2020:2020 + --env "CONTEXTSERVICE_SERVICE_HOST=${CONTEXTSERVICE_SERVICE_HOST}" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/device:$IMAGE_TAG + - DEVICESERVICE_SERVICE_HOST=$(docker inspect device --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $DEVICESERVICE_SERVICE_HOST + + # PathComp preparation + - > + docker run --name pathcomp-backend -d -p 8081:8081 + --network=teraflowbridge + $CI_REGISTRY_IMAGE/pathcomp-backend:$IMAGE_TAG + - PATHCOMP_BACKEND_HOST=$(docker inspect pathcomp-backend --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $PATHCOMP_BACKEND_HOST + - sleep 1 + - > + docker run --name pathcomp-frontend -d -p 10020:10020 + --env "CONTEXTSERVICE_SERVICE_HOST=${CONTEXTSERVICE_SERVICE_HOST}" + --env "PATHCOMP_BACKEND_HOST=${PATHCOMP_BACKEND_HOST}" + --env "PATHCOMP_BACKEND_PORT=8081" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/pathcomp-frontend:$IMAGE_TAG + - sleep 1 + - PATHCOMPSERVICE_SERVICE_HOST=$(docker inspect pathcomp-frontend --format "{{.NetworkSettings.Networks.teraflowbridge.IPAddress}}") + - echo $PATHCOMPSERVICE_SERVICE_HOST + + # Service preparation + - > + docker run --name $IMAGE_NAME -d -p 3030:3030 -p 4189:4189 -p 6666:6666 + --env "CONTEXTSERVICE_SERVICE_HOST=${CONTEXTSERVICE_SERVICE_HOST}" + --env "DEVICESERVICE_SERVICE_HOST=${DEVICESERVICE_SERVICE_HOST}" + --env "PATHCOMPSERVICE_SERVICE_HOST=${PATHCOMPSERVICE_SERVICE_HOST}" + --volume "$PWD/src/$IMAGE_NAME/tests:/opt/results" + --network=teraflowbridge + $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG + + # Check status before the tests + - sleep 5 + - docker ps -a + - docker logs context + - docker logs device + - docker logs pathcomp-frontend + - docker logs pathcomp-backend + - docker logs $IMAGE_NAME + + # Run the tests + - > + docker exec -i $IMAGE_NAME bash -c + "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml" + - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" + + coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' + after_script: + # Check status after the tests + - docker ps -a + - docker logs context + - docker logs device + - docker logs pathcomp-frontend + - docker logs pathcomp-backend + - docker logs $IMAGE_NAME + + - docker rm -f $IMAGE_NAME + - docker rm -f pathcomp-frontend + - docker rm -f pathcomp-backend + - docker rm -f device + - docker rm -f context + + - docker rm -f $IMAGE_NAME crdb nats + - docker volume rm -f crdb + - docker network rm teraflowbridge + - docker volume prune --force + - docker image prune --force + + rules: + - if: 
'$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' + - changes: + - src/common/**/*.py + - proto/*.proto + - src/$IMAGE_NAME/**/*.{py,in,yml} + - src/$IMAGE_NAME/Dockerfile + - src/$IMAGE_NAME/tests/*.py + - manifests/${IMAGE_NAME}service.yaml + - .gitlab-ci.yml + + artifacts: + when: always + reports: + junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml + +## Deployment of the service in Kubernetes Cluster +#deploy service: +# variables: +# IMAGE_NAME: 'service' # name of the microservice +# IMAGE_TAG: 'latest' # tag of the container image (production, development, etc) +# stage: deploy +# needs: +# - unit test service +# # - integ_test execute +# script: +# - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml' +# - kubectl version +# - kubectl get all +# - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml" +# - kubectl get all +# # environment: +# # name: test +# # url: https://example.com +# # kubernetes: +# # namespace: test +# rules: +# - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)' +# when: manual +# - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"' +# when: manual diff --git a/src/pcep/Dockerfile b/src/pcep/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..48f2db27d3da9b3caf584445246f113af24d12db --- /dev/null +++ b/src/pcep/Dockerfile @@ -0,0 +1,95 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
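+# Build note (sketch): the CI job builds this image from the repository root,
+# because common_requirements.in, proto/ and src/common/ are copied into it:
+#   docker build -t pcep:latest -f ./src/pcep/Dockerfile .
+# The manifest pulls the image as localhost:32000/tfs/pcep:dev, i.e. it is
+# expected to be tagged and pushed to the local MicroK8s registry before
+# deployment.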
+ +FROM python:3.9-slim + +# Install dependencies +RUN apt-get --yes --quiet --quiet update && \ + apt-get --yes --quiet --quiet install wget g++ && \ + rm -rf /var/lib/apt/lists/* + +# Set Python to show logs as they occur +ENV PYTHONUNBUFFERED=0 + +# Download the gRPC health probe +RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \ + wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ + chmod +x /bin/grpc_health_probe + +#Ping +RUN apt-get update && apt-get install -y iputils-ping g++ && \ + rm -rf /var/lib/apt/lists/* + +# Get generic Python packages +RUN python3 -m pip install --upgrade pip +RUN python3 -m pip install --upgrade setuptools wheel +RUN python3 -m pip install --upgrade pip-tools + +# Install OpenJDK-11 +RUN apt-get update && \ + apt-get install -y openjdk-11-jre-headless && \ + apt-get clean; + +# Get common Python packages +# Note: this step enables sharing the previous Docker build steps among all the Python components +WORKDIR /var/teraflow +COPY common_requirements.in common_requirements.in +RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in +RUN python3 -m pip install -r common_requirements.txt + +# Add common files into working directory +WORKDIR /var/teraflow/common +COPY src/common/. ./ +RUN rm -rf proto + +# Create proto sub-folder, copy .proto files, and generate Python code +RUN mkdir -p /var/teraflow/common/proto +WORKDIR /var/teraflow/common/proto +RUN touch __init__.py +COPY proto/*.proto ./ +RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto +RUN rm *.proto +RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \; + +# Create component sub-folders, get specific Python packages +RUN mkdir -p /var/teraflow/pcep +WORKDIR /var/teraflow/pcep +COPY src/pcep/requirements.in requirements.in +RUN pip-compile --quiet --output-file=requirements.txt requirements.in +RUN python3 -m pip install -r requirements.txt + +# Java module necessary config files +WORKDIR /var/teraflow/pcep +RUN mkdir -p /resources +COPY src/pcep/service/resources/. resources/ + +# Add the Tools Module +WORKDIR /var/teraflow/pcep +RUN mkdir -p /tools +COPY src/pcep/service/tools/. resources/ + +# Add component files into working directory +WORKDIR /var/teraflow +COPY src/context/. context/ +COPY src/device/. device/ +COPY src/pathcomp/frontend/. pathcomp/frontend/ +COPY src/service/. service/ +COPY src/pcep/. pcep/ + +#Added +EXPOSE 4189 +EXPOSE 6666 + +# Start the service +ENTRYPOINT ["python", "-m", "pcep.service"] diff --git a/src/pcep/client/PcepClient.py b/src/pcep/client/PcepClient.py new file mode 100644 index 0000000000000000000000000000000000000000..93b0d830218c7e11238809d03633e94cd909cf77 --- /dev/null +++ b/src/pcep/client/PcepClient.py @@ -0,0 +1,78 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
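+# Usage sketch (not exercised by this patch itself): another TFS component can
+# drive the PCEP component through this client. Field names follow the
+# handlers in PcepServiceServicerImpl; the command string is whatever the Java
+# PCE understands (see GrpcServer.requestToJavaPcep); the address below is
+# only illustrative.
+#
+#   from common.proto.pcep_pb2 import RequestRq, PceIpRq
+#   from pcep.client.PcepClient import PcepClient
+#
+#   client = PcepClient()                               # host/port resolved via common.Settings
+#   client.configuratePCE(PceIpRq(address='10.0.0.1'))  # address the PCE should use locally
+#   reply = client.sendRequest(RequestRq(command='...'))
+#   print(reply.commandRp)
+#   client.close()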
+ +import grpc, logging +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc +from common.proto.context_pb2 import Empty, Service, ServiceId +from common.proto.pcep_pb2_grpc import PcepServiceStub +from common.proto.pcep_pb2 import RequestRq, RequestRp, PceIpRq, PceIpRp +from common.tools.client.RetryDecorator import retry, delay_exponential +from common.tools.grpc.Tools import grpc_message_to_json_string + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 15 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') + +class PcepClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.PCEP) + if not port: port = get_service_port_grpc(ServiceNameEnum.PCEP) + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = None + self.stub = None + self.connect() + LOGGER.debug('Channel created') + + def connect(self): + LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = PcepServiceStub(self.channel) + self.host = get_service_host(ServiceNameEnum.PCEP) + LOGGER.debug('Channel created') + + def close(self): + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + @RETRY_DECORATOR + def sendRequest(self, request : RequestRq) -> RequestRp: + LOGGER.debug('Send request: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.sendRequest(request) + LOGGER.debug('Send request result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + @RETRY_DECORATOR + def configuratePCE(self, request : PceIpRq) -> PceIpRp: + LOGGER.debug("Configuring the PCE") + LOGGER.debug('Configurate PCE: {:s}'.format(grpc_message_to_json_string(request))) + response = self.stub.configuratePCE(request) + LOGGER.debug('Configurate PCE result: {:s}'.format(grpc_message_to_json_string(response))) + return response + + ''' + @RETRY_DECORATOR + def showLSPDB(self): + LOGGER.debug("Showing the LSP Paths in the PCE") + #Make the gRPC call + requestlsp = LSPdb_Request() + port = 10060 + channel = grpc.insecure_channel(f'{self.host}:{port}') + stub_lsp = pceServiceStub(channel) + responselsp = stub_lsp.getLSPdb(requestlsp) + return responselsp + ''' diff --git a/src/pcep/service/PcepService.py b/src/pcep/service/PcepService.py new file mode 100644 index 0000000000000000000000000000000000000000..744532b3b73e60ff83017349a382c885f646a6fa --- /dev/null +++ b/src/pcep/service/PcepService.py @@ -0,0 +1,40 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
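+# The gRPC port defaults to 20050, as registered for ServiceNameEnum.PCEP in
+# common/Constants.py (DEFAULT_SERVICE_GRPC_PORTS); common.Settings allows
+# overriding it through the usual per-service environment variables.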
+ +import logging +from pcep.service.tools.GrpcServer import GrpcServer +from common.Constants import ServiceNameEnum +from common.Settings import get_service_port_grpc, get_service_host +from common.proto.pcep_pb2_grpc import add_PcepServiceServicer_to_server +from common.tools.service.GenericGrpcService import GenericGrpcService +from .PcepServiceServicerImpl import PcepServiceServicerImpl + +LOGGER = logging.getLogger(__name__) + +class PcepService(GenericGrpcService): + def __init__(self, pcepServer : GrpcServer,cls_name: str = __name__) -> None: + port = get_service_port_grpc(ServiceNameEnum.PCEP) #El enum en common.constants + LOGGER.debug('PORT... (PCEP Service)') + LOGGER.debug(port) + + host = get_service_host(ServiceNameEnum.PCEP) #El enum en common.constants + LOGGER.debug('HOST... (PCEP Service)') + LOGGER.debug(host) + + super().__init__(port, cls_name=cls_name) + self.pcep_servicer = PcepServiceServicerImpl(pcepServer) + LOGGER.debug('Creating Servicer... (PCEP Service)') + + def install_servicers(self): + add_PcepServiceServicer_to_server(self.pcep_servicer, self.server) diff --git a/src/pcep/service/PcepServiceServicerImpl.py b/src/pcep/service/PcepServiceServicerImpl.py new file mode 100644 index 0000000000000000000000000000000000000000..4975a71dcb338d0250b0df8e9f60109377f5abff --- /dev/null +++ b/src/pcep/service/PcepServiceServicerImpl.py @@ -0,0 +1,42 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
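+# Note: configuratePCE() currently returns a fixed PceIpRp(addressRp="127.0.0.1")
+# and leaves the call into GrpcServer commented out; the Java PCE is actually
+# launched from __main__.py at start-up. sendRequest() forwards the command
+# string to the Java PCE via GrpcServer.requestToJavaPcep().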
+ +import grpc, logging +from pcep.service.tools.GrpcServer import GrpcServer +from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method +from common.proto.pcep_pb2 import (RequestRq, RequestRp, PceIpRq, PceIpRp) +from common.proto.pcep_pb2_grpc import PcepServiceServicer + +LOGGER = logging.getLogger(__name__) + +METRICS_POOL = MetricsPool('Service', 'RPC') + +class PcepServiceServicerImpl(PcepServiceServicer): + def __init__(self, pcepServer : GrpcServer) -> None: + LOGGER.debug('Creating Servicer...') + self.pcepServer=pcepServer + LOGGER.debug('Servicer Created') + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def configuratePCE(self, request : PceIpRq, context : grpc.ServicerContext) -> PceIpRp: + LOGGER.debug("(ConfiguratePCE) Create pce instance %s",request) + # configurateIP=self.pcepServer.connectToJavaPcep(request.address) + #return PceIpRp(addressRp=configurateIP) + return PceIpRp(addressRp="127.0.0.1") + + @safe_and_metered_rpc_method(METRICS_POOL, LOGGER) + def sendRequest(self, request : RequestRq, context : grpc.ServicerContext) -> RequestRp: + LOGGER.debug("(Send Request) Send: %s",request.command) + message=self.pcepServer.requestToJavaPcep(request.command) + return RequestRp(commandRp=message) diff --git a/src/pcep/service/__main__.py b/src/pcep/service/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..dc943fab393c2189ad3906d298a9951c56fbbd89 --- /dev/null +++ b/src/pcep/service/__main__.py @@ -0,0 +1,71 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging, signal, sys, threading +from prometheus_client import start_http_server +from common.Settings import get_log_level, get_metrics_port +from .PcepService import PcepService +from .tools.GrpcServer import GrpcServer +import socket + +terminate = threading.Event() +LOGGER = logging.getLogger(__name__) + +def signal_handler(signal, frame): # pylint: disable=redefined-outer-name + LOGGER.warning('Terminate signal received') + LOGGER.warning(signal) + terminate.set() + +def main(): + global LOGGER # pylint: disable=global-statement + log_level = get_log_level() + logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s") + LOGGER = logging.getLogger(__name__) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + LOGGER.info('Starting... 
(MAIN.py)') + + # Start metrics server + metrics_port = get_metrics_port() + start_http_server(metrics_port) + + # Initialize ServiceHandler Factory + #service_handler_factory = ServiceHandlerFactory(SERVICE_HANDLERS) + + pcep_server = GrpcServer() #Create instance of the GRPC Server + + ip_address = socket.gethostbyname(socket.gethostname()) + + LOGGER.debug("IP address of the PCEP POD: %s", ip_address) + LOGGER.debug(ip_address) + + pcep_server.connectToJavaPcep(ip_address) + + # Starting pcep service + pcep_service = PcepService(pcep_server) + pcep_service.start() + + # Wait for Ctrl+C or termination signal + while not terminate.wait(timeout=0.1): pass + LOGGER.info('Terminating...') + pcep_server.terminateGrpcServer() + pcep_service.stop() + + LOGGER.info('Bye') + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/pcep/service/resources/PCE-jar-with-dependencies.jar b/src/pcep/service/resources/PCE-jar-with-dependencies.jar new file mode 100644 index 0000000000000000000000000000000000000000..17bb6e5ef06611d7fe4209721729d2a9b6ae6c10 Binary files /dev/null and b/src/pcep/service/resources/PCE-jar-with-dependencies.jar differ diff --git a/src/pcep/service/resources/PCEServerConfiguration.xml b/src/pcep/service/resources/PCEServerConfiguration.xml new file mode 100644 index 0000000000000000000000000000000000000000..f9c42c60c491c07030fb09a7dedaf0b8f93b08a0 --- /dev/null +++ b/src/pcep/service/resources/PCEServerConfiguration.xml @@ -0,0 +1,128 @@ + + + + + 4189 + 6666 + + + + 0.0.0.0 + + + + 60 + + + 30 + + + 120 + + + + 5 + + + + 60 + + + + + + + + + + + + + PCEServer.log + PCEPParserServer.log + OSPFParser.log + true + true + 1 + 1 + + + + topologia_ifusion.xml + + false + BGP4Parameters_2.xml + + 1000 + true + false + false + true + false + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/pcep/service/tools/GrpcServer.py b/src/pcep/service/tools/GrpcServer.py new file mode 100644 index 0000000000000000000000000000000000000000..eefecf6c1c4edb5aa6eaf8b94f086335ccad7291 --- /dev/null +++ b/src/pcep/service/tools/GrpcServer.py @@ -0,0 +1,133 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
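+# Bridge between the Python PCEP service and the Java PCE: connectToJavaPcep()
+# writes the local address into PCEServerConfiguration.xml (via JavaRunner.setPeer)
+# and starts the PCE jar in a background thread; requestToJavaPcep() forwards
+# command strings to the Java process over its management gRPC endpoint on
+# localhost:10060 (see Grpc_TestClient/grpcService.proto).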
+ +import logging,threading, queue +import logging,threading +import grpc +import logging + +from .JavaRunner import JavaRunner + +LOGGER = logging.getLogger(__name__) + +#_ONE_DAY_IN_SECONDS = 60 * 60 * 24 +#SERVER_ADDRESS = 'localhost:2021' + +class GrpcServer(): + + def __init__(self) -> None: # pylint: disable=super-init-not-called + self.__lock = threading.Lock() + self.__started = threading.Event() + self.__terminate = threading.Event() + self.__out_samples = queue.Queue() + self.__server=grpc.aio.server() + self.__runnerList=[] + + def connectToJavaPcep(self, address): + runner = JavaRunner(address) + + # Sets IpAddress in XML config file for java program + runner.setPeer(address) + + process_thread = threading.Thread(target=runner.execPcep) + process_thread.start() + self.__runnerList.append(runner) + + return process_thread + + ''' + def ConnectThread(self) -> bool: + # TODO: Metodos necesarios para conectarte al speaker + # If started, assume it is already connected + if self.__started.is_set(): return True + self.__started.set() + self.__server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + grpcService_pb2_grpc.add_pceServiceServicer_to_server(self, self.__server) + self.__server.add_insecure_port(SERVER_ADDRESS) + LOGGER.info("Starting server on %s", SERVER_ADDRESS) + self.__server.start() + try: + while True: + time.sleep(_ONE_DAY_IN_SECONDS) + except KeyboardInterrupt: + LOGGER.info("DISCONNECT") + self.Disconnect() + return True + + def Connect(self): + grpcThread = threading.Thread(target=self.ConnectThread) + grpcThread.start() + LOGGER.info("Calling the JavaRunner") + return True + + def Disconnect(self) -> bool: + self.__terminate.set() + # If not started, assume it is already disconnected + if not self.__started.is_set(): return True + LOGGER.info("Keyboard interrupt, stop server") + self.__server.stop(0) + # Disconnect triggers deactivation of sampling events + # self.__scheduler.shutdown() + # exit(0) + return True + + def update(): + with grpc.insecure_channel('localhost:10060') as channel: + #n = "initiate lsp directo 10.95.86.214 1.1.1.1 1.1.1.3 m1228800 na192.168.3.11-192.168.3.13" + n = "terminate lsp 10.95.86.214 0 nombre" + #n="create candidatepath 10.95.86.214 1.1.1.1 4 97 m69644288 nn1.1.1.3 m69640192 nn1.1.1.2" + stub = grpcService_pb2_grpc.pceServiceStub(channel) + request = grpcService_pb2.commandRequest(command=n) + print("updateService req: " ,request) + response = stub.update(request) + print("updateService client received: " ,response.commandResp) + + def requestToJavaPcep(self,message): + with grpc.insecure_channel('localhost:10060') as channel: + #n = "initiate lsp largo2 10.95.86.214 1.1.1.1 1.1.1.2 m69644288 nn1.1.1.3 m69640192 nn1.1.1.2" + #n = "initiate lsp directo 10.95.86.214 1.1.1.1 1.1.1.3 m1228800 na192.168.3.11-192.168.3.13" + LOGGER.debug("LLego al request") + stub = grpcService_pb2_grpc.pceServiceStub(channel) + LOGGER.debug("updateService req 2: %s" ,message) + request = grpcService_pb2.commandRequest(command=message) + LOGGER.debug("updateService req 2: %s" ,request) + response = stub.update(request) + LOGGER.debug("updateServide client received: %s" ,response.commandResp) + LOGGER.debug("updateServide client received IP: %s" ,response.ipAddress) + return response.ipAddress + + def terminateRunners(self): + for runner in self.__runnerList: + runner.endBGPSpeaker() + return True + + def terminateGrpcServer(self): + LOGGER.debug("Terminating java programs...") + self.terminateRunners() + LOGGER.debug("Disconnecting grpc 
server...") + self.Disconnect() + return True + + def terminateRunnerById(self,speaker_id): + """ + Disconnect from BGP-LS speaker given an speaker Id. Its the same + as the java running proccess PID. + """ + for runner in self.__runnerList: + if(runner.getPid()==speaker_id): + runner.endBGPSpeaker() + self.__runnerList.remove(runner) + + return True + ''' diff --git a/src/pcep/service/tools/Grpc_TestClient/gRPC_PCEPClient.py b/src/pcep/service/tools/Grpc_TestClient/gRPC_PCEPClient.py new file mode 100644 index 0000000000000000000000000000000000000000..99fe94350a7c5b0558e777d42f6025f6f59e26e6 --- /dev/null +++ b/src/pcep/service/tools/Grpc_TestClient/gRPC_PCEPClient.py @@ -0,0 +1,78 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc, logging +from common.Constants import ServiceNameEnum +from common.Settings import get_service_host, get_service_port_grpc +from common.tools.client.RetryDecorator import retry, delay_exponential +from pcep.service.tools.Grpc_TestClient.grpcService_pb2 import LSPdb_Request, Session_Request +from pcep.service.tools.Grpc_TestClient.grpcService_pb2 import commandRequest +from pcep.service.tools.Grpc_TestClient.grpcService_pb2_grpc import pceServiceStub + +LOGGER = logging.getLogger(__name__) +MAX_RETRIES = 15 +DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0) +RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect') +PORT = 10060 + +class GrpcPcepClient: + def __init__(self, host=None, port=None): + if not host: host = get_service_host(ServiceNameEnum.PCEP) + #if not port: port = get_service_port_grpc(ServiceNameEnum.PCEP) + if not port: port = PORT #Maybe should not be hardcoded + self.endpoint = '{:s}:{:s}'.format(str(host), str(port)) + LOGGER.warning('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = None + self.stub = None + self.connect() + LOGGER.warning('Channel created') + + def connect(self): + #LOGGER.warning('IAM IN CONNECT') + LOGGER.warning('Creating channel to {:s}...'.format(str(self.endpoint))) + self.channel = grpc.insecure_channel(self.endpoint) + self.stub = pceServiceStub(self.channel) + LOGGER.warning('Channel created') + + def close(self): + #LOGGER.warning('IAM IN CLOSE') + if self.channel is not None: self.channel.close() + self.channel = None + self.stub = None + + @RETRY_DECORATOR + def showSessions(self): + LOGGER.warning("IAM IN SHOWSESSIONS") + requestsessions = Session_Request() + responsesessions = self.stub.getSessionsInfo(requestsessions) + return responsesessions + + @RETRY_DECORATOR + def showLSPDB(self): + #LOGGER.warning("IAM IN SHOWLSPDB") + requestlsp = LSPdb_Request() + responselsp = self.stub.getLSPdb(requestlsp) + return responselsp + + @RETRY_DECORATOR + def sendUpdate(self, data): + #LOGGER.warning("IAM IN SEND UPDATE") + commandrequest = commandRequest() # Create an instance without passing data to __init__ + commandrequest.command 
= data + #LOGGER.warning("This is Command Request: %s", str(commandrequest)) + commandresponse = self.stub.update(commandrequest) + #LOGGER.warning("This is Command Response: %s", str(commandresponse)) + #LOGGER.warning("Success: %s", str(commandresponse.success)) + #LOGGER.warning("Error Message: %s", str(commandresponse.error_message)) + return commandresponse diff --git a/src/pcep/service/tools/Grpc_TestClient/grpcService.proto b/src/pcep/service/tools/Grpc_TestClient/grpcService.proto new file mode 100644 index 0000000000000000000000000000000000000000..ff0ac0b028d22b13f900ff37309df099c95102ac --- /dev/null +++ b/src/pcep/service/tools/Grpc_TestClient/grpcService.proto @@ -0,0 +1,60 @@ +// Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; +package src.main.proto; + +//el modulo python abre la comunicacion +//el servidor java responde con un ACK (En caso de que se haya mandado un comando) o con la informacion de la LSPDB (En caso de peticion de LSPDB) + +// Command request from the client +message commandRequest{ + string command = 1; +} + +// Response after a Command Request (If bad request show error log) +message commandResponse{ + bool success = 1; + string error_message = 2; +} + +// LSP database request from the client +message LSPdb_Request{ +} + +// Response containing LSP database information +message LSPdb_Response{ + string LSPdb_data = 2; +} + +// Active PCEP sessions request from the client +message Session_Request{ +} + +// Response containing LSP database information +message Session_Response{ + string Session_data = 2; +} + +// Defining a Service, a Service can have multiple RPC operations +service pceService { + // RPC method for command execution + rpc update(commandRequest) returns (commandResponse); + + // RPC method for retrieving LSP database + rpc getLSPdb(LSPdb_Request) returns (LSPdb_Response); + + // RPC method for retrieving LSP database + rpc getSessionsInfo(Session_Request) returns (Session_Response); +} diff --git a/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2.py b/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..314442ae410d645acb3fc0d113e9bfbfab41aa92 --- /dev/null +++ b/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2.py @@ -0,0 +1,96 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
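+# Generated from Grpc_TestClient/grpcService.proto; do not edit by hand.
+# If the .proto changes, regenerate it, e.g. (sketch, run inside this folder,
+# mirroring the protoc invocation used in the Dockerfile):
+#   python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. grpcService.proto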
+ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: grpcService.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11grpcService.proto\x12\x0esrc.main.proto\"!\n\x0e\x63ommandRequest\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\"9\n\x0f\x63ommandResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x15\n\rerror_message\x18\x02 \x01(\t\"\x0f\n\rLSPdb_Request\"$\n\x0eLSPdb_Response\x12\x12\n\nLSPdb_data\x18\x02 \x01(\t\"\x11\n\x0fSession_Request\"(\n\x10Session_Response\x12\x14\n\x0cSession_data\x18\x02 \x01(\t2\xf8\x01\n\npceService\x12I\n\x06update\x12\x1e.src.main.proto.commandRequest\x1a\x1f.src.main.proto.commandResponse\x12I\n\x08getLSPdb\x12\x1d.src.main.proto.LSPdb_Request\x1a\x1e.src.main.proto.LSPdb_Response\x12T\n\x0fgetSessionsInfo\x12\x1f.src.main.proto.Session_Request\x1a .src.main.proto.Session_Responseb\x06proto3') + +_COMMANDREQUEST = DESCRIPTOR.message_types_by_name['commandRequest'] +_COMMANDRESPONSE = DESCRIPTOR.message_types_by_name['commandResponse'] +_LSPDB_REQUEST = DESCRIPTOR.message_types_by_name['LSPdb_Request'] +_LSPDB_RESPONSE = DESCRIPTOR.message_types_by_name['LSPdb_Response'] +_SESSION_REQUEST = DESCRIPTOR.message_types_by_name['Session_Request'] +_SESSION_RESPONSE = DESCRIPTOR.message_types_by_name['Session_Response'] +commandRequest = _reflection.GeneratedProtocolMessageType('commandRequest', (_message.Message,), { + 'DESCRIPTOR' : _COMMANDREQUEST, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.commandRequest) + }) +_sym_db.RegisterMessage(commandRequest) + +commandResponse = _reflection.GeneratedProtocolMessageType('commandResponse', (_message.Message,), { + 'DESCRIPTOR' : _COMMANDRESPONSE, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.commandResponse) + }) +_sym_db.RegisterMessage(commandResponse) + +LSPdb_Request = _reflection.GeneratedProtocolMessageType('LSPdb_Request', (_message.Message,), { + 'DESCRIPTOR' : _LSPDB_REQUEST, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.LSPdb_Request) + }) +_sym_db.RegisterMessage(LSPdb_Request) + +LSPdb_Response = _reflection.GeneratedProtocolMessageType('LSPdb_Response', (_message.Message,), { + 'DESCRIPTOR' : _LSPDB_RESPONSE, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.LSPdb_Response) + }) +_sym_db.RegisterMessage(LSPdb_Response) + +Session_Request = _reflection.GeneratedProtocolMessageType('Session_Request', (_message.Message,), { + 'DESCRIPTOR' : _SESSION_REQUEST, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.Session_Request) + }) +_sym_db.RegisterMessage(Session_Request) + +Session_Response = _reflection.GeneratedProtocolMessageType('Session_Response', (_message.Message,), { + 'DESCRIPTOR' : _SESSION_RESPONSE, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.Session_Response) + }) +_sym_db.RegisterMessage(Session_Response) + +_PCESERVICE = DESCRIPTOR.services_by_name['pceService'] +if 
_descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _COMMANDREQUEST._serialized_start=37 + _COMMANDREQUEST._serialized_end=70 + _COMMANDRESPONSE._serialized_start=72 + _COMMANDRESPONSE._serialized_end=129 + _LSPDB_REQUEST._serialized_start=131 + _LSPDB_REQUEST._serialized_end=146 + _LSPDB_RESPONSE._serialized_start=148 + _LSPDB_RESPONSE._serialized_end=184 + _SESSION_REQUEST._serialized_start=186 + _SESSION_REQUEST._serialized_end=203 + _SESSION_RESPONSE._serialized_start=205 + _SESSION_RESPONSE._serialized_end=245 + _PCESERVICE._serialized_start=248 + _PCESERVICE._serialized_end=496 +# @@protoc_insertion_point(module_scope) diff --git a/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2_grpc.py b/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2_grpc.py new file mode 100644 index 0000000000000000000000000000000000000000..a2a3ca301d2e50ca92e5690d7ce1367c87da2b55 --- /dev/null +++ b/src/pcep/service/tools/Grpc_TestClient/grpcService_pb2_grpc.py @@ -0,0 +1,144 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import pcep.service.tools.Grpc_TestClient.grpcService_pb2 as grpcService__pb2 +#import grpcService_pb2 as grpcService__pb2 + + +class pceServiceStub(object): + """Defining a Service, a Service can have multiple RPC operations""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.update = channel.unary_unary( + '/src.main.proto.pceService/update', + request_serializer=grpcService__pb2.commandRequest.SerializeToString, + response_deserializer=grpcService__pb2.commandResponse.FromString, + ) + self.getLSPdb = channel.unary_unary( + '/src.main.proto.pceService/getLSPdb', + request_serializer=grpcService__pb2.LSPdb_Request.SerializeToString, + response_deserializer=grpcService__pb2.LSPdb_Response.FromString, + ) + self.getSessionsInfo = channel.unary_unary( + '/src.main.proto.pceService/getSessionsInfo', + request_serializer=grpcService__pb2.Session_Request.SerializeToString, + response_deserializer=grpcService__pb2.Session_Response.FromString, + ) + +class pceServiceServicer(object): + """Defining a Service, a Service can have multiple RPC operations""" + + def update(self, request, context): + """RPC method for command execution""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def getLSPdb(self, request, context): + """RPC method for retrieving LSP database""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def getSessionsInfo(self, request, context): + """RPC method for retrieving LSP database""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + +def add_pceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'update': grpc.unary_unary_rpc_method_handler( + servicer.update, + request_deserializer=grpcService__pb2.commandRequest.FromString, + response_serializer=grpcService__pb2.commandResponse.SerializeToString, + ), + 'getLSPdb': grpc.unary_unary_rpc_method_handler( + servicer.getLSPdb, + request_deserializer=grpcService__pb2.LSPdb_Request.FromString, + response_serializer=grpcService__pb2.LSPdb_Response.SerializeToString, + ), + 'getSessionsInfo': grpc.unary_unary_rpc_method_handler( + servicer.getSessionsInfo, + request_deserializer=grpcService__pb2.Session_Request.FromString, + response_serializer=grpcService__pb2.Session_Response.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'src.main.proto.pceService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + # This class is part of an EXPERIMENTAL API. 
+class pceService(object): + """Defining a Service, a Service can have multiple RPC operations""" + + @staticmethod + def update(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/src.main.proto.pceService/update', + grpcService__pb2.commandRequest.SerializeToString, + grpcService__pb2.commandResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def getLSPdb(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/src.main.proto.pceService/getLSPdb', + grpcService__pb2.LSPdb_Request.SerializeToString, + grpcService__pb2.LSPdb_Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def getSessionsInfo(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/src.main.proto.pceService/getSessionsInfo', + grpcService__pb2.Session_Request.SerializeToString, + grpcService__pb2.Session_Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/src/pcep/service/tools/Grpc_TestClient/test_gRPC_client.py b/src/pcep/service/tools/Grpc_TestClient/test_gRPC_client.py new file mode 100644 index 0000000000000000000000000000000000000000..b4eadb87768bbfff11fd2d43e4c4c086b5751c85 --- /dev/null +++ b/src/pcep/service/tools/Grpc_TestClient/test_gRPC_client.py @@ -0,0 +1,48 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
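+# Standalone test client that talks directly to the Java PCE management gRPC
+# endpoint (hard-coded to 10.1.114.109:10060 below); adjust the address to your
+# PCEP pod/host before running it, e.g.:
+#   python3 test_gRPC_client.py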
+ +import grpc +import logging + +from grpcService_pb2 import LSPdb_Request, Session_Request +from grpcService_pb2_grpc import pceServiceStub +def run(): + # Create a gRPC channel + channel = grpc.insecure_channel('10.1.114.109:10060') + # channel = grpc.insecure_channel('localhost:10060') + + # Create a gRPC stub + stub = pceServiceStub(channel) + + ''' + #LSPDB + # Construct a request (LSPDB Request) + requestlsp = LSPdb_Request() + # Make the gRPC call + responselsp = stub.getLSPdb(requestlsp) + # Print the received response + print("\nRESPUESTA RECIBIDA") + print(responselsp) + ''' + + # Construct a request (Session Request) + sessionrequest = Session_Request() + # Make the gRPC call + sessionresponse = stub.getSessionsInfo(sessionrequest) + # Print the received response + print("\nRESPUESTA RECIBIDA") + print(sessionresponse) +if __name__ == '__main__': + logging.basicConfig() + run() diff --git a/src/pcep/service/tools/JavaRunner.py b/src/pcep/service/tools/JavaRunner.py new file mode 100644 index 0000000000000000000000000000000000000000..27d090f1fdc25b245433ed118a3dd2054843648e --- /dev/null +++ b/src/pcep/service/tools/JavaRunner.py @@ -0,0 +1,147 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
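+# Wraps the Java PCE process: execPcep() launches PCE-jar-with-dependencies.jar
+# with PCEServerConfiguration.xml from /var/teraflow/pcep/service/resources and
+# streams its stdout/stderr; setPeer() rewrites the <LocalPCEAddress> element of
+# that XML with lxml before start-up.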
+ +import logging,threading, time +import logging +from lxml import etree +import os +import subprocess + +XML_CONFIG_FILE="/var/teraflow/pcep/service/resources/PCEServerConfiguration.xml" + +LOGGER = logging.getLogger(__name__) + +class JavaRunner: + + def __init__(self,address): + self.__process=None + self.__lock = threading.Lock() + self.__address = address + + def getCurrentLocalPort(self): + with self.__lock: + return self.__localPort + + def getCurrentMngPort(self): + with self.__lock: + return self.__managementPort + def getPid(self): + return self.__process.pid + + def execAndKill(self): + + LOGGER.debug("Before exec and kill") + os.chdir("/var/teraflow/pcep/service/resources/") + cwd = os.getcwd() + LOGGER.info("Current working directory: %s", cwd) + # Security shell=False + self.__process=subprocess.Popen(['java -jar PCE-jar-with-dependencies.jar '],#+ XML_CONFIG_FILE + shell=False,start_new_session=True,stdout=subprocess.PIPE) + LOGGER.debug("Time to sleep") + java_pid = self.__process.pid + print("Java PID:", java_pid) + time.sleep(15) + self.__process.terminate() + + def execPcep(self) -> bool: + """ + Executes java pcep in non-blocking process + """ + LOGGER.debug("Executing JavaRunner") + os.chdir("/var/teraflow/pcep/service/resources/") + try: + #self.__process = subprocess.Popen(['java', '-jar', 'Ejecutable_2.jar'], + # shell=False, start_new_session=True, + # stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + LOGGER.debug("Executing command: %s", ('java', '-Dlog4j.configurationFile=log4j2.xml', '-jar', 'PCE-jar-with-dependencies.jar', 'PCEServerConfiguration.xml')) + self.__process = subprocess.Popen(['java', '-Dlog4j.configurationFile=log4j2.xml', '-jar', 'PCE-jar-with-dependencies.jar', 'PCEServerConfiguration.xml'], + + shell=False, start_new_session=True, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + stdout_thread = threading.Thread(target=self.read_stdout) + stderr_thread = threading.Thread(target=self.read_stderr) + + stdout_thread.start() + stderr_thread.start() + + self.__process.wait() + + stdout_thread.join() + stderr_thread.join() + + except subprocess.CalledProcessError as err: + LOGGER.debug('ERROR: %s', err) + + return self.__process + + def read_stdout(self): + try: + for line in iter(self.__process.stdout.readline, b''): + print(f"STDOUT: {line.decode('utf-8')}", flush=True) + except Exception as e: + print(f"Error in read_stdout: {e}") + + def read_stderr(self): + try: + for line in iter(self.__process.stderr.readline, b''): + print(f"STDERR: {line.decode('utf-8')}", flush=True) + except Exception as e: + print(f"Error in read_stderr: {e}") + + def setPort(self,port): + self.__peerPort=port + return True + def setAsNumber(self,asNumber): + self.__asNumber=asNumber + return True + + def setPeer(self, address) -> bool: + """ + Sets XML existing config file with peer address and port. 
TODO: as_number + """ + XMLParser = etree.XMLParser(remove_blank_text=False) + tree = etree.parse(XML_CONFIG_FILE, parser=XMLParser) + root = tree.getroot() + + # Find the element + pceAddress = root.find(".//LocalPCEAddress") + + #Check if the element is found + if pceAddress is not None: + LOGGER.debug("Old LocalPCEAddress value: %s", pceAddress.text) + # Update the text content of the element with the new address + pceAddress.text = str(address) + LOGGER.debug("New LocalPCEAddress value: %s", pceAddress.text) + # Save the modified XML back to the file + tree.write(XML_CONFIG_FILE, pretty_print=True) + + return True # Return True to indicate success + + return False # Return False if the element is not found + + #return True + + def endBGPSpeaker(self) -> bool: + """ + Kills java program connected to BGPLS Speaker with SIGKILL signal + """ + LOGGER.debug("sending kill signal to process %s",self.__process.pid) + LOGGER.debug("PID: %d",self.__process.pid) + self.__process.kill() + return True + + def getRunnerInfo(self): + return self.__address,self.__asNumber,self.__peerPort diff --git a/src/pcep/service/tools/protos/grpcService.proto b/src/pcep/service/tools/protos/grpcService.proto new file mode 100644 index 0000000000000000000000000000000000000000..eb915dc9026481acc84b8452c87c2a952b8a82c2 --- /dev/null +++ b/src/pcep/service/tools/protos/grpcService.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package src.main.proto; + +//el modulo python abre la comunicacion +//el servidor java responde con un ACK (En caso de que se haya mandado un comando) o con la informacion de la LSPDB (En caso de peticion de LSPDB) + +// Command request from the client +message commandRequest{ + string command = 1; +} + +// Empty response for command execution acknowledgment +message commandResponse{ +} + +// LSP database request from the client +message LSPdb_Request{ +} + +// Response containing LSP database information +message LSPdb_Response{ + string LSPdb_data = 2; +} + +// Defining a Service, a Service can have multiple RPC operations +service pceService { + // RPC method for command execution + rpc update(commandRequest) returns (commandResponse); + + // RPC method for retrieving LSP database + rpc getLSPdb(LSPdb_Request) returns (LSPdb_Response); +} diff --git a/src/pcep/service/tools/protos/grpcService_pb2.py b/src/pcep/service/tools/protos/grpcService_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..a91c744448125e77a6c8305576d2f05fd7ae1df5 --- /dev/null +++ b/src/pcep/service/tools/protos/grpcService_pb2.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: grpcService.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11grpcService.proto\x12\x0esrc.main.proto\"!\n\x0e\x63ommandRequest\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\"\x11\n\x0f\x63ommandResponse\"\x0f\n\rLSPdb_Request\"$\n\x0eLSPdb_Response\x12\x12\n\nLSPdb_data\x18\x02 \x01(\t2\xa2\x01\n\npceService\x12I\n\x06update\x12\x1e.src.main.proto.commandRequest\x1a\x1f.src.main.proto.commandResponse\x12I\n\x08getLSPdb\x12\x1d.src.main.proto.LSPdb_Request\x1a\x1e.src.main.proto.LSPdb_Responseb\x06proto3') + + + +_COMMANDREQUEST = DESCRIPTOR.message_types_by_name['commandRequest'] +_COMMANDRESPONSE = DESCRIPTOR.message_types_by_name['commandResponse'] +_LSPDB_REQUEST = DESCRIPTOR.message_types_by_name['LSPdb_Request'] +_LSPDB_RESPONSE = DESCRIPTOR.message_types_by_name['LSPdb_Response'] +commandRequest = _reflection.GeneratedProtocolMessageType('commandRequest', (_message.Message,), { + 'DESCRIPTOR' : _COMMANDREQUEST, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.commandRequest) + }) +_sym_db.RegisterMessage(commandRequest) + +commandResponse = _reflection.GeneratedProtocolMessageType('commandResponse', (_message.Message,), { + 'DESCRIPTOR' : _COMMANDRESPONSE, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.commandResponse) + }) +_sym_db.RegisterMessage(commandResponse) + +LSPdb_Request = _reflection.GeneratedProtocolMessageType('LSPdb_Request', (_message.Message,), { + 'DESCRIPTOR' : _LSPDB_REQUEST, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.LSPdb_Request) + }) +_sym_db.RegisterMessage(LSPdb_Request) + +LSPdb_Response = _reflection.GeneratedProtocolMessageType('LSPdb_Response', (_message.Message,), { + 'DESCRIPTOR' : _LSPDB_RESPONSE, + '__module__' : 'grpcService_pb2' + # @@protoc_insertion_point(class_scope:src.main.proto.LSPdb_Response) + }) +_sym_db.RegisterMessage(LSPdb_Response) + +_PCESERVICE = DESCRIPTOR.services_by_name['pceService'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _COMMANDREQUEST._serialized_start=37 + _COMMANDREQUEST._serialized_end=70 + _COMMANDRESPONSE._serialized_start=72 + _COMMANDRESPONSE._serialized_end=89 + _LSPDB_REQUEST._serialized_start=91 + _LSPDB_REQUEST._serialized_end=106 + _LSPDB_RESPONSE._serialized_start=108 + _LSPDB_RESPONSE._serialized_end=144 + _PCESERVICE._serialized_start=147 + _PCESERVICE._serialized_end=309 +# @@protoc_insertion_point(module_scope) diff --git a/src/pcep/service/tools/protos/grpcService_pb2_grpc.py b/src/pcep/service/tools/protos/grpcService_pb2_grpc.py new file mode 100644 index 0000000000000000000000000000000000000000..d16f15ce100082f8a67a32c9beb38e2b046269b7 --- /dev/null +++ b/src/pcep/service/tools/protos/grpcService_pb2_grpc.py @@ -0,0 +1,110 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import grpcService_pb2 as grpcService__pb2 + +class pceServiceStub(object): + """Defining a Service, a Service can have multiple RPC operation""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.update = channel.unary_unary( + '/src.main.proto.pceService/update', + request_serializer=grpcService__pb2.commandRequest.SerializeToString, + response_deserializer=grpcService__pb2.commandResponse.FromString, + ) + self.getLSPdb = channel.unary_unary( + '/src.main.proto.pceService/getLSPdb', + request_serializer=grpcService__pb2.LSPdb_Request.SerializeToString, + response_deserializer=grpcService__pb2.LSPdb_Response.FromString, + ) + +class pceServiceServicer(object): + """Defining a Service, a Service can have multiple RPC operations""" + + def update(self, request, context): + """RPC method for command execution""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def getLSPdb(self, request, context): + """RPC method for retrieving LSP database""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + +def add_pceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'update': grpc.unary_unary_rpc_method_handler( + servicer.update, + request_deserializer=grpcService__pb2.commandRequest.FromString, + response_serializer=grpcService__pb2.commandResponse.SerializeToString, + ), + 'getLSPdb': grpc.unary_unary_rpc_method_handler( + servicer.getLSPdb, + request_deserializer=grpcService__pb2.LSPdb_Request.FromString, + response_serializer=grpcService__pb2.LSPdb_Response.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'src.main.proto.pceService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class pceService(object): + """Defining a Service, a Service can have multiple RPC operations""" + + @staticmethod + def update(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/src.main.proto.pceService/update', + grpcService__pb2.commandRequest.SerializeToString, + grpcService__pb2.commandResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def getLSPdb(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/src.main.proto.pceService/getLSPdb', + grpcService__pb2.LSPdb_Request.SerializeToString, + grpcService__pb2.LSPdb_Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/src/webui/Dockerfile b/src/webui/Dockerfile index fd749ac128632461d2faa43e723777c84356062b..dbb65bef0ab367123676139dbdaad6b6521060c0 100644 --- a/src/webui/Dockerfile +++ b/src/webui/Dockerfile @@ -85,6 +85,9 @@ COPY --chown=webui:webui src/service/client/. service/client/ COPY --chown=webui:webui src/slice/__init__.py slice/__init__.py COPY --chown=webui:webui src/slice/client/. slice/client/ COPY --chown=webui:webui src/webui/. webui/ +COPY --chown=webui:webui src/pcep/__init__.py pcep/__init__.py +COPY --chown=webui:webui src/pcep/client/. pcep/client/ +COPY --chown=webui:webui src/pcep/service/tools/. pcep/service/tools/ COPY --chown=webui:webui src/bgpls_speaker/__init__.py bgpls_speaker/__init__.py COPY --chown=webui:webui src/bgpls_speaker/client/. bgpls_speaker/client/ diff --git a/src/webui/service/pcep/forms.py b/src/webui/service/pcep/forms.py new file mode 100644 index 0000000000000000000000000000000000000000..ee50e5fa8006a37bb49694d13f202a2ec10b8f1f --- /dev/null +++ b/src/webui/service/pcep/forms.py @@ -0,0 +1,67 @@ +# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+# external imports
+# Form type definitions
+
+from flask_wtf import FlaskForm
+from wtforms import StringField, SelectField, TextAreaField, SubmitField, BooleanField
+from wtforms.validators import DataRequired, Length, NumberRange, ValidationError
+from common.proto.context_pb2 import DeviceOperationalStatusEnum
+
+class AddDeviceForm(FlaskForm):
+    device_id = StringField('ID',
+                           validators=[DataRequired(), Length(min=5)])
+    device_type = SelectField('Type', choices=[])
+    operational_status = SelectField('Operational Status',
+                           # choices=[(-1, 'Select...'), (0, 'Undefined'), (1, 'Disabled'), (2, 'Enabled')],
+                           coerce=int,
+                           validators=[NumberRange(min=0)])
+    device_drivers_undefined = BooleanField('UNDEFINED / EMULATED')
+    device_drivers_openconfig = BooleanField('OPENCONFIG')
+    device_drivers_transport_api = BooleanField('TRANSPORT_API')
+    device_drivers_p4 = BooleanField('P4')
+    device_drivers_ietf_network_topology = BooleanField('IETF_NETWORK_TOPOLOGY')
+    device_drivers_onf_tr_352 = BooleanField('ONF_TR_352')
+    device_drivers_xr = BooleanField('XR')
+    device_config_address = StringField('connect/address', default='127.0.0.1', validators=[DataRequired(), Length(min=5)])
+    device_config_port = StringField('connect/port', default='0', validators=[DataRequired(), Length(min=1)])
+    device_config_settings = TextAreaField('connect/settings', default='{}', validators=[DataRequired(), Length(min=2)])
+    submit = SubmitField('Add')
+
+    def validate_operational_status(form, field):
+        if field.data not in DeviceOperationalStatusEnum.DESCRIPTOR.values_by_number:
+            raise ValidationError('The operational status value selected is incorrect!')
+
+class ConfigForm(FlaskForm):
+    device_key_config = StringField('Key configuration')
+    device_value_config = StringField('Value configuration')
+    submit = SubmitField('Add')
+
+class UpdateDeviceForm(FlaskForm):
+    update_operational_status = SelectField('Operational Status',
+                           choices=[(-1, 'Select...'), (0, 'Undefined'), (1, 'Disabled'), (2, 'Enabled')],
+                           coerce=int,
+                           validators=[NumberRange(min=0)])
+
+    submit = SubmitField('Update')
+
+class ConfigIpPCEForm(FlaskForm):
+    pce_address = StringField('ip', default='127.0.0.1', validators=[DataRequired(), Length(min=5)])
+    submit = SubmitField('Submit')
+
+class SendPathForm(FlaskForm):
+    command = StringField('command', render_kw={"placeholder": "initiate lsp lsp_name 10.95.86.214 1.1.1.1 1.1.1.2 m1228800 na192.168.3.11-192.168.3.13 m819200 na192.168.2.13-192.168.2.12"}, validators=[DataRequired(), Length(min=5)])
+    submit = SubmitField('Submit')
diff --git a/src/webui/service/pcep/routes.py b/src/webui/service/pcep/routes.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a6e90374812425481ca993fdb7e90f6b78a59ae
--- /dev/null
+++ b/src/webui/service/pcep/routes.py
@@ -0,0 +1,226 @@
+import logging
+from flask import render_template, Blueprint, flash, session, redirect, url_for
+from common.proto.context_pb2 import (
+    DeviceDriverEnum, DeviceList, DeviceOperationalStatusEnum,
+    Empty, TopologyId)
+from common.tools.object_factory.Context import json_context_id
+from common.tools.object_factory.Topology import json_topology_id
+from context.client.ContextClient import ContextClient
+from device.client.DeviceClient import DeviceClient
+from webui.service.pcep.forms import SendPathForm
+
+from pcep.client.PcepClient import PcepClient
+from pcep.service.tools.Grpc_TestClient.gRPC_PCEPClient import GrpcPcepClient
+
+
+pcep = Blueprint('pcep', __name__, url_prefix='/pcep')
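+
+# Hedged sketch (not wired into the routes below): the handlers reach the Java PCE
+# through GrpcPcepClient, which wraps the gRPC service defined in grpcService.proto
+# (update/getLSPdb). Direct use of the generated stub would look roughly like this;
+# the import path and the default target address are assumptions, not values taken
+# from this patch.
+def _example_pce_exchange(command: str, target: str = 'localhost:6666') -> str:
+    import grpc
+    from pcep.service.tools.protos import grpcService_pb2, grpcService_pb2_grpc
+    with grpc.insecure_channel(target) as channel:
+        stub = grpcService_pb2_grpc.pceServiceStub(channel)
+        # The Java server acknowledges a command with an empty commandResponse...
+        stub.update(grpcService_pb2.commandRequest(command=command))
+        # ...and returns the LSP database contents on request.
+        return stub.getLSPdb(grpcService_pb2.LSPdb_Request()).LSPdb_data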
+context_client = ContextClient()
+device_client = DeviceClient()
+pcep_client = PcepClient()
+grpc_pcep_client = GrpcPcepClient()
+logger = logging.getLogger(__name__)
+
+@pcep.get('/')
+def home():
+    if 'context_uuid' not in session or 'topology_uuid' not in session:
+        flash("Please select a context!", "warning")
+        return redirect(url_for("main.home"))
+
+    context_uuid = session['context_uuid']
+    topology_uuid = session['topology_uuid']
+
+    context_client.connect()
+    json_topo_id = json_topology_id(topology_uuid, context_id=json_context_id(context_uuid))
+    grpc_topology = context_client.GetTopology(TopologyId(**json_topo_id))
+
+    if grpc_topology is None:
+        flash('Context({:s})/Topology({:s}) not found'.format(str(context_uuid), str(topology_uuid)), 'danger')
+        devices = []
+    else:
+        topo_device_uuids = {device_id.device_uuid.uuid for device_id in grpc_topology.device_ids}
+        grpc_devices: DeviceList = context_client.ListDevices(Empty())
+        devices = [
+            device for device in grpc_devices.devices
+            if device.device_id.device_uuid.uuid in topo_device_uuids
+        ]
+    logger.info('pcep/home')
+    context_client.close()
+
+    return render_template(
+        'pcep/home.html', devices=devices, dde=DeviceDriverEnum,
+        dose=DeviceOperationalStatusEnum)
+
+@pcep.route('managePCEP', methods=['GET', 'POST'])
+def managePCEP():
+    pcep_manage_form = SendPathForm()
+    if pcep_manage_form.validate_on_submit():
+        command_data = pcep_manage_form.command.data
+        grpc_pcep_client.connect()
+        command_response = grpc_pcep_client.sendUpdate(command_data)
+        if command_response.success:
+            flash(f'Command "{pcep_manage_form.command.data}" was executed successfully!', 'success')
+        else:
+            flash(f'Command "{pcep_manage_form.command.data}" was not executed successfully', 'danger')
+            flash(f'Error Information: "{command_response.error_message}"', 'warning')
+        grpc_pcep_client.close()
+
+    grpc_pcep_client.connect()
+    sessions = grpc_pcep_client.showSessions()
+    grpc_pcep_client.close()
+
+    return render_template('pcep/managePCEP.html', pcep_manage_form=pcep_manage_form, sessions=sessions)
+
+@pcep.route('lspdbPCEP', methods=['GET', 'POST'])
+def lspdbPCEP():
+    grpc_pcep_client.connect()
+    lspdb = grpc_pcep_client.showLSPDB()
+    if lspdb:
+        flash('LSPDB data retrieved', 'success')
+    grpc_pcep_client.close()
+    return render_template('pcep/lspdb.html', lspdb=lspdb)
+
+'''
+@pcep.route('add/<path:device_name>', methods=['GET', 'POST'])
+def add(device_name):
+    """
+    Add a discovered device from the BGP-LS protocol. Populate the form with
+    the information already known from BGP-LS.
+ """ + # TODO: Conect to device and get necessary info + form = AddDeviceForm() + + logger.info('pcep/add') + + # listing enum values + form.operational_status.choices = [] + for key, _ in DeviceOperationalStatusEnum.DESCRIPTOR.values_by_name.items(): + form.operational_status.choices.append( + (DeviceOperationalStatusEnum.Value(key), key.replace('DEVICEOPERATIONALSTATUS_', ''))) + + form.device_type.choices = [] + # items for Device Type field + for device_type in DeviceTypeEnum: + form.device_type.choices.append((device_type.value,device_type.value)) + + if form.validate_on_submit(): + device_obj = Device() + # Device UUID: + device_obj.device_id.device_uuid.uuid = form.device_id.data # pylint: disable=no-member + + # Device type: + device_obj.device_type = str(form.device_type.data) + + # Device configurations: + config_rule = device_obj.device_config.config_rules.add() # pylint: disable=no-member + config_rule.action = ConfigActionEnum.CONFIGACTION_SET + config_rule.custom.resource_key = '_connect/address' + config_rule.custom.resource_value = form.device_config_address.data + + config_rule = device_obj.device_config.config_rules.add() # pylint: disable=no-member + config_rule.action = ConfigActionEnum.CONFIGACTION_SET + config_rule.custom.resource_key = '_connect/port' + config_rule.custom.resource_value = form.device_config_port.data + + config_rule = device_obj.device_config.config_rules.add() # pylint: disable=no-member + config_rule.action = ConfigActionEnum.CONFIGACTION_SET + config_rule.custom.resource_key = '_connect/settings' + + try: + device_config_settings = json.loads(form.device_config_settings.data) + except: # pylint: disable=bare-except + device_config_settings = form.device_config_settings.data + + if isinstance(device_config_settings, dict): + config_rule.custom.resource_value = json.dumps(device_config_settings) + else: + config_rule.custom.resource_value = str(device_config_settings) + + # Device status: + device_obj.device_operational_status = form.operational_status.data + + # Device drivers: + if form.device_drivers_undefined.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_UNDEFINED) + if form.device_drivers_openconfig.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_OPENCONFIG) + if form.device_drivers_transport_api.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_TRANSPORT_API) + if form.device_drivers_p4.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_P4) + if form.device_drivers_ietf_network_topology.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_IETF_NETWORK_TOPOLOGY) + if form.device_drivers_onf_tr_352.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352) + if form.device_drivers_xr.data: + device_obj.device_drivers.append(DeviceDriverEnum.DEVICEDRIVER_XR) + + try: + device_client.connect() + logger.info('add device from pcep:%s',device_obj) + response: DeviceId = device_client.AddDevice(device_obj) + device_client.close() + flash(f'New device was created with ID "{response.device_uuid.uuid}".', 'success') + #pcep_client.connect() + #pcep_client.configuratePCE(PceIpRq(address=device_obj.device_id.device_uuid.uuid)) + #pcep_client.close() + return redirect(url_for('device.home')) + except Exception as e: + flash(f'Problem adding the device. 
+
+    # Prefill data with discovered info from the speaker
+    # Device name from BGP-LS
+    form.device_name = device_name
+    device = device_name
+    form.device_id.data = device_name
+    # Default values (TODO: NOT WORKING)
+    form.device_type.data = DeviceTypeEnum.EMULATED_PACKET_ROUTER
+    form.device_config_settings.data = str('{"username": "admin", "password": "admin"}')
+
+    return render_template('pcep/add.html', form=form, device=device,
+                           submit_text='Add New Device')
+
+@pcep.route('detail/<path:device_uuid>', methods=['GET', 'POST'])
+def detail(device_uuid: str):
+    request = DeviceId()
+    request.device_uuid.uuid = device_uuid
+    context_client.connect()
+    response = context_client.GetDevice(request)
+    context_client.close()
+    return render_template('pcep/detail.html', device=response,
+                           dde=DeviceDriverEnum,
+                           dose=DeviceOperationalStatusEnum)
+
+
+@pcep.route('sendPath', methods=['GET', 'POST'])
+def sendPath():
+
+    pcep_client.connect()
+    form = SendPathForm()
+    if form.validate_on_submit():
+        logger.info('Send Path ip:%s', form.command.data)
+        pcep_client.sendRequest(RequestRq(command=form.command.data))
+        logger.info('Test 1')
+        flash(f'Command "{form.command.data}" added successfully!', 'success')
+        logger.info('Test 2')
+    pcep_client.close()
+    logger.info('Test 3')
+    return render_template('pcep/sendPath.html', form=form)
+
+@pcep.route('formPcep', methods=['GET','POST'])
+def formPcep():
+    # connect to PCEP?
+    form = ConfigIpPCEForm()
+    if request.method == "POST":
+        address = form.pce_address.data
+        logger.info("FORM formPcep: %s ", address)
+
+        flash(f'Pcep "{address}" added successfully!', 'success')
+
+    return redirect(url_for('pcep.home'))
+'''
diff --git a/src/webui/service/templates/base.html b/src/webui/service/templates/base.html
index 60cd5aebde39019b34c6ff2b763137f5c3d05035..a330741354bab24df44819d01a72c79dbd0986af 100644
--- a/src/webui/service/templates/base.html
+++ b/src/webui/service/templates/base.html
@@ -103,6 +103,12 @@
               Load Generator
             {% endif %}
+