Commit b51eac16 authored by Lluis Gifre Renom
Parent commit: fee580ca
Merge request: !54 (Release 2.0.0)

PathComp component:

- initial skeleton (not functional)

Showing changed files with 3817 additions and 3 deletions
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: pathcompservice
spec:
  selector:
    matchLabels:
      app: pathcompservice
  template:
    metadata:
      labels:
        app: pathcompservice
    spec:
      terminationGracePeriodSeconds: 5
      containers:
      - name: server
        image: registry.gitlab.com/teraflow-h2020/controller/pathcomp:latest
        imagePullPolicy: Always
        ports:
        - containerPort: 10020
        env:
        - name: LOG_LEVEL
          value: "INFO"
        readinessProbe:
          exec:
            command: ["/bin/grpc_health_probe", "-addr=:10020"]
        livenessProbe:
          exec:
            command: ["/bin/grpc_health_probe", "-addr=:10020"]
        resources:
          requests:
            cpu: 250m
            memory: 512Mi
          limits:
            cpu: 700m
            memory: 1024Mi
---
apiVersion: v1
kind: Service
metadata:
  name: pathcompservice
spec:
  type: ClusterIP
  selector:
    app: pathcompservice
  ports:
  - name: grpc
    protocol: TCP
    port: 10020
    targetPort: 10020
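Once the Deployment and Service above are applied, other components reach PathComp through the Service DNS name and gRPC port. A minimal connectivity sketch in Python, illustrative only; it assumes the generated pathcomp stubs are importable on the caller side:

import grpc
from pathcomp.proto.pathcomp_pb2_grpc import PathCompServiceStub

# 'pathcompservice' is the ClusterIP Service declared above; 10020 is its gRPC port.
channel = grpc.insecure_channel('pathcompservice:10020')
stub = PathCompServiceStub(channel)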
// Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package pathcomp;

import "context.proto";

service PathCompService {
  rpc Compute(PathCompRequest) returns (PathCompReply) {}
}

message PathCompRequest {
  repeated context.Service services = 1;
}

message PathCompReply {
  repeated context.Connection connections = 1;
}
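The Compute RPC receives a list of context.Service objects and returns the context.Connection objects that realize them. A hedged sketch of populating a request with the generated Python classes (the UUID values and the context.proto field layout assumed here are illustrative):

from pathcomp.proto.pathcomp_pb2 import PathCompRequest

request = PathCompRequest()
service = request.services.add()                           # repeated context.Service services = 1
service.service_id.context_id.context_uuid.uuid = 'admin'  # placeholder context UUID
service.service_id.service_uuid.uuid = 'svc-example'       # placeholder service UUID
# A connected PathCompServiceStub would then return a PathCompReply:
# reply = stub.Compute(request); reply.connections holds the repeated context.Connection field.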
#!/bin/bash
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
./report_coverage_all.sh | grep --color -E -i "^pathcomp/.*$|$"
#!/bin/bash
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
PROJECTDIR=`pwd`
cd $PROJECTDIR/src
RCFILE=$PROJECTDIR/coverage/.coveragerc
# Run unitary tests and analyze coverage of code at same time
# Useful flags for pytest:
#-o log_cli=true -o log_file=service.log -o log_file_level=DEBUG
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
pathcomp/tests/test_unitary.py
@@ -97,3 +97,6 @@ coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     slice/tests/test_unitary.py
+
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    pathcomp/tests/test_unitary.py
@@ -46,6 +46,7 @@ class ServiceNameEnum(Enum):
     COMPUTE = 'compute'
     CYBERSECURITY = 'cybersecurity'
     INTERDOMAIN = 'interdomain'
+    PATHCOMP = 'pathcomp'
 
 # Default gRPC service ports
 DEFAULT_SERVICE_GRPC_PORTS = {
@@ -60,6 +61,7 @@ DEFAULT_SERVICE_GRPC_PORTS = {
     ServiceNameEnum.COMPUTE      .value : 9090,
     ServiceNameEnum.CYBERSECURITY.value : 10000,
     ServiceNameEnum.INTERDOMAIN  .value : 10010,
+    ServiceNameEnum.PATHCOMP     .value : 10020,
 }
 
 # Default HTTP/REST-API service ports
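For reference, the new enum entry and its default port can be read back directly from common.Constants; a tiny sketch based only on the hunk above:

from common.Constants import ServiceNameEnum, DEFAULT_SERVICE_GRPC_PORTS

assert ServiceNameEnum.PATHCOMP.value == 'pathcomp'
assert DEFAULT_SERVICE_GRPC_PORTS[ServiceNameEnum.PATHCOMP.value] == 10020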
@@ -15,10 +15,10 @@
 import copy
 from typing import Dict, List
 
-def get_link_uuid(a_device_id : Dict, a_endpoint_id : Dict, z_device_id : Dict, z_endpoint_id : Dict) -> str:
+def get_link_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str:
     return '{:s}/{:s}=={:s}/{:s}'.format(
-        a_device_id['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'],
-        z_device_id['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid'])
+        a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'],
+        z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid'])
 
 def json_link_id(link_uuid : str):
     return {'link_uuid': {'uuid': link_uuid}}
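A short illustration of the link UUID format produced by the new signature (the endpoint dictionaries and the import path are placeholders/assumptions):

from common.tools.object_factory.Link import get_link_uuid, json_link_id  # assumed module path

a_endpoint_id = {'device_id': {'device_uuid': {'uuid': 'R1'}}, 'endpoint_uuid': {'uuid': 'eth0'}}
z_endpoint_id = {'device_id': {'device_uuid': {'uuid': 'R2'}}, 'endpoint_uuid': {'uuid': 'eth0'}}
link_uuid = get_link_uuid(a_endpoint_id, z_endpoint_id)  # 'R1/eth0==R2/eth0'
link_id = json_link_id(link_uuid)                        # {'link_uuid': {'uuid': 'R1/eth0==R2/eth0'}}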
@@ -18,6 +18,11 @@ from common.Constants import DEFAULT_CONTEXT_UUID
 from common.tools.object_factory.Context import json_context_id
 from context.proto.context_pb2 import ServiceStatusEnum, ServiceTypeEnum
 
+def get_service_uuid(a_endpoint_id : Dict, z_endpoint_id : Dict) -> str:
+    return 'svc:{:s}/{:s}=={:s}/{:s}'.format(
+        a_endpoint_id['device_id']['device_uuid']['uuid'], a_endpoint_id['endpoint_uuid']['uuid'],
+        z_endpoint_id['device_id']['device_uuid']['uuid'], z_endpoint_id['endpoint_uuid']['uuid'])
+
 def json_service_id(service_uuid : str, context_id : Optional[Dict] = None):
     result = {'service_uuid': {'uuid': service_uuid}}
     if context_id is not None: result['context_id'] = copy.deepcopy(context_id)
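Similarly, a sketch of the new service UUID helper (same placeholder endpoint dictionaries; the import path is an assumption):

from common.tools.object_factory.Service import get_service_uuid, json_service_id  # assumed module path

a_endpoint_id = {'device_id': {'device_uuid': {'uuid': 'R1'}}, 'endpoint_uuid': {'uuid': 'eth0'}}
z_endpoint_id = {'device_id': {'device_uuid': {'uuid': 'R2'}}, 'endpoint_uuid': {'uuid': 'eth0'}}
service_uuid = get_service_uuid(a_endpoint_id, z_endpoint_id)  # 'svc:R1/eth0==R2/eth0'
service_id = json_service_id(service_uuid)                     # {'service_uuid': {'uuid': 'svc:R1/eth0==R2/eth0'}}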
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build, tag and push the Docker image to the GitLab registry
build pathcomp:
  variables:
    IMAGE_NAME: 'pathcomp' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: build
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
  after_script:
    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
      - src/$IMAGE_NAME/**/*.{py,in,yml}
      - src/$IMAGE_NAME/Dockerfile
      - src/$IMAGE_NAME/tests/*.py
      - manifests/${IMAGE_NAME}service.yaml
      - .gitlab-ci.yml
# Apply unit test to the component
unit test pathcomp:
  variables:
    IMAGE_NAME: 'pathcomp' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: unit_test
  needs:
    - build pathcomp
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
  script:
    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker run --name $IMAGE_NAME -d -p 3030:3030 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
    - sleep 5
    - docker ps -a
    - docker logs $IMAGE_NAME
    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
    - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
  after_script:
    - docker rm -f $IMAGE_NAME
    - docker network rm teraflowbridge
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
      - src/$IMAGE_NAME/**/*.{py,in,yml}
      - src/$IMAGE_NAME/Dockerfile
      - src/$IMAGE_NAME/tests/*.py
      - src/$IMAGE_NAME/tests/Dockerfile
      - manifests/${IMAGE_NAME}service.yaml
      - .gitlab-ci.yml
  artifacts:
    when: always
    reports:
      junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
# Deployment of the service in Kubernetes Cluster
deploy pathcomp:
  variables:
    IMAGE_NAME: 'pathcomp' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: deploy
  needs:
    - unit test pathcomp
    # - integ_test execute
  script:
    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
    - kubectl version
    - kubectl get all
    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
    - kubectl get all
  # environment:
  #   name: test
  #   url: https://example.com
  #   kubernetes:
  #     namespace: test
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
      when: manual
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: manual
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
FROM python:3-slim
# Install dependencies
RUN apt-get --yes --quiet --quiet update && \
apt-get --yes --quiet --quiet install wget g++ && \
rm -rf /var/lib/apt/lists/*
# Set Python to show logs as they occur
ENV PYTHONUNBUFFERED=0
# Download the gRPC health probe
RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
chmod +x /bin/grpc_health_probe
# Get generic Python packages
RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
# Set working directory
WORKDIR /var/teraflow
# Create module sub-folders
RUN mkdir -p /var/teraflow/pathcomp
# Get Python packages per module
COPY pathcomp/requirements.in pathcomp/requirements.in
RUN pip-compile --output-file=pathcomp/requirements.txt pathcomp/requirements.in
RUN python3 -m pip install -r pathcomp/requirements.txt
# Add files into working directory
COPY common/. common
COPY context/. context
COPY pathcomp/. pathcomp
# Start the service
ENTRYPOINT ["python", "-m", "pathcomp.service"]
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc, logging
from common.Constants import ServiceNameEnum
from common.Settings import get_service_host, get_service_port_grpc
from common.tools.client.RetryDecorator import retry, delay_exponential
from common.tools.grpc.Tools import grpc_message_to_json_string
from pathcomp.proto.pathcomp_pb2 import PathCompReply, PathCompRequest
from pathcomp.proto.pathcomp_pb2_grpc import PathCompServiceStub
LOGGER = logging.getLogger(__name__)
MAX_RETRIES = 15
DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
class PathCompClient:
    def __init__(self, host=None, port=None):
        if not host: host = get_service_host(ServiceNameEnum.PATHCOMP)
        if not port: port = get_service_port_grpc(ServiceNameEnum.PATHCOMP)
        self.endpoint = '{:s}:{:s}'.format(str(host), str(port))
        LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
        self.channel = None
        self.stub = None
        self.connect()
        LOGGER.debug('Channel created')

    def connect(self):
        self.channel = grpc.insecure_channel(self.endpoint)
        self.stub = PathCompServiceStub(self.channel)

    def close(self):
        if self.channel is not None: self.channel.close()
        self.channel = None
        self.stub = None

    @RETRY_DECORATOR
    def Compute(self, request : PathCompRequest) -> PathCompReply:
        LOGGER.debug('Compute request: {:s}'.format(grpc_message_to_json_string(request)))
        response = self.stub.Compute(request)
        LOGGER.debug('Compute result: {:s}'.format(grpc_message_to_json_string(response)))
        return response
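Typical use of the client above, as a sketch (the import path of the client module is an assumption; host and port fall back to the PATHCOMP defaults when not given):

from pathcomp.client.PathCompClient import PathCompClient  # assumed module path
from pathcomp.proto.pathcomp_pb2 import PathCompRequest

client = PathCompClient()                  # resolves host/port via get_service_host / get_service_port_grpc
reply = client.Compute(PathCompRequest())  # Compute is retried with exponential backoff by RETRY_DECORATOR
print(len(reply.connections))
client.close()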
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -eu
#
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Make folder containing the script the root folder for its execution
cd $(dirname $0)
rm -rf proto/*.py
rm -rf proto/__pycache__
tee proto/__init__.py << EOF > /dev/null
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EOF
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto kpi_sample_types.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto pathcomp.proto
rm proto/context_pb2_grpc.py
rm proto/kpi_sample_types_pb2_grpc.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/kpi_sample_types_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/pathcomp_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/pathcomp_pb2_grpc.py
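After the script runs, the generated modules are importable as a package (the sed commands above rewrite their imports to be relative), matching the imports used by the client:

from pathcomp.proto.pathcomp_pb2 import PathCompRequest, PathCompReply
from pathcomp.proto.pathcomp_pb2_grpc import PathCompServiceStub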
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: kpi_sample_types.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='kpi_sample_types.proto',
package='kpi_sample_types',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x16kpi_sample_types.proto\x12\x10kpi_sample_types*\xbe\x01\n\rKpiSampleType\x12\x19\n\x15KPISAMPLETYPE_UNKNOWN\x10\x00\x12%\n!KPISAMPLETYPE_PACKETS_TRANSMITTED\x10\x65\x12\"\n\x1eKPISAMPLETYPE_PACKETS_RECEIVED\x10\x66\x12$\n\x1fKPISAMPLETYPE_BYTES_TRANSMITTED\x10\xc9\x01\x12!\n\x1cKPISAMPLETYPE_BYTES_RECEIVED\x10\xca\x01\x62\x06proto3'
)
_KPISAMPLETYPE = _descriptor.EnumDescriptor(
name='KpiSampleType',
full_name='kpi_sample_types.KpiSampleType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='KPISAMPLETYPE_UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KPISAMPLETYPE_PACKETS_TRANSMITTED', index=1, number=101,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KPISAMPLETYPE_PACKETS_RECEIVED', index=2, number=102,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KPISAMPLETYPE_BYTES_TRANSMITTED', index=3, number=201,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KPISAMPLETYPE_BYTES_RECEIVED', index=4, number=202,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=45,
serialized_end=235,
)
_sym_db.RegisterEnumDescriptor(_KPISAMPLETYPE)
KpiSampleType = enum_type_wrapper.EnumTypeWrapper(_KPISAMPLETYPE)
KPISAMPLETYPE_UNKNOWN = 0
KPISAMPLETYPE_PACKETS_TRANSMITTED = 101
KPISAMPLETYPE_PACKETS_RECEIVED = 102
KPISAMPLETYPE_BYTES_TRANSMITTED = 201
KPISAMPLETYPE_BYTES_RECEIVED = 202
DESCRIPTOR.enum_types_by_name['KpiSampleType'] = _KPISAMPLETYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# @@protoc_insertion_point(module_scope)
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pathcomp.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import context_pb2 as context__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pathcomp.proto',
package='pathcomp',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0epathcomp.proto\x12\x08pathcomp\x1a\rcontext.proto\"5\n\x0fPathCompRequest\x12\"\n\x08services\x18\x01 \x03(\x0b\x32\x10.context.Service\"9\n\rPathCompReply\x12(\n\x0b\x63onnections\x18\x01 \x03(\x0b\x32\x13.context.Connection2R\n\x0fPathCompService\x12?\n\x07\x43ompute\x12\x19.pathcomp.PathCompRequest\x1a\x17.pathcomp.PathCompReply\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,])
_PATHCOMPREQUEST = _descriptor.Descriptor(
name='PathCompRequest',
full_name='pathcomp.PathCompRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='services', full_name='pathcomp.PathCompRequest.services', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=43,
serialized_end=96,
)
_PATHCOMPREPLY = _descriptor.Descriptor(
name='PathCompReply',
full_name='pathcomp.PathCompReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='connections', full_name='pathcomp.PathCompReply.connections', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=98,
serialized_end=155,
)
_PATHCOMPREQUEST.fields_by_name['services'].message_type = context__pb2._SERVICE
_PATHCOMPREPLY.fields_by_name['connections'].message_type = context__pb2._CONNECTION
DESCRIPTOR.message_types_by_name['PathCompRequest'] = _PATHCOMPREQUEST
DESCRIPTOR.message_types_by_name['PathCompReply'] = _PATHCOMPREPLY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PathCompRequest = _reflection.GeneratedProtocolMessageType('PathCompRequest', (_message.Message,), {
'DESCRIPTOR' : _PATHCOMPREQUEST,
'__module__' : 'pathcomp_pb2'
# @@protoc_insertion_point(class_scope:pathcomp.PathCompRequest)
})
_sym_db.RegisterMessage(PathCompRequest)
PathCompReply = _reflection.GeneratedProtocolMessageType('PathCompReply', (_message.Message,), {
'DESCRIPTOR' : _PATHCOMPREPLY,
'__module__' : 'pathcomp_pb2'
# @@protoc_insertion_point(class_scope:pathcomp.PathCompReply)
})
_sym_db.RegisterMessage(PathCompReply)
_PATHCOMPSERVICE = _descriptor.ServiceDescriptor(
name='PathCompService',
full_name='pathcomp.PathCompService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=157,
serialized_end=239,
methods=[
_descriptor.MethodDescriptor(
name='Compute',
full_name='pathcomp.PathCompService.Compute',
index=0,
containing_service=None,
input_type=_PATHCOMPREQUEST,
output_type=_PATHCOMPREPLY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_PATHCOMPSERVICE)
DESCRIPTOR.services_by_name['PathCompService'] = _PATHCOMPSERVICE
# @@protoc_insertion_point(module_scope)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import pathcomp_pb2 as pathcomp__pb2
class PathCompServiceStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Compute = channel.unary_unary(
                '/pathcomp.PathCompService/Compute',
                request_serializer=pathcomp__pb2.PathCompRequest.SerializeToString,
                response_deserializer=pathcomp__pb2.PathCompReply.FromString,
                )


class PathCompServiceServicer(object):
    """Missing associated documentation comment in .proto file."""

    def Compute(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_PathCompServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Compute': grpc.unary_unary_rpc_method_handler(
                    servicer.Compute,
                    request_deserializer=pathcomp__pb2.PathCompRequest.FromString,
                    response_serializer=pathcomp__pb2.PathCompReply.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'pathcomp.PathCompService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class PathCompService(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def Compute(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/pathcomp.PathCompService/Compute',
            pathcomp__pb2.PathCompRequest.SerializeToString,
            pathcomp__pb2.PathCompReply.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
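Since this commit is only a skeleton, no servicer implementation is included yet; the following is a hedged sketch of how the generated add_PathCompServiceServicer_to_server helper is typically wired into a gRPC server (the dummy Compute body is illustrative, not the PathComp algorithm):

from concurrent import futures

import grpc

from pathcomp.proto.pathcomp_pb2 import PathCompReply
from pathcomp.proto.pathcomp_pb2_grpc import PathCompServiceServicer, add_PathCompServiceServicer_to_server

class DummyPathCompServicer(PathCompServiceServicer):
    def Compute(self, request, context):
        return PathCompReply()  # empty reply: no connections computed yet

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
add_PathCompServiceServicer_to_server(DummyPathCompServicer(), server)
server.add_insecure_port('[::]:10020')  # default PathComp gRPC port
server.start()
server.wait_for_termination()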