Compare revisions: tfs/controller
Changes are shown as if the source revision was being merged into the target revision.
Showing 253 additions and 549 deletions.
#!/bin/bash
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
########################################################################################################################
# Define your deployment settings here
########################################################################################################################
# If not already set, set the name of the Kubernetes namespace to deploy to.
export TFS_K8S_NAMESPACE=${TFS_K8S_NAMESPACE:-"tfs"}
########################################################################################################################
# Automated steps start here
########################################################################################################################
kubectl --namespace $TFS_K8S_NAMESPACE logs deployment/l3-centralizedattackdetectorservice -c server
@@ -36,19 +36,26 @@ INTERDOMAIN_TOPOLOGY_NAME = 'inter' # contains the abstract inter-domain top

 # Default service names
 class ServiceNameEnum(Enum):
     CONTEXT = 'context'
     DEVICE = 'device'
     SERVICE = 'service'
     SLICE = 'slice'
     AUTOMATION = 'automation'
     POLICY = 'policy'
     MONITORING = 'monitoring'
     DLT = 'dlt'
     COMPUTE = 'compute'
     CYBERSECURITY = 'cybersecurity'
     INTERDOMAIN = 'interdomain'
     PATHCOMP = 'pathcomp'
-    WEBUI = 'webui'
+    L3_AM = 'l3-attackmitigator'
+    L3_CAD = 'l3-centralizedattackdetector'
+    WEBUI = 'webui'
+    DBSCANSERVING = 'dbscanserving'
+    OPTICALATTACKMANAGER = 'opticalattackmanager'
+    OPTICALATTACKDETECTOR = 'opticalattackdetector'
+    OPTICALATTACKMITIGATOR = 'opticalattackmitigator'
+    CACHING = 'caching'

     # Used for test and debugging only
     DLT_GATEWAY = 'dltgateway'

@@ -56,18 +63,23 @@ class ServiceNameEnum(Enum):

 # Default gRPC service ports
 DEFAULT_SERVICE_GRPC_PORTS = {
     ServiceNameEnum.CONTEXT      .value : 1010,
     ServiceNameEnum.DEVICE       .value : 2020,
     ServiceNameEnum.SERVICE      .value : 3030,
     ServiceNameEnum.SLICE        .value : 4040,
     ServiceNameEnum.AUTOMATION   .value : 5050,
     ServiceNameEnum.POLICY       .value : 6060,
     ServiceNameEnum.MONITORING   .value : 7070,
     ServiceNameEnum.DLT          .value : 8080,
     ServiceNameEnum.COMPUTE      .value : 9090,
-    ServiceNameEnum.CYBERSECURITY.value : 10000,
+    ServiceNameEnum.L3_CAD                .value : 10001,
+    ServiceNameEnum.L3_AM                 .value : 10002,
+    ServiceNameEnum.DBSCANSERVING         .value : 10008,
+    ServiceNameEnum.OPTICALATTACKDETECTOR .value : 10006,
+    ServiceNameEnum.OPTICALATTACKMITIGATOR.value : 10007,
+    ServiceNameEnum.OPTICALATTACKMANAGER  .value : 10005,
     ServiceNameEnum.INTERDOMAIN  .value : 10010,
     ServiceNameEnum.PATHCOMP     .value : 10020,

     # Used for test and debugging only
     ServiceNameEnum.DLT_GATEWAY  .value : 50051,
...
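Note: components resolve these defaults through common.Settings rather than reading the table directly; the refactored dbscanserving code below calls get_service_host / get_service_port_grpc. A minimal sketch of the port lookup, assuming an environment override named along the lines of DBSCANSERVINGSERVICE_SERVICE_PORT_GRPC (the helper internals are not part of this diff):

import os

from common.Constants import DEFAULT_SERVICE_GRPC_PORTS, ServiceNameEnum

def get_service_port_grpc_sketch(service_name: ServiceNameEnum) -> int:
    # Kubernetes injects <NAME>SERVICE_SERVICE_PORT_GRPC per Service; fall back to
    # the defaults table above when running outside the cluster (naming is assumed).
    envvar = '{:s}SERVICE_SERVICE_PORT_GRPC'.format(service_name.value.replace('-', '').upper())
    default = DEFAULT_SERVICE_GRPC_PORTS.get(service_name.value)
    return int(os.environ.get(envvar, default))

# Example: get_service_port_grpc_sketch(ServiceNameEnum.DBSCANSERVING) -> 10008 by default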
@@ -65,7 +65,7 @@ def main():
     rest_server.start()

     # Wait for Ctrl+C or termination signal
-    while not terminate.wait(timeout=0.1): pass
+    while not terminate.wait(timeout=1.0): pass

     LOGGER.info('Terminating...')
     grpc_service.stop()
...
@@ -44,9 +44,17 @@ def main():
     start_http_server(metrics_port)

     # Get Database Engine instance and initialize database, if needed
+    LOGGER.info('Getting SQLAlchemy DB Engine...')
     db_engine = Engine.get_engine()
-    if db_engine is None: return -1
-    Engine.create_database(db_engine)
+    if db_engine is None:
+        LOGGER.error('Unable to get SQLAlchemy DB Engine...')
+        return -1
+
+    try:
+        Engine.create_database(db_engine)
+    except: # pylint: disable=bare-except # pragma: no cover
+        LOGGER.exception('Failed to check/create the database: {:s}'.format(str(db_engine.url)))
+
     rebuild_database(db_engine)

     # Get message broker instance
@@ -57,7 +65,7 @@ def main():
     grpc_service.start()

     # Wait for Ctrl+C or termination signal
-    while not terminate.wait(timeout=0.1): pass
+    while not terminate.wait(timeout=1.0): pass

     LOGGER.info('Terminating...')
     grpc_service.stop()
...
@@ -42,12 +42,6 @@ class Engine:
             LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri)))
             return None

-        try:
-            Engine.create_database(engine)
-        except: # pylint: disable=bare-except # pragma: no cover
-            LOGGER.exception('Failed to check/create to database: {:s}'.format(str(crdb_uri)))
-            return None
-
         return engine

     @staticmethod
...
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-# build, tag and push the Docker image to the gitlab registry
+# Build, tag, and push the Docker image to the GitLab Docker registry
 build dbscanserving:
   variables:
     IMAGE_NAME: 'dbscanserving' # name of the microservice
@@ -21,24 +21,23 @@ build dbscanserving:
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
   script:
-    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
+    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile .
     - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
     - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
   after_script:
     - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
   rules:
     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
     - changes:
       - src/$IMAGE_NAME/**/*.{py,in,yml}
       - src/$IMAGE_NAME/Dockerfile
       - src/$IMAGE_NAME/tests/*.py
-      - src/$IMAGE_NAME/tests/Dockerfile
       - manifests/${IMAGE_NAME}service.yaml
       - .gitlab-ci.yml

-# apply unit test to the dbscanserving component
-unit test dbscanserving:
+# Apply unit test to the component
+unit_test dbscanserving:
   variables:
     IMAGE_NAME: 'dbscanserving' # name of the microservice
     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
@@ -48,14 +47,17 @@ unit test dbscanserving:
   before_script:
     - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
     - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
-    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
+    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME container is not in the system"; fi
   script:
     - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
-    - docker run --name $IMAGE_NAME -d -p 10006:10006 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
+    - docker run --name $IMAGE_NAME -d -p 10008:10008 -v "$PWD/src/$IMAGE_NAME/tests:/home/${IMAGE_NAME}/results" --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
     - sleep 5
     - docker ps -a
     - docker logs $IMAGE_NAME
-    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml"
+    - docker exec ps -a
+    - sleep 5
+    - docker logs $IMAGE_NAME
+    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/tests/test_unitary.py --junitxml=/home/${IMAGE_NAME}/results/${IMAGE_NAME}_report.xml"
     - docker exec -i $IMAGE_NAME bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing"
   coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
   after_script:
@@ -78,27 +80,27 @@ unit test dbscanserving:

 # Deployment of the dbscanserving service in Kubernetes Cluster
-deploy dbscanserving:
-  variables:
-    IMAGE_NAME: 'dbscanserving' # name of the microservice
-    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
-  stage: deploy
-  needs:
-    - unit test dbscanserving
-    # - integ_test execute
-  script:
-    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
-    - kubectl version
-    - kubectl get all
-    - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
-    - kubectl get all
-  # environment:
-  #   name: test
-  #   url: https://example.com
-  #   kubernetes:
-  #     namespace: test
-  rules:
-    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
-      when: manual
-    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
-      when: manual
+# deploy dbscanserving:
+#   variables:
+#     IMAGE_NAME: 'dbscanserving' # name of the microservice
+#     IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
+#   stage: deploy
+#   needs:
+#     - unit test dbscanserving
+#     # - integ_test execute
+#   script:
+#     - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/${IMAGE_NAME}service.yaml'
+#     - kubectl version
+#     - kubectl get all
+#     - kubectl apply -f "manifests/${IMAGE_NAME}service.yaml"
+#     - kubectl get all
+#   # environment:
+#   #   name: test
+#   #   url: https://example.com
+#   #   kubernetes:
+#   #     namespace: test
+#   rules:
+#     - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
+#       when: manual
+#     - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
+#       when: manual
 \ No newline at end of file
@@ -11,16 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-import logging
-
-# General settings
-LOG_LEVEL = logging.DEBUG
-
-# gRPC settings
-GRPC_SERVICE_PORT = 10006
-GRPC_MAX_WORKERS = 10
-GRPC_GRACE_PERIOD = 60
-
-# Prometheus settings
-METRICS_PORT = 9192
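Note: the removed constants are superseded by the environment-driven helpers in common.Settings used by the new __main__ below (get_log_level, get_metrics_port). A hedged sketch of their behavior; the env-var names and fallbacks here are assumptions, not the actual implementation:

import os

def get_log_level_sketch() -> str:
    # logging.basicConfig accepts level names such as 'DEBUG' or 'INFO' (env var assumed)
    return os.environ.get('LOG_LEVEL', 'INFO')

def get_metrics_port_sketch() -> int:
    # 9192 mirrors the METRICS_PORT default removed above (env var assumed)
    return int(os.environ.get('METRICS_PORT', '9192'))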
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-FROM python:3-slim
+FROM python:3.9-slim

 # Install dependencies
 RUN apt-get --yes --quiet --quiet update && \
@@ -27,22 +27,56 @@ RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
     wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
     chmod +x /bin/grpc_health_probe

+# Creating a user for security reasons
+RUN groupadd -r teraflow && useradd -u 1001 --no-log-init -r -m -g teraflow teraflow
+USER teraflow
+
+# set working directory
+RUN mkdir -p /home/teraflow/controller/common
+WORKDIR /home/teraflow/controller
+
+# Get Python packages per module
+ENV VIRTUAL_ENV=/home/teraflow/venv
+RUN python3 -m venv ${VIRTUAL_ENV}
+ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
+
 # Get generic Python packages
-RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+RUN python3 -m pip install --upgrade pip
+RUN python3 -m pip install --upgrade setuptools wheel
+RUN python3 -m pip install --upgrade pip-tools
+
+# Get common Python packages
+# Note: this step enables sharing the previous Docker build steps among all the Python components
+COPY --chown=teraflow:teraflow common_requirements.in common_requirements.in
+RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
+RUN python3 -m pip install -r common_requirements.txt
+
+# Add common files into working directory
+WORKDIR /home/teraflow/controller/common
+COPY --chown=teraflow:teraflow src/common/. ./
+RUN rm -rf proto

-# Set working directory
-WORKDIR /var/teraflow
+# Create proto sub-folder, copy .proto files, and generate Python code
+RUN mkdir -p /home/teraflow/controller/common/proto
+WORKDIR /home/teraflow/controller/common/proto
+RUN touch __init__.py
+COPY --chown=teraflow:teraflow proto/*.proto ./
+RUN python3 -m grpc_tools.protoc -I=. --python_out=. --grpc_python_out=. *.proto
+RUN rm *.proto
+RUN find . -type f -exec sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' {} \;

 # Create module sub-folders
-RUN mkdir -p /var/teraflow/dbscanserving
+RUN mkdir -p /home/teraflow/controller/dbscanserving
+WORKDIR /home/teraflow/controller

 # Get Python packages per module
-COPY dbscanserving/requirements.in dbscanserving/requirements.in
-RUN pip-compile --output-file=dbscanserving/requirements.txt dbscanserving/requirements.in
+COPY --chown=teraflow:teraflow ./src/dbscanserving/requirements.in dbscanserving/requirements.in
+# consider common and specific requirements to avoid inconsistencies with dependencies
+RUN pip-compile --quiet --output-file=dbscanserving/requirements.txt dbscanserving/requirements.in common_requirements.in
 RUN python3 -m pip install -r dbscanserving/requirements.txt

-COPY common/. common
-COPY dbscanserving/. dbscanserving
+# Add component files into working directory
+COPY --chown=teraflow:teraflow ./src/dbscanserving/. dbscanserving

-# Start dbscanserving service
+# Start the service
 ENTRYPOINT ["python", "-m", "dbscanserving.service"]
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
@@ -12,37 +12,62 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import grpc, logging
-from common.tools.client.RetryDecorator import retry, delay_exponential
-from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
-from dbscanserving.proto.dbscanserving_pb2_grpc import DetectorStub
+import logging
+from typing import Counter
+
+import grpc
+
+from common.Constants import ServiceNameEnum
+from common.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
+from common.proto.dbscanserving_pb2_grpc import DetectorStub
+from common.Settings import get_service_host, get_service_port_grpc
+from common.tools.client.RetryDecorator import delay_exponential, retry

 LOGGER = logging.getLogger(__name__)
 MAX_RETRIES = 15
 DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
-RETRY_DECORATOR = retry(max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+RETRY_DECORATOR = retry(
+    max_retries=MAX_RETRIES,
+    delay_function=DELAY_FUNCTION,
+    prepare_method_name="connect",
+)

 class DbscanServingClient:
-    def __init__(self, address, port):
-        self.endpoint = '{:s}:{:s}'.format(str(address), str(port))
-        LOGGER.debug('Creating channel to {:s}...'.format(str(self.endpoint)))
+    def __init__(self, host=None, port=None):
+        if not host:
+            host = get_service_host(ServiceNameEnum.DBSCANSERVING)
+        if not port:
+            port = get_service_port_grpc(ServiceNameEnum.DBSCANSERVING)
+        self.endpoint = "{:s}:{:s}".format(str(host), str(port))
+        LOGGER.debug("Creating channel to {:s}...".format(str(self.endpoint)))
         self.channel = None
         self.stub = None
         self.connect()
-        LOGGER.debug('Channel created')
+        LOGGER.debug("Channel created")

     def connect(self):
         self.channel = grpc.insecure_channel(self.endpoint)
         self.stub = DetectorStub(self.channel)

     def close(self):
-        if(self.channel is not None): self.channel.close()
+        if self.channel is not None:
+            self.channel.close()
         self.channel = None
         self.stub = None

     @RETRY_DECORATOR
-    def Detect(self, request : DetectionRequest) -> DetectionResponse:
-        LOGGER.debug('Detect request: {:s}'.format(str(request)))
-        response = self.stub.Detect(request)
-        LOGGER.debug('Detect result: {:s}'.format(str(response)))
+    def Detect(self, request: DetectionRequest) -> DetectionResponse:
+        LOGGER.debug(
+            "Detect request with {} samples and {} features".format(
+                request.num_samples, request.num_features
+            )
+        )
+        response: DetectionResponse = self.stub.Detect(request)
+        LOGGER.debug(
+            "Detect result with {} cluster indices [{}]".format(
+                len(response.cluster_indices), Counter(response.cluster_indices)
+            )
+        )
         return response
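Note: a minimal usage sketch of the refactored client, assuming it lives at dbscanserving.client.DbscanServingClient (message and field names come from dbscanserving.proto, whose generated code appears later in this diff):

from common.proto.dbscanserving_pb2 import EUCLIDEAN, DetectionRequest
from dbscanserving.client.DbscanServingClient import DbscanServingClient  # module path assumed

client = DbscanServingClient()  # host/port resolved from the environment when omitted

request = DetectionRequest(eps=0.5, min_samples=2, metric=EUCLIDEAN,
                           num_samples=4, num_features=2, identifier=1)
for features in ([0.0, 0.0], [0.1, 0.0], [0.0, 0.1], [10.0, 10.0]):
    sample = request.samples.add()    # repeated Sample field
    sample.features.extend(features)  # repeated float field

response = client.Detect(request)
print(list(response.cluster_indices))  # e.g. [0, 0, 0, -1]; -1 marks noise
client.close()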
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
#!/bin/bash -eu
#
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Make folder containing the script the root folder for its execution
cd $(dirname $0)
rm -rf proto/*.py
rm -rf proto/__pycache__
tee proto/__init__.py << EOF > /dev/null
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
EOF
# building current service protos
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto dbscanserving.proto
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/dbscanserving_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/dbscanserving_pb2_grpc.py
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dbscanserving.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='dbscanserving.proto',
package='dbscanserving',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x13\x64\x62scanserving.proto\x12\rdbscanserving\"\x1a\n\x06Sample\x12\x10\n\x08\x66\x65\x61tures\x18\x01 \x03(\x02\"\xc2\x01\n\x10\x44\x65tectionRequest\x12\x0b\n\x03\x65ps\x18\x01 \x01(\x02\x12\x13\n\x0bmin_samples\x18\x02 \x01(\x05\x12%\n\x06metric\x18\x03 \x01(\x0e\x32\x15.dbscanserving.Metric\x12\x13\n\x0bnum_samples\x18\x04 \x01(\x05\x12\x14\n\x0cnum_features\x18\x05 \x01(\x05\x12&\n\x07samples\x18\x06 \x03(\x0b\x32\x15.dbscanserving.Sample\x12\x12\n\nidentifier\x18\x07 \x01(\x05\",\n\x11\x44\x65tectionResponse\x12\x17\n\x0f\x63luster_indices\x18\x01 \x03(\x05*\x17\n\x06Metric\x12\r\n\tEUCLIDEAN\x10\x00\x32W\n\x08\x44\x65tector\x12K\n\x06\x44\x65tect\x12\x1f.dbscanserving.DetectionRequest\x1a .dbscanserving.DetectionResponseb\x06proto3'
)
_METRIC = _descriptor.EnumDescriptor(
name='Metric',
full_name='dbscanserving.Metric',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='EUCLIDEAN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=309,
serialized_end=332,
)
_sym_db.RegisterEnumDescriptor(_METRIC)
Metric = enum_type_wrapper.EnumTypeWrapper(_METRIC)
EUCLIDEAN = 0
_SAMPLE = _descriptor.Descriptor(
name='Sample',
full_name='dbscanserving.Sample',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='features', full_name='dbscanserving.Sample.features', index=0,
number=1, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=38,
serialized_end=64,
)
_DETECTIONREQUEST = _descriptor.Descriptor(
name='DetectionRequest',
full_name='dbscanserving.DetectionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='eps', full_name='dbscanserving.DetectionRequest.eps', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_samples', full_name='dbscanserving.DetectionRequest.min_samples', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metric', full_name='dbscanserving.DetectionRequest.metric', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num_samples', full_name='dbscanserving.DetectionRequest.num_samples', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num_features', full_name='dbscanserving.DetectionRequest.num_features', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='samples', full_name='dbscanserving.DetectionRequest.samples', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identifier', full_name='dbscanserving.DetectionRequest.identifier', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=261,
)
_DETECTIONRESPONSE = _descriptor.Descriptor(
name='DetectionResponse',
full_name='dbscanserving.DetectionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cluster_indices', full_name='dbscanserving.DetectionResponse.cluster_indices', index=0,
number=1, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=263,
serialized_end=307,
)
_DETECTIONREQUEST.fields_by_name['metric'].enum_type = _METRIC
_DETECTIONREQUEST.fields_by_name['samples'].message_type = _SAMPLE
DESCRIPTOR.message_types_by_name['Sample'] = _SAMPLE
DESCRIPTOR.message_types_by_name['DetectionRequest'] = _DETECTIONREQUEST
DESCRIPTOR.message_types_by_name['DetectionResponse'] = _DETECTIONRESPONSE
DESCRIPTOR.enum_types_by_name['Metric'] = _METRIC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), {
'DESCRIPTOR' : _SAMPLE,
'__module__' : 'dbscanserving_pb2'
# @@protoc_insertion_point(class_scope:dbscanserving.Sample)
})
_sym_db.RegisterMessage(Sample)
DetectionRequest = _reflection.GeneratedProtocolMessageType('DetectionRequest', (_message.Message,), {
'DESCRIPTOR' : _DETECTIONREQUEST,
'__module__' : 'dbscanserving_pb2'
# @@protoc_insertion_point(class_scope:dbscanserving.DetectionRequest)
})
_sym_db.RegisterMessage(DetectionRequest)
DetectionResponse = _reflection.GeneratedProtocolMessageType('DetectionResponse', (_message.Message,), {
'DESCRIPTOR' : _DETECTIONRESPONSE,
'__module__' : 'dbscanserving_pb2'
# @@protoc_insertion_point(class_scope:dbscanserving.DetectionResponse)
})
_sym_db.RegisterMessage(DetectionResponse)
_DETECTOR = _descriptor.ServiceDescriptor(
name='Detector',
full_name='dbscanserving.Detector',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=334,
serialized_end=421,
methods=[
_descriptor.MethodDescriptor(
name='Detect',
full_name='dbscanserving.Detector.Detect',
index=0,
containing_service=None,
input_type=_DETECTIONREQUEST,
output_type=_DETECTIONRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_DETECTOR)
DESCRIPTOR.services_by_name['Detector'] = _DETECTOR
# @@protoc_insertion_point(module_scope)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import dbscanserving_pb2 as dbscanserving__pb2
class DetectorStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Detect = channel.unary_unary(
'/dbscanserving.Detector/Detect',
request_serializer=dbscanserving__pb2.DetectionRequest.SerializeToString,
response_deserializer=dbscanserving__pb2.DetectionResponse.FromString,
)
class DetectorServicer(object):
"""Missing associated documentation comment in .proto file."""
def Detect(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_DetectorServicer_to_server(servicer, server):
rpc_method_handlers = {
'Detect': grpc.unary_unary_rpc_method_handler(
servicer.Detect,
request_deserializer=dbscanserving__pb2.DetectionRequest.FromString,
response_serializer=dbscanserving__pb2.DetectionResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'dbscanserving.Detector', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Detector(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def Detect(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/dbscanserving.Detector/Detect',
dbscanserving__pb2.DetectionRequest.SerializeToString,
dbscanserving__pb2.DetectionResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@@ -12,11 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-grpcio-health-checking
-prometheus-client
-pytest
-pytest-benchmark
-grpcio
 scikit-learn
-coverage
 \ No newline at end of file
@@ -12,59 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import grpc
 import logging
-from concurrent import futures
-from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
-from grpc_health.v1.health_pb2 import HealthCheckResponse
-from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
-from dbscanserving.proto.dbscanserving_pb2_grpc import add_DetectorServicer_to_server
-from dbscanserving.service.DbscanServiceServicerImpl import DbscanServiceServicerImpl
-from dbscanserving.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD
-
-BIND_ADDRESS = '0.0.0.0'
-LOGGER = logging.getLogger(__name__)
-
-class DbscanService:
-    def __init__(
-        self, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
-        grace_period=GRPC_GRACE_PERIOD):
-        self.address = address
-        self.port = port
-        self.endpoint = None
-        self.max_workers = max_workers
-        self.grace_period = grace_period
-        self.dbscan_servicer = None
-        self.health_servicer = None
-        self.pool = None
-        self.server = None
-
-    def start(self):
-        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(self.port))
-        LOGGER.debug('Starting Service (tentative endpoint: {:s}, max_workers: {:s})...'.format(
-            str(self.endpoint), str(self.max_workers)))
-
-        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
-        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
-
-        self.dbscan_servicer = DbscanServiceServicerImpl()
-        add_DetectorServicer_to_server(self.dbscan_servicer, self.server)
-
-        self.health_servicer = HealthServicer(
-            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
-        add_HealthServicer_to_server(self.health_servicer, self.server)
-
-        port = self.server.add_insecure_port(self.endpoint)
-        self.endpoint = '{:s}:{:s}'.format(str(self.address), str(port))
-        LOGGER.info('Listening on {:s}...'.format(self.endpoint))
-        self.server.start()
-        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
-
-        LOGGER.debug('Service started')
-
-    def stop(self):
-        LOGGER.debug('Stopping service (grace period {:s} seconds)...'.format(str(self.grace_period)))
-        self.health_servicer.enter_graceful_shutdown()
-        self.server.stop(self.grace_period)
-        LOGGER.debug('Service stopped')
+
+from common.Constants import ServiceNameEnum
+from common.proto.dbscanserving_pb2_grpc import add_DetectorServicer_to_server
+from common.Settings import get_service_port_grpc
+from common.tools.service.GenericGrpcService import GenericGrpcService
+from dbscanserving.service.DbscanServiceServicerImpl import \
+    DbscanServiceServicerImpl
+
+LOGGER = logging.getLogger(__name__)
+
+
+class DbscanService(GenericGrpcService):
+    def __init__(self, cls_name: str = __name__):
+        port = get_service_port_grpc(ServiceNameEnum.DBSCANSERVING)
+        super().__init__(port, cls_name=cls_name)
+        self.dbscan_servicer = DbscanServiceServicerImpl()
+
+    def install_servicers(self):
+        add_DetectorServicer_to_server(self.dbscan_servicer, self.server)
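Note: GenericGrpcService factors out the start()/stop() boilerplate deleted above; subclasses pass their port to the constructor and register servicers in install_servicers(). A simplified sketch of the pattern (the real class in common.tools.service.GenericGrpcService also wires in health checking; the constructor signature here is an approximation):

import logging
from concurrent import futures

import grpc

class GenericGrpcServiceSketch:
    def __init__(self, bind_port, bind_address='0.0.0.0', max_workers=10, grace_period=60, cls_name=__name__):
        self.bind_port = bind_port
        self.bind_address = bind_address
        self.max_workers = max_workers
        self.grace_period = grace_period
        self.logger = logging.getLogger(cls_name)
        self.server = None

    def install_servicers(self):
        raise NotImplementedError('subclasses register their servicers here')

    def start(self):
        self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=self.max_workers))
        self.install_servicers()  # subclass hooks its servicer into self.server
        self.server.add_insecure_port('{:s}:{:s}'.format(self.bind_address, str(self.bind_port)))
        self.server.start()

    def stop(self):
        self.server.stop(self.grace_period)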
@@ -12,32 +12,43 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import os, grpc, logging
+import logging
+
+import grpc
 from sklearn.cluster import DBSCAN
-from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
-from dbscanserving.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
-from dbscanserving.proto.dbscanserving_pb2_grpc import DetectorServicer
+
+from common.method_wrappers.Decorator import (MetricsPool,
+                                              safe_and_metered_rpc_method)
+from common.proto.dbscanserving_pb2 import DetectionRequest, DetectionResponse
+from common.proto.dbscanserving_pb2_grpc import DetectorServicer

 LOGGER = logging.getLogger(__name__)

-METRICS_POOL = MetricsPool('DbscanServing', 'RPC')
+METRICS_POOL = MetricsPool("DBSCANServing", "RPC")

 class DbscanServiceServicerImpl(DetectorServicer):
     def __init__(self):
-        LOGGER.debug('Creating Servicer...')
-        LOGGER.debug('Servicer Created')
+        LOGGER.debug("Creating Servicer...")
+        LOGGER.debug("Servicer Created")

     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
-    def Detect(self, request : DetectionRequest, context : grpc.ServicerContext) -> DetectionResponse:
+    def Detect(
+        self, request: DetectionRequest, context: grpc.ServicerContext
+    ) -> DetectionResponse:
         if request.num_samples != len(request.samples):
-            context.set_details("The sample dimension declared does not match with the number of samples received.")
-            LOGGER.debug(f"The sample dimension declared does not match with the number of samples received. Declared: {request.num_samples} - Received: {len(request.samples)}")
+            context.set_details(
+                f"The sample dimension declared ({request.num_samples}) does not match with the number of samples received ({len(request.samples)})."
+            )
+            LOGGER.debug(
+                f"The sample dimension declared does not match with the number of samples received. Declared: {request.num_samples} - Received: {len(request.samples)}"
+            )
             context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
             return DetectionResponse()
         # TODO: implement the validation of the features dimension
-        clusters = DBSCAN(eps=request.eps, min_samples=request.min_samples).fit_predict([[x for x in sample.features] for sample in request.samples])
+        clusters = DBSCAN(eps=request.eps, min_samples=request.min_samples).fit_predict(
+            [[x for x in sample.features] for sample in request.samples]
+        )
         response = DetectionResponse()
         for cluster in clusters:
             response.cluster_indices.append(cluster)
...
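Note: for intuition on what Detect returns, a standalone scikit-learn sketch with illustrative data; fit_predict assigns each sample a cluster index, with -1 meaning noise:

from sklearn.cluster import DBSCAN

samples = [[0.0, 0.0], [0.1, 0.0], [0.0, 0.1], [10.0, 10.0]]
# eps is the neighborhood radius; min_samples the points needed to form a dense region
clusters = DBSCAN(eps=0.5, min_samples=2).fit_predict(samples)
print(clusters)  # [ 0  0  0 -1]: three nearby points form cluster 0, the outlier is noise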
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
@@ -12,53 +12,55 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import os, logging, signal, sys, time, threading, multiprocessing
+import logging
+import signal
+import sys
+import threading
+
+from common.Settings import get_log_level, get_metrics_port
 from prometheus_client import start_http_server
-from common.Settings import get_setting
-from dbscanserving.Config import (
-    GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD, LOG_LEVEL, METRICS_PORT)
 from dbscanserving.service.DbscanService import DbscanService

 terminate = threading.Event()
 LOGGER = None

+
 def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
-    LOGGER.warning('Terminate signal received')
+    LOGGER.warning("Terminate signal received")
     terminate.set()

+
 def main():
     global LOGGER # pylint: disable=global-statement

-    service_port = get_setting('DBSCANSERVICE_SERVICE_PORT_GRPC', default=GRPC_SERVICE_PORT)
-    max_workers  = get_setting('MAX_WORKERS',  default=GRPC_MAX_WORKERS )
-    grace_period = get_setting('GRACE_PERIOD', default=GRPC_GRACE_PERIOD)
-    log_level    = get_setting('LOG_LEVEL',    default=LOG_LEVEL        )
-    metrics_port = get_setting('METRICS_PORT', default=METRICS_PORT     )
-
+    log_level = get_log_level()
     logging.basicConfig(level=log_level)
     LOGGER = logging.getLogger(__name__)

     signal.signal(signal.SIGINT,  signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)

-    LOGGER.info('Starting...')
+    LOGGER.info("Starting...")

     # Start metrics server
+    metrics_port = get_metrics_port()
     start_http_server(metrics_port)

     # Starting CentralizedCybersecurity service
-    grpc_service = DbscanService(
-        port=service_port, max_workers=max_workers, grace_period=grace_period)
+    grpc_service = DbscanService()
     grpc_service.start()

     # Wait for Ctrl+C or termination signal
-    while not terminate.wait(timeout=0.1): pass
+    while not terminate.wait(timeout=1):
+        pass

-    LOGGER.info('Terminating...')
+    LOGGER.info("Terminating...")
     grpc_service.stop()

-    LOGGER.info('Bye')
+    LOGGER.info("Bye")
     return 0

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     sys.exit(main())
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-